Plugins are loaded from /kafka/connect, Using the following environment variables:, GROUP_ID=sysint-kafka-connect, CONFIG_STORAGE_TOPIC=_sysint_connect_configs, OFFSET_STORAGE_TOPIC=_sysint_connect_offsets, STATUS_STORAGE_TOPIC=_sysint_connect_status, BOOTSTRAP_SERVERS=kafka:29093, REST_HOST_NAME=172.18.0.4, REST_PORT=8083, ADVERTISED_HOST_NAME=172.18.0.4, ADVERTISED_PORT=8083, KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter, VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter, INTERNAL_KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter, INTERNAL_VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter, OFFSET_FLUSH_INTERVAL_MS=5000, OFFSET_FLUSH_TIMEOUT_MS=5000, SHUTDOWN_TIMEOUT=10000, --- Setting property from CONNECT_INTERNAL_VALUE_CONVERTER: internal.value.converter=org.apache.kafka.connect.json.JsonConverter, --- Setting property from CONNECT_VALUE_CONVERTER: value.converter=org.apache.kafka.connect.json.JsonConverter, --- Setting property from CONNECT_REST_ADVERTISED_HOST_NAME: rest.advertised.host.name=172.18.0.4, --- Setting property from CONNECT_OFFSET_FLUSH_INTERVAL_MS: offset.flush.interval.ms=5000, --- Setting property from CONNECT_GROUP_ID: group.id=sysint-kafka-connect, --- Setting property from CONNECT_BOOTSTRAP_SERVERS: bootstrap.servers=kafka:29093, --- Setting property from CONNECT_KEY_CONVERTER: key.converter=org.apache.kafka.connect.json.JsonConverter, --- Setting property from CONNECT_TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS: task.shutdown.graceful.timeout.ms=10000, --- Setting property from CONNECT_REST_HOST_NAME: rest.host.name=172.18.0.4, --- Setting property from CONNECT_PLUGIN_PATH: plugin.path=/kafka/connect, --- Setting property from CONNECT_REST_PORT: rest.port=8083, --- Setting property from CONNECT_OFFSET_FLUSH_TIMEOUT_MS: offset.flush.timeout.ms=5000, --- Setting property from CONNECT_STATUS_STORAGE_TOPIC: status.storage.topic=_sysint_connect_status, --- Setting property from CONNECT_INTERNAL_KEY_CONVERTER: 
internal.key.converter=org.apache.kafka.connect.json.JsonConverter, --- Setting property from CONNECT_CONFIG_STORAGE_TOPIC: config.storage.topic=_sysint_connect_configs, --- Setting property from CONNECT_REST_ADVERTISED_PORT: rest.advertised.port=8083, --- Setting property from CONNECT_OFFSET_STORAGE_TOPIC: offset.storage.topic=_sysint_connect_offsets, 2020-10-13 08:27:24,179 INFO || WorkerInfo values: , jvm.args = -Xms256M, -Xmx2G, -XX:+UseG1GC, -XX:MaxGCPauseMillis=20, -XX:InitiatingHeapOccupancyPercent=35, -XX:+ExplicitGCInvokesConcurrent, -XX:MaxInlineLevel=15, -Djava.awt.headless=true, -Dcom.sun.management.jmxremote, -Dcom.sun.management.jmxremote.authenticate=false, -Dcom.sun.management.jmxremote.ssl=false, -Dcom.sun.management.jmxremote.port=1976, -Dkafka.logs.dir=/kafka/bin/../logs, -Dlog4j.configuration=file:/kafka/config/log4j.properties, -javaagent:/kafka/jmx_prometheus_javaagent.jar=8080:/kafka/config.yml, jvm.spec = Oracle Corporation, OpenJDK 64-Bit Server VM, 11.0.8, 11.0.8+10-LTS, jvm.classpath = 
/kafka/bin/../libs/activation-1.1.1.jar:/kafka/bin/../libs/aopalliance-repackaged-2.5.0.jar:/kafka/bin/../libs/argparse4j-0.7.0.jar:/kafka/bin/../libs/audience-annotations-0.5.0.jar:/kafka/bin/../libs/avro-1.9.2.jar:/kafka/bin/../libs/common-config-5.5.1.jar:/kafka/bin/../libs/common-utils-5.5.1.jar:/kafka/bin/../libs/commons-cli-1.4.jar:/kafka/bin/../libs/commons-lang3-3.8.1.jar:/kafka/bin/../libs/connect-api-2.6.0.jar:/kafka/bin/../libs/connect-basic-auth-extension-2.6.0.jar:/kafka/bin/../libs/connect-file-2.6.0.jar:/kafka/bin/../libs/connect-json-2.6.0.jar:/kafka/bin/../libs/connect-mirror-2.6.0.jar:/kafka/bin/../libs/connect-mirror-client-2.6.0.jar:/kafka/bin/../libs/connect-runtime-2.6.0.jar:/kafka/bin/../libs/connect-transforms-2.6.0.jar:/kafka/bin/../libs/hk2-api-2.5.0.jar:/kafka/bin/../libs/hk2-locator-2.5.0.jar:/kafka/bin/../libs/hk2-utils-2.5.0.jar:/kafka/bin/../libs/jackson-annotations-2.10.2.jar:/kafka/bin/../libs/jackson-core-2.10.2.jar:/kafka/bin/../libs/jackson-databind-2.10.2.jar:/kafka/bin/../libs/jackson-dataformat-csv-2.10.2.jar:/kafka/bin/../libs/jackson-datatype-jdk8-2.10.2.jar:/kafka/bin/../libs/jackson-jaxrs-base-2.10.2.jar:/kafka/bin/../libs/jackson-jaxrs-json-provider-2.10.2.jar:/kafka/bin/../libs/jackson-module-jaxb-annotations-2.10.2.jar:/kafka/bin/../libs/jackson-module-paranamer-2.10.2.jar:/kafka/bin/../libs/jackson-module-scala_2.12-2.10.2.jar:/kafka/bin/../libs/jakarta.activation-api-1.2.1.jar:/kafka/bin/../libs/jakarta.annotation-api-1.3.4.jar:/kafka/bin/../libs/jakarta.inject-2.5.0.jar:/kafka/bin/../libs/jakarta.ws.rs-api-2.1.5.jar:/kafka/bin/../libs/jakarta.xml.bind-api-2.3.2.jar:/kafka/bin/../libs/javassist-3.22.0-CR2.jar:/kafka/bin/../libs/javassist-3.26.0-GA.jar:/kafka/bin/../libs/javax.servlet-api-3.1.0.jar:/kafka/bin/../libs/javax.ws.rs-api-2.1.1.jar:/kafka/bin/../libs/jaxb-api-2.3.0.jar:/kafka/bin/../libs/jersey-client-2.28.jar:/kafka/bin/../libs/jersey-common-2.28.jar:/kafka/bin/../libs/jersey-container-servlet-2.28.jar:/kafk
a/bin/../libs/jersey-container-servlet-core-2.28.jar:/kafka/bin/../libs/jersey-hk2-2.28.jar:/kafka/bin/../libs/jersey-media-jaxb-2.28.jar:/kafka/bin/../libs/jersey-server-2.28.jar:/kafka/bin/../libs/jetty-client-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-continuation-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-http-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-io-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-security-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-server-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-servlet-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-servlets-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-util-9.4.24.v20191120.jar:/kafka/bin/../libs/jopt-simple-5.0.4.jar:/kafka/bin/../libs/kafka-avro-serializer-5.5.1.jar:/kafka/bin/../libs/kafka-clients-2.6.0.jar:/kafka/bin/../libs/kafka-connect-avro-converter-5.5.1.jar:/kafka/bin/../libs/kafka-connect-avro-data-5.5.1.jar:/kafka/bin/../libs/kafka-log4j-appender-2.6.0.jar:/kafka/bin/../libs/kafka-schema-registry-client-5.5.1.jar:/kafka/bin/../libs/kafka-schema-serializer-5.5.1.jar:/kafka/bin/../libs/kafka-streams-2.6.0.jar:/kafka/bin/../libs/kafka-streams-examples-2.6.0.jar:/kafka/bin/../libs/kafka-streams-scala_2.12-2.6.0.jar:/kafka/bin/../libs/kafka-streams-test-utils-2.6.0.jar:/kafka/bin/../libs/kafka-tools-2.6.0.jar:/kafka/bin/../libs/kafka_2.12-2.6.0.jar:/kafka/bin/../libs/log4j-1.2.17.jar:/kafka/bin/../libs/lz4-java-1.7.1.jar:/kafka/bin/../libs/maven-artifact-3.6.3.jar:/kafka/bin/../libs/metrics-core-2.2.0.jar:/kafka/bin/../libs/netty-buffer-4.1.50.Final.jar:/kafka/bin/../libs/netty-codec-4.1.50.Final.jar:/kafka/bin/../libs/netty-common-4.1.50.Final.jar:/kafka/bin/../libs/netty-handler-4.1.50.Final.jar:/kafka/bin/../libs/netty-resolver-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-native-epoll-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-native-unix-common-4.1.50.Final.jar:/kafka/bin/../libs/osgi-resource-locator-1.0.1.jar:/ka
fka/bin/../libs/paranamer-2.8.jar:/kafka/bin/../libs/plexus-utils-3.2.1.jar:/kafka/bin/../libs/reflections-0.9.12.jar:/kafka/bin/../libs/rocksdbjni-5.18.4.jar:/kafka/bin/../libs/scala-collection-compat_2.12-2.1.6.jar:/kafka/bin/../libs/scala-java8-compat_2.12-0.9.1.jar:/kafka/bin/../libs/scala-library-2.12.11.jar:/kafka/bin/../libs/scala-logging_2.12-3.9.2.jar:/kafka/bin/../libs/scala-reflect-2.12.11.jar:/kafka/bin/../libs/slf4j-api-1.7.30.jar:/kafka/bin/../libs/slf4j-log4j12-1.7.30.jar:/kafka/bin/../libs/snappy-java-1.1.7.3.jar:/kafka/bin/../libs/validation-api-2.0.1.Final.jar:/kafka/bin/../libs/zookeeper-3.5.8.jar:/kafka/bin/../libs/zookeeper-jute-3.5.8.jar:/kafka/bin/../libs/zstd-jni-1.4.4-7.jar, os.spec = Linux, amd64, 4.19.76-linuxkit, os.vcpus = 4, [org.apache.kafka.connect.runtime.WorkerInfo], 2020-10-13 08:27:24,202 INFO || Scanning for plugin classes. This might take a moment ... [org.apache.kafka.connect.cli.ConnectDistributed], 2020-10-13 08:27:24,251 INFO || Loading plugin from: /kafka/connect/kafka-connect-insert-uuid [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:24,423 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/kafka-connect-insert-uuid/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:24,426 INFO || Added plugin 'com.github.cjmatta.kafka.connect.smt.InsertUuid$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:24,426 INFO || Added plugin 'com.github.cjmatta.kafka.connect.smt.InsertUuid$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:24,427 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:24,427 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:24,428 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:24,436 INFO || Loading plugin from: /kafka/connect/debezium-connector-mongodb [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:25,482 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-mongodb/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:25,483 INFO || Added plugin 'io.debezium.connector.mongodb.MongoDbConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:25,483 INFO || Added plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:25,483 INFO || Added plugin 'io.debezium.converters.CloudEventsConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:25,483 INFO || Added plugin 'io.debezium.transforms.ExtractNewRecordState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:25,484 INFO || Added plugin 'io.debezium.transforms.outbox.EventRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:25,485 INFO || Added plugin 'io.debezium.connector.mongodb.transforms.ExtractNewDocumentState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:25,485 INFO || Added plugin 'io.debezium.transforms.ByLogicalTableRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:25,488 INFO || Loading plugin from: /kafka/connect/debezium-connector-mysql [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:26,509 INFO || Registered loader: 
PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-mysql/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:26,509 INFO || Added plugin 'io.debezium.connector.mysql.MySqlConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:26,522 INFO || Loading plugin from: /kafka/connect/debezium-connector-sqlserver [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:26,913 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-sqlserver/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:26,913 INFO || Added plugin 'io.debezium.connector.sqlserver.SqlServerConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:26,976 INFO || Loading plugin from: /kafka/connect/debezium-connector-postgres [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:27,519 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-postgres/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:27,519 INFO || Added plugin 'io.debezium.connector.postgresql.PostgresConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,130 INFO || Registered loader: jdk.internal.loader.ClassLoaders$AppClassLoader@3d4eac69 [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,132 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,132 INFO || Added plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,132 INFO || Added plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,132 INFO || Added plugin 'org.apache.kafka.connect.tools.MockSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,132 INFO || Added plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,132 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,133 INFO || Added plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,133 INFO || Added plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,133 INFO || Added plugin 'org.apache.kafka.connect.tools.MockSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,133 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,133 INFO || Added plugin 'org.apache.kafka.connect.tools.MockConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,133 INFO || Added plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,134 INFO || Added plugin 'org.apache.kafka.connect.converters.DoubleConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,134 INFO || Added plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,134 INFO || Added plugin 
'io.confluent.connect.avro.AvroConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,134 INFO || Added plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,134 INFO || Added plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,134 INFO || Added plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,135 INFO || Added plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,135 INFO || Added plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,135 INFO || Added plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,135 INFO || Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,135 INFO || Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,135 INFO || Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,136 INFO || Added plugin 'org.apache.kafka.connect.transforms.Filter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,136 INFO || Added plugin 'org.apache.kafka.connect.transforms.InsertField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,136 INFO || Added plugin 
'org.apache.kafka.connect.transforms.TimestampConverter$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,136 INFO || Added plugin 'org.apache.kafka.connect.transforms.MaskField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,137 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,137 INFO || Added plugin 'org.apache.kafka.connect.transforms.RegexRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,137 INFO || Added plugin 'org.apache.kafka.connect.transforms.HoistField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,137 INFO || Added plugin 'org.apache.kafka.connect.transforms.ValueToKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,138 INFO || Added plugin 'org.apache.kafka.connect.transforms.MaskField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,138 INFO || Added plugin 'org.apache.kafka.connect.transforms.Cast$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,138 INFO || Added plugin 'org.apache.kafka.connect.transforms.Cast$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,138 INFO || Added plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,139 INFO || Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,139 INFO || Added plugin 'org.apache.kafka.connect.transforms.Flatten$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,139 INFO || Added plugin 
'org.apache.kafka.connect.transforms.InsertField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,139 INFO || Added plugin 'org.apache.kafka.connect.transforms.Flatten$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,140 INFO || Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,140 INFO || Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,140 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,140 INFO || Added plugin 'org.apache.kafka.connect.transforms.HoistField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,141 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,143 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,143 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,143 INFO || Added plugin 'org.apache.kafka.common.config.provider.FileConfigProvider' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,143 INFO || Added plugin 'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,144 INFO || Added aliases 'MongoDbConnector' and 'MongoDb' to plugin 
'io.debezium.connector.mongodb.MongoDbConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,144 INFO || Added aliases 'MySqlConnector' and 'MySql' to plugin 'io.debezium.connector.mysql.MySqlConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,144 INFO || Added aliases 'PostgresConnector' and 'Postgres' to plugin 'io.debezium.connector.postgresql.PostgresConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,144 INFO || Added aliases 'SqlServerConnector' and 'SqlServer' to plugin 'io.debezium.connector.sqlserver.SqlServerConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,144 INFO || Added aliases 'FileStreamSinkConnector' and 'FileStreamSink' to plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,144 INFO || Added aliases 'FileStreamSourceConnector' and 'FileStreamSource' to plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,144 INFO || Added aliases 'MirrorCheckpointConnector' and 'MirrorCheckpoint' to plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,144 INFO || Added aliases 'MirrorHeartbeatConnector' and 'MirrorHeartbeat' to plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,144 INFO || Added aliases 'MirrorSourceConnector' and 'MirrorSource' to plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,145 INFO || Added aliases 'MockConnector' and 'Mock' to plugin 
'org.apache.kafka.connect.tools.MockConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,145 INFO || Added aliases 'MockSinkConnector' and 'MockSink' to plugin 'org.apache.kafka.connect.tools.MockSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,145 INFO || Added aliases 'MockSourceConnector' and 'MockSource' to plugin 'org.apache.kafka.connect.tools.MockSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,145 INFO || Added aliases 'SchemaSourceConnector' and 'SchemaSource' to plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,145 INFO || Added aliases 'VerifiableSinkConnector' and 'VerifiableSink' to plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,145 INFO || Added aliases 'VerifiableSourceConnector' and 'VerifiableSource' to plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,145 INFO || Added aliases 'AvroConverter' and 'Avro' to plugin 'io.confluent.connect.avro.AvroConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,145 INFO || Added aliases 'ByteBufferConverter' and 'ByteBuffer' to plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,145 INFO || Added aliases 'CloudEventsConverter' and 'CloudEvents' to plugin 'io.debezium.converters.CloudEventsConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'ByteBufferConverter' and 'ByteBuffer' to plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,146 INFO || Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,147 INFO || Added aliases 'DoubleConverter' 
and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,147 INFO || Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,147 INFO || Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,147 INFO || Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,147 INFO || Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,147 INFO || Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,147 INFO || Added alias 'SimpleHeaderConverter' to plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,147 INFO || Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,148 INFO || Added alias 'ExtractNewDocumentState' to plugin 'io.debezium.connector.mongodb.transforms.ExtractNewDocumentState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,148 INFO || Added alias 'ByLogicalTableRouter' to plugin 'io.debezium.transforms.ByLogicalTableRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 
08:27:31,148 INFO || Added alias 'ExtractNewRecordState' to plugin 'io.debezium.transforms.ExtractNewRecordState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,148 INFO || Added alias 'EventRouter' to plugin 'io.debezium.transforms.outbox.EventRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,148 INFO || Added aliases 'PredicatedTransformation' and 'Predicated' to plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,151 INFO || Added alias 'Filter' to plugin 'org.apache.kafka.connect.transforms.Filter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,152 INFO || Added alias 'RegexRouter' to plugin 'org.apache.kafka.connect.transforms.RegexRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,152 INFO || Added alias 'TimestampRouter' to plugin 'org.apache.kafka.connect.transforms.TimestampRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,152 INFO || Added alias 'ValueToKey' to plugin 'org.apache.kafka.connect.transforms.ValueToKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,152 INFO || Added alias 'HasHeaderKey' to plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,152 INFO || Added alias 'RecordIsTombstone' to plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,152 INFO || Added alias 'TopicNameMatches' to plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,152 INFO || Added alias 
'BasicAuthSecurityRestExtension' to plugin 'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,153 INFO || Added aliases 'AllConnectorClientConfigOverridePolicy' and 'All' to plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,153 INFO || Added aliases 'NoneConnectorClientConfigOverridePolicy' and 'None' to plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,153 INFO || Added aliases 'PrincipalConnectorClientConfigOverridePolicy' and 'Principal' to plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader], 2020-10-13 08:27:31,325 INFO || DistributedConfig values: , access.control.allow.methods = , access.control.allow.origin = , admin.listeners = null, bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , config.providers = [], config.storage.replication.factor = 1, config.storage.topic = _sysint_connect_configs, connect.protocol = sessioned, connections.max.idle.ms = 540000, connector.client.config.override.policy = None, group.id = sysint-kafka-connect, header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter, heartbeat.interval.ms = 3000, inter.worker.key.generation.algorithm = HmacSHA256, inter.worker.key.size = null, inter.worker.key.ttl.ms = 3600000, inter.worker.signature.algorithm = HmacSHA256, inter.worker.verification.algorithms = [HmacSHA256], internal.key.converter = class org.apache.kafka.connect.json.JsonConverter, internal.value.converter = class org.apache.kafka.connect.json.JsonConverter, key.converter = class 
org.apache.kafka.connect.json.JsonConverter, listeners = null, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, offset.flush.interval.ms = 5000, offset.flush.timeout.ms = 5000, offset.storage.partitions = 25, offset.storage.replication.factor = 1, offset.storage.topic = _sysint_connect_offsets, plugin.path = [/kafka/connect], rebalance.timeout.ms = 60000, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 40000, response.http.headers.config = , rest.advertised.host.name = 172.18.0.4, rest.advertised.listener = null, rest.advertised.port = 8083, rest.extension.classes = [], rest.host.name = 172.18.0.4, rest.port = 8083, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, scheduled.rebalance.max.delay.ms = 300000, security.protocol = PLAINTEXT, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.client.auth = none, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, 
ssl.truststore.type = JKS, status.storage.partitions = 5, status.storage.replication.factor = 1, status.storage.topic = _sysint_connect_status, task.shutdown.graceful.timeout.ms = 10000, topic.creation.enable = true, topic.tracking.allow.reset = true, topic.tracking.enable = true, value.converter = class org.apache.kafka.connect.json.JsonConverter, worker.sync.timeout.ms = 3000, worker.unsync.backoff.ms = 300000, [org.apache.kafka.connect.runtime.distributed.DistributedConfig], 2020-10-13 08:27:31,326 INFO || Worker configuration property 'internal.key.converter' is deprecated and may be removed in an upcoming release. The specified value 'org.apache.kafka.connect.json.JsonConverter' matches the default, so this property can be safely removed from the worker configuration. [org.apache.kafka.connect.runtime.WorkerConfig], 2020-10-13 08:27:31,326 INFO || Worker configuration property 'internal.value.converter' is deprecated and may be removed in an upcoming release. The specified value 'org.apache.kafka.connect.json.JsonConverter' matches the default, so this property can be safely removed from the worker configuration. 
[org.apache.kafka.connect.runtime.WorkerConfig], 2020-10-13 08:27:31,330 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:31,333 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,557 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,557 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,557 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,558 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,558 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,558 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,559 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,559 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,559 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,559 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,563 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,563 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,563 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,563 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,563 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,564 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,564 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,564 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,564 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,564 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,565 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:31,565 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:31,569 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:31,569 INFO || Kafka startTimeMs: 1602577651565 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:33,080 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. 
[org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:34,225 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. [org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:34,426 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. [org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:34,829 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. [org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:35,732 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. [org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:36,736 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. [org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:42,619 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. [org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:43,626 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. [org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:44,630 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. [org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:45,534 WARN || [AdminClient clientId=adminclient-1] Connection to node -1 (kafka/172.18.0.5:29093) could not be established. Broker may not be available. 
[org.apache.kafka.clients.NetworkClient], 2020-10-13 08:27:46,712 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:46,729 INFO || Logging initialized @25247ms to org.eclipse.jetty.util.log.Slf4jLog [org.eclipse.jetty.util.log], 2020-10-13 08:27:46,772 INFO || Added connector for http://172.18.0.4:8083 [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:46,773 INFO || Initializing REST server [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:46,781 INFO || jetty-9.4.24.v20191120; built: 2019-11-20T21:37:49.771Z; git: 363d5f2df3a8a28de40604320230664b9c793c16; jvm 11.0.8+10-LTS [org.eclipse.jetty.server.Server], 2020-10-13 08:27:46,808 INFO || Started http_172.18.0.48083@12b5454f{HTTP/1.1,[http/1.1]}{172.18.0.4:8083} [org.eclipse.jetty.server.AbstractConnector], 2020-10-13 08:27:46,808 INFO || Started @25327ms [org.eclipse.jetty.server.Server], 2020-10-13 08:27:46,829 INFO || Advertised URI: http://172.18.0.4:8083/ [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:46,829 INFO || REST server listening at http://172.18.0.4:8083/, advertising URL http://172.18.0.4:8083/ [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:46,829 INFO || Advertised URI: http://172.18.0.4:8083/ [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:46,829 INFO || REST admin endpoints at http://172.18.0.4:8083/ [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:46,830 INFO || Advertised URI: http://172.18.0.4:8083/ [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:46,831 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:46,831 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = 
[], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,835 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,835 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,835 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,835 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,835 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,835 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,836 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:46,836 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:46,836 INFO || Kafka startTimeMs: 1602577666836 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:46,867 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:46,876 INFO || Setting up None Policy for ConnectorClientConfigOverride. 
This will disallow any client configuration to be overridden [org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy], 2020-10-13 08:27:46,882 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:46,883 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,886 WARN || The 
configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'rest.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,887 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,888 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,888 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,888 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,888 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,888 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,888 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:46,888 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:46,888 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:46,888 INFO || Kafka startTimeMs: 1602577666888 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:46,911 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:46,917 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:46,917 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:46,917 INFO || Kafka startTimeMs: 1602577666917 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,032 INFO || JsonConverterConfig values: , converter.type = key, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, [org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:47,034 INFO || JsonConverterConfig values: , converter.type = value, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, [org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:47,034 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:47,034 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, 
sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,040 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,040 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,040 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,040 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,040 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,040 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,040 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,041 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,041 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,041 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,041 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,041 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,041 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,041 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,043 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,046 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,046 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,046 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,046 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,047 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,047 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,050 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,050 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,050 INFO || Kafka startTimeMs: 1602577667050 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,084 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:47,098 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:47,102 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, 
sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,105 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,105 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,105 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,105 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,105 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,105 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,106 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,106 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,106 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,106 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,106 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,106 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,106 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,106 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,106 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,107 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,107 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,107 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,107 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,107 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,107 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,107 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,107 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,111 INFO || Kafka startTimeMs: 1602577667107 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,147 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:47,154 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:47,155 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, 
sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,158 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,158 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,158 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,158 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,161 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,161 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,161 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,161 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,161 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,161 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,162 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,162 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,162 INFO || Kafka startTimeMs: 1602577667162 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,175 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:47,188 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:47,188 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, 
sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,190 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,190 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,190 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,190 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,190 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,190 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,190 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,191 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,191 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,191 INFO || Kafka startTimeMs: 1602577667191 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,208 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils], 2020-10-13 08:27:47,235 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,235 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,235 INFO || Kafka startTimeMs: 1602577667235 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,238 INFO || Kafka Connect distributed worker initialization took 23036ms [org.apache.kafka.connect.cli.ConnectDistributed], 2020-10-13 08:27:47,238 INFO || Kafka Connect starting [org.apache.kafka.connect.runtime.Connect], 2020-10-13 08:27:47,240 INFO || Initializing REST resources [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:47,240 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder starting 
[org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:47,244 INFO || Worker starting [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:47,244 INFO || Starting KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore], 2020-10-13 08:27:47,244 INFO || Starting KafkaBasedLog with topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog], 2020-10-13 08:27:47,245 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, 
ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,247 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,247 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,247 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,247 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,247 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,248 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,249 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,249 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,249 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,249 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,249 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,249 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,249 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,249 INFO || Kafka startTimeMs: 1602577667249 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,301 INFO || Adding admin resources to main listener [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:47,330 INFO || ProducerConfig values: , acks = -1, batch.size = 16384, bootstrap.servers = [kafka:29093], buffer.memory = 33554432, client.dns.lookup = use_all_dns_ips, client.id = producer-1, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 2147483647, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, linger.ms = 0, max.block.ms = 60000, max.in.flight.requests.per.connection = 1, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, 
sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,345 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,345 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,345 INFO || Kafka startTimeMs: 1602577667345 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,356 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = consumer-sysint-kafka-connect-1, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = sysint-kafka-connect, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = 
null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,366 INFO || [Producer clientId=producer-1] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:47,381 INFO || DefaultSessionIdManager workerName=node0 [org.eclipse.jetty.server.session], 2020-10-13 08:27:47,382 INFO || No SessionScavenger set, using defaults [org.eclipse.jetty.server.session], 2020-10-13 08:27:47,383 INFO || node0 Scavenging every 600000ms [org.eclipse.jetty.server.session], 2020-10-13 08:27:47,385 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'plugin.path' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,385 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,385 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,385 INFO || Kafka startTimeMs: 1602577667385 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,393 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:47,444 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_offsets-0, _sysint_connect_offsets-5, _sysint_connect_offsets-10, _sysint_connect_offsets-20, _sysint_connect_offsets-15, _sysint_connect_offsets-9, _sysint_connect_offsets-11, _sysint_connect_offsets-4, _sysint_connect_offsets-16, _sysint_connect_offsets-17, _sysint_connect_offsets-3, _sysint_connect_offsets-24, _sysint_connect_offsets-23, _sysint_connect_offsets-13, _sysint_connect_offsets-18, _sysint_connect_offsets-22, _sysint_connect_offsets-2, _sysint_connect_offsets-8, _sysint_connect_offsets-12, _sysint_connect_offsets-19, _sysint_connect_offsets-14, _sysint_connect_offsets-1, _sysint_connect_offsets-6, _sysint_connect_offsets-7, _sysint_connect_offsets-21 [org.apache.kafka.clients.consumer.KafkaConsumer], 2020-10-13 08:27:47,447 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,447 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-5 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,447 INFO || [Consumer 
clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-10 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-20 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-15 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-9 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-11 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-4 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-16 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-17 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer 
clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-3 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-24 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-23 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-13 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-18 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-22 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-2 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-8 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer 
clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-12 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-19 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-14 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-1 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-6 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-7 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,448 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-21 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,490 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-24 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,491 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-18 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,491 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-16 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,491 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-22 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,491 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-20 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,491 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-9 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,491 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-7 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-13 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-11 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-1 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-5 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-3 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-23 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-17 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-15 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-21 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-19 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-10 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-8 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-14 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-12 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-2 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-6 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,492 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-4 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,579 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog], 2020-10-13 08:27:47,579 INFO || Started KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog], 2020-10-13 08:27:47,579 INFO || Finished reading offsets topic and starting KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore], 2020-10-13 08:27:47,583 INFO || Worker started [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:47,583 INFO || Starting KafkaBasedLog with topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog], 2020-10-13 08:27:47,585 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, 
ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,589 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,589 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,589 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,589 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,589 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,589 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,589 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,589 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,590 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,591 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,591 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,591 INFO || Kafka startTimeMs: 1602577667591 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,617 INFO || ProducerConfig values: , acks = -1, batch.size = 16384, bootstrap.servers = [kafka:29093], buffer.memory = 33554432, client.dns.lookup = use_all_dns_ips, client.id = producer-2, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 120000, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.StringSerializer, linger.ms = 0, max.block.ms = 60000, max.in.flight.requests.per.connection = 1, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 0, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = 
/usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,620 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,620 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,620 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,620 WARN || The configuration 'plugin.path' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,620 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,620 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,620 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,620 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,620 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,621 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,621 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,621 INFO || Kafka startTimeMs: 1602577667621 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,623 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = consumer-sysint-kafka-connect-2, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = sysint-kafka-connect, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = 
null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,625 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,626 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,627 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,627 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,627 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,627 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,627 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,627 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,627 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,627 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,627 INFO || Kafka startTimeMs: 1602577667627 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,632 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:47,637 INFO || [Producer clientId=producer-2] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:47,639 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_status-0, _sysint_connect_status-4, _sysint_connect_status-1, _sysint_connect_status-2, _sysint_connect_status-3 [org.apache.kafka.clients.consumer.KafkaConsumer], 2020-10-13 08:27:47,640 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,640 INFO || [Consumer 
clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-4 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,640 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-1 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,640 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-2 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,640 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-3 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,651 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-2 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,651 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-1 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,651 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,651 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-4 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,652 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-3 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,734 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog], 2020-10-13 08:27:47,735 INFO || Started KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog], 2020-10-13 08:27:47,739 INFO || Starting KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore], 2020-10-13 08:27:47,739 INFO || Starting KafkaBasedLog with topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog], 2020-10-13 08:27:47,740 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = , connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, 
ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,744 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:47,745 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,745 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,745 INFO || Kafka startTimeMs: 1602577667745 [org.apache.kafka.common.utils.AppInfoParser], Oct 13, 2020 8:27:47 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime, WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource will be ignored. , Oct 13, 2020 8:27:47 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime, WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.RootResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.RootResource will be ignored. 
, Oct 13, 2020 8:27:47 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime, WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource will be ignored. , Oct 13, 2020 8:27:47 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime, WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource will be ignored. , 2020-10-13 08:27:47,770 INFO || ProducerConfig values: , acks = -1, batch.size = 16384, bootstrap.servers = [kafka:29093], buffer.memory = 33554432, client.dns.lookup = use_all_dns_ips, client.id = producer-3, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 2147483647, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.StringSerializer, linger.ms = 0, max.block.ms = 60000, max.in.flight.requests.per.connection = 1, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, 
sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,773 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:47,774 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,774 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,774 INFO || Kafka startTimeMs: 1602577667774 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,775 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = consumer-sysint-kafka-connect-3, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = sysint-kafka-connect, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = 
null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,778 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:47,779 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,779 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,779 INFO || Kafka startTimeMs: 1602577667779 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:47,785 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:47,791 INFO || [Producer clientId=producer-3] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:47,794 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_configs-0 [org.apache.kafka.clients.consumer.KafkaConsumer], 2020-10-13 08:27:47,794 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_configs-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,804 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Resetting offset for partition 
_sysint_connect_configs-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:47,829 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog], 2020-10-13 08:27:47,832 INFO || Started KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog], 2020-10-13 08:27:47,832 INFO || Started KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore], 2020-10-13 08:27:47,832 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder started [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:47,853 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:47,854 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Discovered group coordinator kafka:29093 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:47,856 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Rebalance started [org.apache.kafka.connect.runtime.distributed.WorkerCoordinator], 2020-10-13 08:27:47,856 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:47,877 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group. 
[org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:47,877 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], Oct 13, 2020 8:27:47 AM org.glassfish.jersey.internal.Errors logErrors, WARNING: The following warnings have been detected: WARNING: The (sub)resource method listLoggers in org.apache.kafka.connect.runtime.rest.resources.LoggingResource contains empty path annotation., WARNING: The (sub)resource method createConnector in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation., WARNING: The (sub)resource method listConnectors in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation., WARNING: The (sub)resource method listConnectorPlugins in org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource contains empty path annotation., WARNING: The (sub)resource method serverInfo in org.apache.kafka.connect.runtime.rest.resources.RootResource contains empty path annotation., , 2020-10-13 08:27:47,973 INFO || Started o.e.j.s.ServletContextHandler@3a37a501{/,null,AVAILABLE} [org.eclipse.jetty.server.handler.ContextHandler], 2020-10-13 08:27:47,973 INFO || REST resources initialized; server is started and ready to handle requests [org.apache.kafka.connect.runtime.rest.RestServer], 2020-10-13 08:27:47,973 INFO || Kafka Connect started [org.apache.kafka.connect.runtime.Connect], 2020-10-13 08:27:48,004 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Successfully joined group with generation 10 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:48,006 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Joined group at generation 10 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-e0879b42-5f99-4d46-ac74-c892007b25eb', 
leaderUrl='http://172.18.0.4:8083/', offset=19, connectorIds=[sysint-sqlserver-proc-runinit-connector, sysint-sqlserver-inv-runonly-connector, sysint-sqlserver-dbtech-runonly-connector, sysint-sqlserver-dbtech-runinit-connector], taskIds=[sysint-sqlserver-proc-runinit-connector-0, sysint-sqlserver-inv-runonly-connector-0, sysint-sqlserver-dbtech-runonly-connector-0, sysint-sqlserver-dbtech-runinit-connector-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,008 WARN || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Catching up to assignment's config offset. [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,008 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Current config state offset -1 is behind group assignment 19, reading to end of config log [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,321 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished reading to end of log and updated config snapshot, new config log offset: 19 [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,322 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connectors and tasks using config offset 19 [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,328 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-inv-runonly-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,331 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-dbtech-runonly-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,331 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector 
sysint-sqlserver-proc-runinit-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,334 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-dbtech-runinit-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,336 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-proc-runinit-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,338 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-inv-runonly-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,339 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-dbtech-runonly-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,341 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-dbtech-runinit-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,383 INFO || Creating task sysint-sqlserver-dbtech-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,384 INFO || Creating task sysint-sqlserver-proc-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,385 INFO || Creating connector sysint-sqlserver-dbtech-runinit-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,384 INFO || Creating connector sysint-sqlserver-inv-runonly-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,388 INFO || Creating connector sysint-sqlserver-dbtech-runonly-connector of type io.debezium.connector.sqlserver.SqlServerConnector 
[org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,388 INFO || Creating task sysint-sqlserver-dbtech-runonly-connector-0 [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,388 INFO || Creating connector sysint-sqlserver-proc-runinit-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,387 INFO || Creating task sysint-sqlserver-inv-runonly-connector-0 [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,405 INFO || ConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runinit-connector, predicates = [], tasks.max = 1, transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig], 2020-10-13 08:27:48,413 INFO || ConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-inv-runonly-connector, predicates = [], tasks.max = 1, transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig], 2020-10-13 08:27:48,411 INFO || ConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages 
= true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-proc-runinit-connector, predicates = [], tasks.max = 1, transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig], 2020-10-13 08:27:48,410 INFO || ConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runonly-connector, predicates = [], tasks.max = 1, transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig], 2020-10-13 08:27:48,411 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-proc-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:48,410 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, 
errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:48,407 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:48,406 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-inv-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:48,498 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, 
errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-inv-runonly-connector, predicates = [], tasks.max = 1, transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,498 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runinit-connector, predicates = [], tasks.max = 1, transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, 
transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,506 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-proc-runinit-connector, predicates = [], tasks.max = 1, transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class 
org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,525 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-inv-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, 
transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,528 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,530 INFO || EnrichedConnectorConfig values: , config.action.reload = 
restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runonly-connector, predicates = [], tasks.max = 1, transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,535 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-proc-runinit-connector, predicates = [], tasks.max = 1, 
topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,548 INFO || TaskConfig values: , task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask, [org.apache.kafka.connect.runtime.TaskConfig], 2020-10-13 08:27:48,534 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class 
com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,551 INFO || Instantiated task sysint-sqlserver-dbtech-runonly-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,551 INFO || Instantiated connector sysint-sqlserver-inv-runonly-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,553 INFO || TaskConfig values: , task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask, [org.apache.kafka.connect.runtime.TaskConfig], 2020-10-13 08:27:48,552 INFO || JsonConverterConfig values: , converter.type = key, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, [org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:48,559 INFO || JsonConverterConfig values: , converter.type = value, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, 
[org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:48,550 INFO || TaskConfig values: , task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask, [org.apache.kafka.connect.runtime.TaskConfig], 2020-10-13 08:27:48,560 INFO || Instantiated task sysint-sqlserver-proc-runinit-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,561 INFO || JsonConverterConfig values: , converter.type = key, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, [org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:48,564 INFO || JsonConverterConfig values: , converter.type = value, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, [org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:48,565 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-proc-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,565 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-proc-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,566 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-proc-runinit-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,550 INFO || TaskConfig values: , task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask, [org.apache.kafka.connect.runtime.TaskConfig], 2020-10-13 08:27:48,553 INFO || Instantiated connector sysint-sqlserver-dbtech-runonly-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 
08:27:48,567 INFO || Instantiated task sysint-sqlserver-dbtech-runinit-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,568 INFO || JsonConverterConfig values: , converter.type = key, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, [org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:48,563 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-dbtech-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,558 INFO || Instantiated task sysint-sqlserver-inv-runonly-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,570 INFO || JsonConverterConfig values: , converter.type = key, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, [org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:48,570 INFO || JsonConverterConfig values: , converter.type = value, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, [org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:48,571 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-inv-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,571 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-inv-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,571 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-inv-runonly-connector-0 using the worker config 
[org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,555 INFO || Instantiated connector sysint-sqlserver-dbtech-runinit-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,555 INFO || Finished creating connector sysint-sqlserver-inv-runonly-connector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,576 INFO || Finished creating connector sysint-sqlserver-dbtech-runonly-connector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,555 INFO || Instantiated connector sysint-sqlserver-proc-runinit-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,569 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-dbtech-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,569 INFO || JsonConverterConfig values: , converter.type = value, decimal.format = BASE64, schemas.cache.size = 1000, schemas.enable = false, [org.apache.kafka.connect.json.JsonConverterConfig], 2020-10-13 08:27:48,581 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-dbtech-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,581 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-dbtech-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,581 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-dbtech-runonly-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,582 INFO || Set up the header converter 
class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-dbtech-runinit-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,586 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:48,587 INFO || Finished creating connector sysint-sqlserver-dbtech-runinit-connector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,589 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-inv-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:48,590 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance 
= none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:48,591 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-proc-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:48,593 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, 
transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,591 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, 
transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,601 INFO || Finished creating connector sysint-sqlserver-proc-runinit-connector [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,593 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-inv-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class 
org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,624 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-proc-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:48,664 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} 
[org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,664 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,664 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,664 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker], 2020-10-13 08:27:48,674 INFO || ProducerConfig values: , acks = -1, batch.size = 16384, bootstrap.servers = [kafka:29093], buffer.memory = 33554432, client.dns.lookup = use_all_dns_ips, client.id = connector-producer-sysint-sqlserver-dbtech-runonly-connector-0, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 2147483647, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, linger.ms = 0, max.block.ms = 9223372036854775807, max.in.flight.requests.per.connection = 1, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 2147483647, retries = 2147483647, 
retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,674 INFO || ProducerConfig values: , acks = -1, batch.size = 16384, bootstrap.servers = [kafka:29093], buffer.memory = 33554432, client.dns.lookup = use_all_dns_ips, client.id = connector-producer-sysint-sqlserver-inv-runonly-connector-0, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 2147483647, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, linger.ms = 0, max.block.ms = 9223372036854775807, max.in.flight.requests.per.connection = 1, max.request.size = 
1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 2147483647, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,673 INFO || ProducerConfig values: , acks = -1, batch.size = 16384, bootstrap.servers = [kafka:29093], buffer.memory = 33554432, client.dns.lookup = use_all_dns_ips, client.id = 
connector-producer-sysint-sqlserver-proc-runinit-connector-0, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 2147483647, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, linger.ms = 0, max.block.ms = 9223372036854775807, max.in.flight.requests.per.connection = 1, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 2147483647, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, 
ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,673 INFO || ProducerConfig values: , acks = -1, batch.size = 16384, bootstrap.servers = [kafka:29093], buffer.memory = 33554432, client.dns.lookup = use_all_dns_ips, client.id = connector-producer-sysint-sqlserver-dbtech-runinit-connector-0, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 2147483647, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, linger.ms = 0, max.block.ms = 9223372036854775807, max.in.flight.requests.per.connection = 1, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 2147483647, retries = 2147483647, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, 
TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,690 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,690 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,690 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,690 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,691 INFO || Kafka startTimeMs: 1602577668690 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,690 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,700 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,700 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,700 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,700 INFO || Kafka startTimeMs: 1602577668700 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,695 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,691 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,708 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,708 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,708 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,708 INFO || Kafka startTimeMs: 1602577668708 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,701 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:48,706 INFO || [Producer clientId=connector-producer-sysint-sqlserver-dbtech-runinit-connector-0] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:48,714 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,714 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,714 INFO || Kafka startTimeMs: 1602577668714 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:48,713 INFO || [Producer clientId=connector-producer-sysint-sqlserver-dbtech-runonly-connector-0] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:48,712 INFO || [Producer clientId=connector-producer-sysint-sqlserver-proc-runinit-connector-0] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:48,716 INFO || [Producer clientId=connector-producer-sysint-sqlserver-inv-runonly-connector-0] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:48,721 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished starting connectors and tasks [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,758 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Session key updated [org.apache.kafka.connect.runtime.distributed.DistributedHerder], 2020-10-13 08:27:48,771 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,771 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,772 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,772 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,771 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,772 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,772 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,772 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,772 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,773 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,774 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,777 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,778 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,778 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,778 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,777 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,778 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,778 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,778 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,777 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,779 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,779 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,779 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,777 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,779 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,778 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,781 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,781 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,781 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,781 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,781 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.proc [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,781 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,781 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,781 INFO || table.whitelist = dbo.ImpostaPagato_Configurazione [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,778 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,781 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || tasks.max = 1 
[io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,779 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,782 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,782 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,782 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || table.whitelist = dbo.tab1,dbo.tab2 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask], 2020-10-13 
08:27:48,782 INFO || database.dbname = dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || database.server.name = dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || name = sysint-sqlserver-dbtech-runonly-connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,783 INFO || retriable.restart.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 
08:27:48,783 INFO || snapshot.mode = schema_only [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,784 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,782 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,785 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,785 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,785 INFO || table.whitelist = dbo.VatType,dbo.Registry [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,786 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,786 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,787 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,787 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,787 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,788 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,788 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,788 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,788 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,789 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter 
[io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,789 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,786 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,790 INFO || database.dbname = proc [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,791 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,791 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,791 INFO || database.server.name = proc [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,786 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,792 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,792 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,792 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,791 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,792 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,792 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,792 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,792 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,793 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask], 2020-10-13 
08:27:48,793 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,793 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,793 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,793 INFO || name = sysint-sqlserver-proc-runinit-connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,793 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,793 INFO || retriable.restart?.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,793 INFO || snapshot.mode = initial [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,793 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,794 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,794 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,794 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,791 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,794 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,794 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,792 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,794 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,794 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || database.dbname = dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || database.server.name = dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 INFO || 
transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || name = sysint-sqlserver-dbtech-runinit-connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || retriable.restart?.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || snapshot.mode = initial [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,796 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,795 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,797 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,797 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,797 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,797 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,798 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,798 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,798 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,796 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,796 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:48,798 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.inv [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 INFO || table.whitelist = dbo.InvoiceData,dbo.InvoiceDataOrder,dbo.InvoiceDataOrder_Order [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,798 WARN || Using 
configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,799 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,799 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || database.dbname = inv [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || database.server.name = inv [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || database.hostname = sqlserver 
[io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,799 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,800 INFO || name = sysint-sqlserver-inv-runonly-connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,800 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,800 INFO || retriable.restart?.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,800 INFO || snapshot.mode = schema_only [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,800 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:48,800 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,800 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,800 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,800 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,800 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. 
Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,800 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,800 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,800 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,801 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:27:48,801 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:49,223 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=dbtech-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=dbtech-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,224 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=dbtech-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,223 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=dbtech-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=dbtech-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,225 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=dbtech-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,223 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, 
value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=proc-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=proc-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,225 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=proc-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,226 INFO || Requested thread factory for connector SqlServerConnector, id = proc named = db-history-config-check [io.debezium.util.Threads], 2020-10-13 08:27:49,236 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=inv-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=inv-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,236 INFO || Requested thread factory for connector SqlServerConnector, id = dbtech named = db-history-config-check [io.debezium.util.Threads], 2020-10-13 08:27:49,226 INFO || Requested thread factory for connector SqlServerConnector, id = dbtech named = db-history-config-check [io.debezium.util.Threads], 2020-10-13 08:27:49,236 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=schema-history,server=dbtech': debezium.sql_server:type=connector-metrics,context=schema-history,server=dbtech [io.debezium.relational.history.DatabaseHistoryMetrics], 2020-10-13 08:27:49,236 INFO || 
ProducerConfig values: ,, 1, batch.size = 32768, bootstrap.servers = [kafka:29093], buffer.memory = 1048576, client.dns.lookup = use_all_dns_ips, client.id = dbtech-dbhistory, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 120000, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.StringSerializer, linger.ms = 0, max.block.ms = 10000, max.in.flight.requests.per.connection = 5, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 1, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, 
ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.StringSerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:49,237 INFO || ProducerConfig values: ,, 1, batch.size = 32768, bootstrap.servers = [kafka:29093], buffer.memory = 1048576, client.dns.lookup = use_all_dns_ips, client.id = dbtech-dbhistory, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 120000, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.StringSerializer, linger.ms = 0, max.block.ms = 10000, max.in.flight.requests.per.connection = 5, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 1, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = 
[TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.StringSerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:49,238 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=inv-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,238 INFO || Requested thread factory for connector SqlServerConnector, id = inv named = db-history-config-check [io.debezium.util.Threads], 2020-10-13 08:27:49,239 INFO || ProducerConfig values: ,, 1, batch.size = 32768, bootstrap.servers = [kafka:29093], buffer.memory = 1048576, client.dns.lookup = use_all_dns_ips, client.id = inv-dbhistory, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 120000, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class org.apache.kafka.common.serialization.StringSerializer, linger.ms = 0, max.block.ms = 10000, max.in.flight.requests.per.connection = 5, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class 
= class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 1, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.StringSerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:49,239 INFO || ProducerConfig values: ,, 1, batch.size = 32768, bootstrap.servers = [kafka:29093], buffer.memory = 1048576, client.dns.lookup = use_all_dns_ips, client.id = proc-dbhistory, compression.type = none, connections.max.idle.ms = 540000, delivery.timeout.ms = 120000, enable.idempotence = false, interceptor.classes = [], internal.auto.downgrade.txn.commit = false, key.serializer = class 
org.apache.kafka.common.serialization.StringSerializer, linger.ms = 0, max.block.ms = 10000, max.in.flight.requests.per.connection = 5, max.request.size = 1048576, metadata.max.age.ms = 300000, metadata.max.idle.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner, receive.buffer.bytes = 32768, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 1, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, transaction.timeout.ms = 60000, transactional.id = null, value.serializer = class org.apache.kafka.common.serialization.StringSerializer, [org.apache.kafka.clients.producer.ProducerConfig], 2020-10-13 08:27:49,244 INFO || Kafka version: 2.6.0 
[org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,246 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,246 INFO || Kafka startTimeMs: 1602577669244 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,254 INFO || [Producer clientId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,258 INFO || [Producer clientId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,261 INFO || [Producer clientId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,262 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,250 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = dbtech-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = dbtech-dbhistory, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, 
reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,265 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,265 INFO || Kafka startTimeMs: 1602577669245 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,266 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class 
org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-inv-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:49,271 WARN || Error registering AppInfo mbean [org.apache.kafka.common.utils.AppInfoParser], javax.management.InstanceAlreadyExistsException: kafka.producer:type=app-info,id=dbtech-dbhistory, at java.management/com.sun.jmx.mbeanserver.Repository.addMBean(Repository.java:436), at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerWithRepository(DefaultMBeanServerInterceptor.java:1855), at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerDynamicMBean(DefaultMBeanServerInterceptor.java:955), at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerObject(DefaultMBeanServerInterceptor.java:890), at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:320), at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522), at org.apache.kafka.common.utils.AppInfoParser.registerAppInfo(AppInfoParser.java:64), at org.apache.kafka.clients.producer.KafkaProducer.(KafkaProducer.java:435), at org.apache.kafka.clients.producer.KafkaProducer.(KafkaProducer.java:301), at io.debezium.relational.history.KafkaDatabaseHistory.start(KafkaDatabaseHistory.java:235), at io.debezium.relational.HistorizedRelationalDatabaseSchema.(HistorizedRelationalDatabaseSchema.java:40), at io.debezium.connector.sqlserver.SqlServerDatabaseSchema.(SqlServerDatabaseSchema.java:34), at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:83), at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106), at 
org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:232), at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185), at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:27:49,280 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-inv-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , 
transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:49,277 INFO || [Producer clientId=inv-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,287 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = dbtech-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = dbtech-dbhistory, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = 
null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,291 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:49,292 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter 
= null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runonly-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:49,287 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,293 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,293 INFO || Kafka startTimeMs: 1602577669245 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,293 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class 
org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-proc-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:49,294 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = proc-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = proc-dbhistory, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, 
sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,294 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,295 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,295 INFO || Kafka startTimeMs: 1602577669293 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,295 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,295 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,295 INFO || Kafka startTimeMs: 1602577669270 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,295 WARN || Error registering AppInfo mbean [org.apache.kafka.common.utils.AppInfoParser], javax.management.InstanceAlreadyExistsException: kafka.consumer:type=app-info,id=dbtech-dbhistory, at java.management/com.sun.jmx.mbeanserver.Repository.addMBean(Repository.java:436), at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerWithRepository(DefaultMBeanServerInterceptor.java:1855), at 
java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerDynamicMBean(DefaultMBeanServerInterceptor.java:955), at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerObject(DefaultMBeanServerInterceptor.java:890), at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:320), at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522), at org.apache.kafka.common.utils.AppInfoParser.registerAppInfo(AppInfoParser.java:64), at org.apache.kafka.clients.consumer.KafkaConsumer.(KafkaConsumer.java:814), at org.apache.kafka.clients.consumer.KafkaConsumer.(KafkaConsumer.java:667), at org.apache.kafka.clients.consumer.KafkaConsumer.(KafkaConsumer.java:647), at io.debezium.relational.history.KafkaDatabaseHistory.storageExists(KafkaDatabaseHistory.java:352), at io.debezium.relational.HistorizedRelationalDatabaseSchema.initializeStorage(HistorizedRelationalDatabaseSchema.java:67), at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:84), at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106), at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:232), at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185), at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:27:49,296 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,296 INFO || Kafka commitId: 
62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,296 INFO || Kafka startTimeMs: 1602577669261 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,297 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,297 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,297 INFO || Kafka startTimeMs: 1602577669296 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,308 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = inv-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = inv-dbhistory, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, 
sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,309 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-proc-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), 
transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:49,310 INFO || SourceConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], value.converter = class org.apache.kafka.connect.json.JsonConverter, [org.apache.kafka.connect.runtime.SourceConnectorConfig], 2020-10-13 08:27:49,313 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,315 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,318 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,318 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 
08:27:49,321 INFO || Kafka startTimeMs: 1602577669313 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,326 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,327 INFO || [Consumer clientId=inv-dbhistory, groupId=inv-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,336 INFO || EnrichedConnectorConfig values: , config.action.reload = restart, connector.class = io.debezium.connector.sqlserver.SqlServerConnector, errors.log.enable = true, errors.log.include.messages = true, errors.retry.delay.max.ms = 60000, errors.retry.timeout = 0, errors.tolerance = none, header.converter = null, key.converter = class org.apache.kafka.connect.json.JsonConverter, name = sysint-sqlserver-dbtech-runinit-connector, predicates = [], tasks.max = 1, topic.creation.groups = [], transforms = [unwrap, route, insertuuid], transforms.insertuuid.negate = false, transforms.insertuuid.predicate = , transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value, transforms.insertuuid.uuid.field.name = __uuid, transforms.route.negate = false, transforms.route.predicate = , transforms.route.regex = (.*), transforms.route.replacement = it.company.sysint.data.cdc.tables.$1, transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter, transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order], transforms.unwrap.add.headers = [version, connector, name], transforms.unwrap.delete.handling.mode = rewrite, transforms.unwrap.drop.tombstones = false, transforms.unwrap.negate = false, transforms.unwrap.predicate = , transforms.unwrap.route.by.field = , transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState, value.converter = class org.apache.kafka.connect.json.JsonConverter, 
[org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig], 2020-10-13 08:27:49,684 INFO || Found previous offset SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=dbtech, changeLsn=NULL, commitLsn=0000003f:00001038:0010, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=dbtech}, snapshotCompleted=true, eventSerialNo=0] [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:49,684 INFO || Found previous offset SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=proc, changeLsn=NULL, commitLsn=00000029:00000438:0003, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=proc}, snapshotCompleted=true, eventSerialNo=0] [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:27:49,689 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = dbtech-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = dbtech-dbhistory, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class 
org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,690 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = proc-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = 
proc-dbhistory, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class 
org.apache.kafka.common.serialization.StringDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,694 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,697 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,697 INFO || Requested thread factory for connector SqlServerConnector, id = inv named = change-event-source-coordinator [io.debezium.util.Threads], 2020-10-13 08:27:49,697 INFO || Kafka startTimeMs: 1602577669694 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,698 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,698 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,698 INFO || Kafka startTimeMs: 1602577669697 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,697 INFO || Requested thread factory for connector SqlServerConnector, id = dbtech named = change-event-source-coordinator [io.debezium.util.Threads], 2020-10-13 08:27:49,702 INFO || Creating thread debezium-sqlserverconnector-dbtech-change-event-source-coordinator [io.debezium.util.Threads], 2020-10-13 08:27:49,702 INFO || Creating thread debezium-sqlserverconnector-inv-change-event-source-coordinator [io.debezium.util.Threads], 2020-10-13 08:27:49,703 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,703 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:49,703 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:49,709 INFO || Metrics registered 
[io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:49,709 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:49,711 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,715 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = dbtech-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = dbtech-dbhistory, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, 
sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,716 INFO || No previous offset has been found [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:49,716 INFO || According to the connector configuration only schema will be snapshotted [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:49,718 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,718 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,718 INFO || Kafka startTimeMs: 1602577669718 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,719 INFO || Creating thread debezium-sqlserverconnector-dbtech-db-history-config-check [io.debezium.util.Threads], 2020-10-13 08:27:49,713 INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:49,719 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:49,719 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 
5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = proc-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = proc-dbhistory, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, 
ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,720 INFO || Snapshot step 1 - Preparing [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:49,719 INFO || No previous offset has been found [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:49,722 INFO || According to the connector configuration only schema will be snapshotted [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:49,722 INFO || Snapshot step 1 - Preparing [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:49,721 INFO || Snapshot step 2 - Determining captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:49,724 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,722 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = dbtech-dbhistory-topic-check, connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 1, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, 
sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,723 INFO || Snapshot step 2 - Determining captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:49,730 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,730 INFO || Kafka startTimeMs: 1602577669724 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,730 INFO || Creating thread debezium-sqlserverconnector-proc-db-history-config-check [io.debezium.util.Threads], 2020-10-13 08:27:49,731 WARN || The configuration 'value.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,731 WARN || The configuration 'acks' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,732 WARN || The configuration 'batch.size' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,732 WARN || The configuration 'max.block.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,732 WARN || The configuration 'buffer.memory' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,732 WARN || The configuration 'key.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,732 WARN || The configuration 'linger.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,732 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,732 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,732 INFO || Kafka startTimeMs: 1602577669732 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,734 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,741 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,744 INFO || AdminClientConfig values: , bootstrap.servers = [kafka:29093], client.dns.lookup = use_all_dns_ips, client.id = proc-dbhistory-topic-check, connections.max.idle.ms = 300000, default.api.timeout.ms = 60000, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retries = 1, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, 
sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,746 INFO || Started database history recovery [io.debezium.relational.history.DatabaseHistoryMetrics], 2020-10-13 08:27:49,748 INFO || Started database history recovery [io.debezium.relational.history.DatabaseHistoryMetrics], 2020-10-13 08:27:49,752 WARN || The configuration 'value.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,752 WARN || The configuration 'acks' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,752 WARN || The configuration 'batch.size' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,752 WARN || The configuration 'max.block.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,752 WARN || The configuration 'buffer.memory' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,752 WARN || The configuration 'key.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,752 WARN || The configuration 'linger.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig], 2020-10-13 08:27:49,752 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,752 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,752 INFO || Kafka startTimeMs: 1602577669752 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,786 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = dbtech-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = dbtech-dbhistory, group.instance.id = null, heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], 
receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, [org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,788 INFO || ConsumerConfig values: , allow.auto.create.topics = true, auto.commit.interval.ms = 5000, auto.offset.reset = earliest, bootstrap.servers = [kafka:29093], check.crcs = true, client.dns.lookup = use_all_dns_ips, client.id = proc-dbhistory, client.rack = , connections.max.idle.ms = 540000, default.api.timeout.ms = 60000, enable.auto.commit = false, exclude.internal.topics = true, fetch.max.bytes = 52428800, fetch.max.wait.ms = 500, fetch.min.bytes = 1, group.id = proc-dbhistory, group.instance.id = null, 
heartbeat.interval.ms = 3000, interceptor.classes = [], internal.leave.group.on.close = true, internal.throw.on.fetch.stable.offset.unsupported = false, isolation.level = read_uncommitted, key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, max.partition.fetch.bytes = 1048576, max.poll.interval.ms = 300000, max.poll.records = 500, metadata.max.age.ms = 300000, metric.reporters = [], metrics.num.samples = 2, metrics.recording.level = INFO, metrics.sample.window.ms = 30000, partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor], receive.buffer.bytes = 65536, reconnect.backoff.max.ms = 1000, reconnect.backoff.ms = 50, request.timeout.ms = 30000, retry.backoff.ms = 100, sasl.client.callback.handler.class = null, sasl.jaas.config = null, sasl.kerberos.kinit.cmd = /usr/bin/kinit, sasl.kerberos.min.time.before.relogin = 60000, sasl.kerberos.service.name = null, sasl.kerberos.ticket.renew.jitter = 0.05, sasl.kerberos.ticket.renew.window.factor = 0.8, sasl.login.callback.handler.class = null, sasl.login.class = null, sasl.login.refresh.buffer.seconds = 300, sasl.login.refresh.min.period.seconds = 60, sasl.login.refresh.window.factor = 0.8, sasl.login.refresh.window.jitter = 0.05, sasl.mechanism = GSSAPI, security.protocol = PLAINTEXT, security.providers = null, send.buffer.bytes = 131072, session.timeout.ms = 10000, ssl.cipher.suites = null, ssl.enabled.protocols = [TLSv1.2, TLSv1.3], ssl.endpoint.identification.algorithm = https, ssl.engine.factory.class = null, ssl.key.password = null, ssl.keymanager.algorithm = SunX509, ssl.keystore.location = null, ssl.keystore.password = null, ssl.keystore.type = JKS, ssl.protocol = TLSv1.3, ssl.provider = null, ssl.secure.random.implementation = null, ssl.trustmanager.algorithm = PKIX, ssl.truststore.location = null, ssl.truststore.password = null, ssl.truststore.type = JKS, value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer, 
[org.apache.kafka.clients.consumer.ConsumerConfig], 2020-10-13 08:27:49,789 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,789 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,789 INFO || Kafka startTimeMs: 1602577669789 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,789 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Subscribed to topic(s): it.company.sysint.data.cdc.db.history.dbtech [org.apache.kafka.clients.consumer.KafkaConsumer], 2020-10-13 08:27:49,792 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,792 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,792 INFO || Kafka startTimeMs: 1602577669792 [org.apache.kafka.common.utils.AppInfoParser], 2020-10-13 08:27:49,793 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,793 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Subscribed to topic(s): it.company.sysint.data.cdc.db.history.proc [org.apache.kafka.clients.consumer.KafkaConsumer], 2020-10-13 08:27:49,817 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata], 2020-10-13 08:27:49,818 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Discovered group coordinator kafka:29093 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,821 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,831 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Join group failed with 
org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group. [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,831 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,831 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Discovered group coordinator kafka:29093 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,836 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,843 INFO || Database history topic 'it.company.sysint.data.cdc.db.history.proc' has correct settings [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,848 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Finished assignment for group at generation 1: {dbtech-dbhistory-4034f01f-1ba1-44ac-9158-a909907fa13a=Assignment(partitions=[it.company.sysint.data.cdc.db.history.dbtech-0])} [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 2020-10-13 08:27:49,849 INFO || Database history topic 'it.company.sysint.data.cdc.db.history.dbtech' has correct settings [io.debezium.relational.history.KafkaDatabaseHistory], 2020-10-13 08:27:49,854 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Successfully joined group with generation 1 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,854 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Notifying assignor about the new Assignment(partitions=[it.company.sysint.data.cdc.db.history.dbtech-0]) [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 2020-10-13 08:27:49,854 INFO || 
[Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Adding newly assigned partitions: it.company.sysint.data.cdc.db.history.dbtech-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 2020-10-13 08:27:49,855 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group. [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,855 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,860 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Finished assignment for group at generation 1: {proc-dbhistory-9bb2ae43-16d8-47dc-9ebc-cd6788d1edeb=Assignment(partitions=[it.company.sysint.data.cdc.db.history.proc-0])} [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 2020-10-13 08:27:49,865 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Successfully joined group with generation 1 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,866 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Notifying assignor about the new Assignment(partitions=[it.company.sysint.data.cdc.db.history.proc-0]) [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 2020-10-13 08:27:49,866 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Adding newly assigned partitions: it.company.sysint.data.cdc.db.history.proc-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 2020-10-13 08:27:49,871 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Found no committed offset for partition it.company.sysint.data.cdc.db.history.proc-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 
2020-10-13 08:27:49,872 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Found no committed offset for partition it.company.sysint.data.cdc.db.history.dbtech-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 2020-10-13 08:27:49,874 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Resetting offset for partition it.company.sysint.data.cdc.db.history.proc-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:49,874 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Resetting offset for partition it.company.sysint.data.cdc.db.history.dbtech-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState], 2020-10-13 08:27:49,904 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Revoke previously assigned partitions it.company.sysint.data.cdc.db.history.proc-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 2020-10-13 08:27:49,905 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Member proc-dbhistory-9bb2ae43-16d8-47dc-9ebc-cd6788d1edeb sending LeaveGroup request to coordinator kafka:29093 (id: 2147483646 rack: null) due to the consumer is being closed [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,915 INFO || Finished database history recovery of 1 change(s) in 167 ms [io.debezium.relational.history.DatabaseHistoryMetrics], 2020-10-13 08:27:49,932 INFO || Requested thread factory for connector SqlServerConnector, id = proc named = change-event-source-coordinator [io.debezium.util.Threads], 2020-10-13 08:27:49,936 INFO || Creating thread debezium-sqlserverconnector-proc-change-event-source-coordinator [io.debezium.util.Threads], 2020-10-13 08:27:49,938 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:49,945 
INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:49,945 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:49,945 INFO || A previous offset indicating a completed snapshot has been found. Neither schema nor data will be snapshotted. [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:49,946 INFO || Snapshot ended with SnapshotResult [status=SKIPPED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=proc, changeLsn=NULL, commitLsn=00000029:00000438:0003, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=proc}, snapshotCompleted=true, eventSerialNo=0]] [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:49,958 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics], 2020-10-13 08:27:49,958 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:49,974 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Revoke previously assigned partitions it.company.sysint.data.cdc.db.history.dbtech-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator], 2020-10-13 08:27:49,974 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Member dbtech-dbhistory-4034f01f-1ba1-44ac-9158-a909907fa13a sending LeaveGroup request to coordinator kafka:29093 (id: 2147483646 rack: null) due to the consumer is being closed [org.apache.kafka.clients.consumer.internals.AbstractCoordinator], 2020-10-13 08:27:49,974 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:49,980 INFO || Finished database history recovery of 21 change(s) in 234 ms [io.debezium.relational.history.DatabaseHistoryMetrics], 2020-10-13 08:27:49,980 INFO || Snapshot step 3 - Locking captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:49,982 INFO || Schema locking was disabled in connector configuration [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:49,982 INFO || Snapshot step 4 - Determining snapshot offset [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:49,989 INFO || Snapshot step 5 - Reading structure of captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:49,989 INFO || Reading structure of schema 'dbtech' [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:49,992 INFO || Requested thread factory for connector SqlServerConnector, id = dbtech named = change-event-source-coordinator [io.debezium.util.Threads], 2020-10-13 08:27:49,993 INFO || Creating thread debezium-sqlserverconnector-dbtech-change-event-source-coordinator [io.debezium.util.Threads], 2020-10-13 08:27:50,006 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:50,006 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=snapshot,server=dbtech': debezium.sql_server:type=connector-metrics,context=snapshot,server=dbtech [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:50,006 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=streaming,server=dbtech': debezium.sql_server:type=connector-metrics,context=streaming,server=dbtech [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:50,006 INFO || Metrics registered 
[io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:50,006 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:50,006 INFO || A previous offset indicating a completed snapshot has been found. Neither schema nor data will be snapshotted. [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:50,006 INFO || Snapshot ended with SnapshotResult [status=SKIPPED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=dbtech, changeLsn=NULL, commitLsn=0000003f:00001038:0010, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=dbtech}, snapshotCompleted=true, eventSerialNo=0]] [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:50,007 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics], 2020-10-13 08:27:50,008 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:50,087 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:27:50,088 INFO || Snapshot step 3 - Locking captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:50,089 INFO || Schema locking was disabled in connector configuration [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:50,090 INFO || Snapshot step 4 - Determining snapshot offset [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:50,092 INFO || Snapshot step 5 - Reading structure of captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:50,092 INFO || Reading structure of schema 'inv' [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:50,107 INFO || Last position recorded in offsets is 00000029:00000438:0003(NULL)[0] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource], 2020-10-13 08:27:50,343 INFO || CDC is enabled for table Capture instance "dbo_OtherTable" [sourceTableId=dbtech.dbo.OtherTable, changeTableId=dbtech.cdc.dbo_OtherTable_CT, startLsn=00000038:000007b0:006c, changeTableObjectId=1486628339, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource], 2020-10-13 08:27:50,344 INFO || CDC is enabled for table Capture instance "dbo_Payment" [sourceTableId=dbtech.dbo.Payment, changeTableId=dbtech.cdc.dbo_Payment_CT, startLsn=00000037:00000af0:00b1, changeTableObjectId=1294627655, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource], 2020-10-13 08:27:50,345 INFO || Last position recorded in offsets is 0000003f:00001038:0010(NULL)[0] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource], 2020-10-13 08:27:51,257 WARN || Cannot parse column default value '(NULL)' to type 'int'. 
[io.debezium.connector.sqlserver.SqlServerDefaultValueConverter], java.lang.NumberFormatException: For input string: "UL", at java.base/java.lang.NumberFormatException.forInputString(NumberFormatException.java:65), at java.base/java.lang.Integer.parseInt(Integer.java:652), at java.base/java.lang.Integer.parseInt(Integer.java:770), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$1(SqlServerDefaultValueConverter.java:115), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82), at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512), at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181), at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126), at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183), at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122), at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:27:51,273 WARN || Cannot parse column default value '(NULL)' to type 'int'. 
[io.debezium.connector.sqlserver.SqlServerDefaultValueConverter], java.lang.NumberFormatException: For input string: "UL", at java.base/java.lang.NumberFormatException.forInputString(NumberFormatException.java:65), at java.base/java.lang.Integer.parseInt(Integer.java:652), at java.base/java.lang.Integer.parseInt(Integer.java:770), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$1(SqlServerDefaultValueConverter.java:115), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82), at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512), at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181), at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126), at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183), at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122), at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:27:51,544 INFO || Snapshot step 6 - Persisting schema history [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:51,675 INFO || Snapshot step 7 - Skipping snapshotting of data 
[io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:51,677 INFO || Snapshot - Final stage [io.debezium.pipeline.source.AbstractSnapshotChangeEventSource], 2020-10-13 08:27:51,678 INFO || Removing locking timeout [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:51,680 INFO || Snapshot ended with SnapshotResult [status=COMPLETED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=dbtech, changeLsn=NULL, commitLsn=00000046:00001380:0001, eventSerialNo=null, snapshot=FALSE, sourceTime=2020-10-13T08:27:51.666Z], partition={server=dbtech}, snapshotCompleted=true, eventSerialNo=1]] [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:51,681 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics], 2020-10-13 08:27:51,681 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:51,725 INFO || CDC is enabled for table Capture instance "dbo_VatType" [sourceTableId=dbtech.dbo.VatType, changeTableId=dbtech.cdc.dbo_VatType_CT, startLsn=0000003f:00000028:0042, changeTableObjectId=683149479, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource], 2020-10-13 08:27:51,726 INFO || Last position recorded in offsets is 00000046:00001380:0001(NULL)[1] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource], 2020-10-13 08:27:52,205 WARN || Cannot parse column default value '(getutcdate())' to type 'datetime2'. 
[io.debezium.connector.sqlserver.SqlServerDefaultValueConverter], com.microsoft.sqlserver.jdbc.SQLServerException: Error converting string value 'etutcdate(' into data type datetime2 using culture ''., at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDatabaseError(SQLServerException.java:262), at com.microsoft.sqlserver.jdbc.SQLServerResultSet$FetchBuffer.nextRow(SQLServerResultSet.java:5427), at com.microsoft.sqlserver.jdbc.SQLServerResultSet.fetchBufferNext(SQLServerResultSet.java:1758), at com.microsoft.sqlserver.jdbc.SQLServerResultSet.next(SQLServerResultSet.java:1016), at io.debezium.jdbc.JdbcConnection.querySingleValue(JdbcConnection.java:1299), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$19(SqlServerDefaultValueConverter.java:139), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82), at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512), at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181), at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126), at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183), at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122), at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:27:52,210 WARN || Cannot parse column default value '(getutcdate())' to type 'datetime2'. [io.debezium.connector.sqlserver.SqlServerDefaultValueConverter], com.microsoft.sqlserver.jdbc.SQLServerException: Error converting string value 'etutcdate(' into data type datetime2 using culture ''., at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDatabaseError(SQLServerException.java:262), at com.microsoft.sqlserver.jdbc.SQLServerResultSet$FetchBuffer.nextRow(SQLServerResultSet.java:5427), at com.microsoft.sqlserver.jdbc.SQLServerResultSet.fetchBufferNext(SQLServerResultSet.java:1758), at com.microsoft.sqlserver.jdbc.SQLServerResultSet.next(SQLServerResultSet.java:1016), at io.debezium.jdbc.JdbcConnection.querySingleValue(JdbcConnection.java:1299), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$19(SqlServerDefaultValueConverter.java:139), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82), at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512), at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181), at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126), at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183), at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122), at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105), at 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:27:52,213 WARN || Cannot parse column default value '(getutcdate())' to type 'datetime2'. [io.debezium.connector.sqlserver.SqlServerDefaultValueConverter], com.microsoft.sqlserver.jdbc.SQLServerException: Error converting string value 'etutcdate(' into data type datetime2 using culture ''., at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDatabaseError(SQLServerException.java:262), at com.microsoft.sqlserver.jdbc.SQLServerResultSet$FetchBuffer.nextRow(SQLServerResultSet.java:5427), at com.microsoft.sqlserver.jdbc.SQLServerResultSet.fetchBufferNext(SQLServerResultSet.java:1758), at com.microsoft.sqlserver.jdbc.SQLServerResultSet.next(SQLServerResultSet.java:1016), at io.debezium.jdbc.JdbcConnection.querySingleValue(JdbcConnection.java:1299), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$19(SqlServerDefaultValueConverter.java:139), at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82), at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512), at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181), at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126), at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183), at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122), at 
io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:27:52,336 INFO || Snapshot step 6 - Persisting schema history [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:52,363 INFO || Snapshot step 7 - Skipping snapshotting of data [io.debezium.relational.RelationalSnapshotChangeEventSource], 2020-10-13 08:27:52,365 INFO || Snapshot - Final stage [io.debezium.pipeline.source.AbstractSnapshotChangeEventSource], 2020-10-13 08:27:52,366 INFO || Removing locking timeout [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource], 2020-10-13 08:27:52,367 INFO || Snapshot ended with SnapshotResult [status=COMPLETED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=inv, changeLsn=NULL, commitLsn=00000030:00000d08:0001, eventSerialNo=null, snapshot=FALSE, sourceTime=2020-10-13T08:27:52.358Z], partition={server=inv}, snapshotCompleted=true, eventSerialNo=1]] [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:52,367 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics], 2020-10-13 08:27:52,367 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:27:52,385 INFO || Last position recorded in offsets is 00000030:00000d08:0001(NULL)[1] 
[io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource], 2020-10-13 08:27:53,711 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:53,712 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:53,714 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:53,715 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:53,755 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Finished commitOffsets successfully in 41 ms [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:53,755 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:53,757 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:53,758 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:53,758 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:53,771 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Finished commitOffsets successfully in 13 ms [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:58,714 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:58,714 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:58,756 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:58,756 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:58,758 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:58,758 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:58,772 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:27:58,772 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:03,714 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:03,714 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:03,756 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:03,756 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:03,758 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:03,758 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:03,772 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:03,773 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:08,715 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:08,715 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:08,757 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:08,757 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:08,758 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:08,758 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 
2020-10-13 08:28:08,773 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:08,773 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:13,715 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:13,715 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:13,757 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:13,757 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:13,759 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:13,759 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:13,774 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:13,774 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:18,716 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:18,716 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:18,758 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:18,758 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:18,760 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:18,760 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:18,775 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:18,775 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:23,717 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:23,717 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:23,759 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:23,759 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:23,761 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:23,761 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:23,776 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:23,777 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:28,718 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:28,719 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:28,760 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:28,761 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:28,761 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:28,762 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:28,778 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:28,778 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:33,719 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:33,720 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:33,762 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:33,762 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:33,763 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:33,763 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:33,779 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:33,779 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:38,721 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:38,721 
INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:38,763 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:38,763 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:38,764 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:38,764 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:38,780 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:38,781 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:43,722 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:43,722 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:43,764 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:43,764 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:43,764 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:43,764 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:43,781 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:43,781 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:48,722 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:48,723 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:48,765 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:48,765 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:48,765 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:48,766 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:48,782 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:48,782 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:53,723 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:53,724 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:53,766 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:53,766 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:53,766 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:53,766 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:53,783 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:53,783 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:58,724 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:58,725 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:58,767 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:58,767 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:58,767 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:58,767 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:58,784 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:28:58,784 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:03,725 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:03,726 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:03,768 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:03,768 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:03,768 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:03,769 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:03,785 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:03,785 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:08,727 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:08,727 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:08,769 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:08,769 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:08,769 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:08,770 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:08,786 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:08,786 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:13,728 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:13,728 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:13,770 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:13,770 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:13,770 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:13,770 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:13,787 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:13,787 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:18,728 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:18,729 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:18,771 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:18,771 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:18,771 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:18,771 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:18,787 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:18,788 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:23,729 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:23,730 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:23,772 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:23,772 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:23,772 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:23,772 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:23,789 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:23,789 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:28,731 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:28,731 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:28,773 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:28,773 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:28,774 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:28,774 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:28,790 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:28,790 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:33,731 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:33,732 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:33,774 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:33,775 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:33,775 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:33,775 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:33,791 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:33,791 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:38,732 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:38,733 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:38,776 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:38,776 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:38,776 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:38,776 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:38,792 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:38,792 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:43,733 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:43,734 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:43,776 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:43,777 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:43,777 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:43,777 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:43,793 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:43,793 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:48,735 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:48,735 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:48,777 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:48,778 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:48,778 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:48,778 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:48,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:48,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:53,735 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:53,736 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:53,778 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:53,778 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:53,778 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:53,778 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:53,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:53,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:58,736 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:58,737 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:58,779 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing 
offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:58,779 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:58,779 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:58,780 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:58,795 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:29:58,796 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:03,738 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:03,738 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:03,780 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:03,780 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:03,780 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:03,781 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:03,796 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:03,797 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:08,739 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:08,739 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:08,781 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:08,781 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:08,782 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:08,782 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:08,798 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:08,798 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:13,740 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:13,741 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:13,782 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:13,783 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:13,783 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:13,783 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:13,799 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:13,799 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:18,741 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:18,742 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:18,783 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:18,783 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:18,784 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:18,784 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:18,800 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:18,800 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:23,743 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:23,743 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:23,784 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:23,784 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:23,785 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:23,785 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 
2020-10-13 08:30:23,801 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:23,801 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:28,744 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:28,744 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:28,785 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:28,786 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:28,786 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:28,786 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:28,802 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:28,802 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:33,745 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:33,745 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:33,786 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:33,786 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:33,786 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:33,786 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:33,802 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:33,803 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:38,745 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:38,746 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:38,787 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:38,787 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:38,787 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:38,787 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:38,803 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:38,804 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:43,746 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:43,747 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:43,787 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:43,788 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:43,788 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:43,788 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:43,804 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:43,805 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:48,747 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:48,747 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:48,788 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:48,788 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:48,788 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:48,788 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:48,805 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:48,805 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:53,748 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:53,748 
INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:53,788 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:53,788 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:53,788 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:53,789 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:53,805 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:53,805 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:58,748 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:58,748 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:58,789 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:58,789 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:58,789 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:58,789 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:58,806 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:30:58,806 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:03,749 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:03,749 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:03,789 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:03,789 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:03,789 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:03,789 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:03,806 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:03,806 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:08,749 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:08,749 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:08,790 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:08,790 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:08,790 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:08,790 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:08,806 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:08,806 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:13,749 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:13,750 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:13,790 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:13,790 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:13,790 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:13,790 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:13,807 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:13,807 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:18,750 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:18,750 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:18,790 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:18,791 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:18,791 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:18,791 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:18,807 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:18,807 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:23,750 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:23,751 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:23,791 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:23,791 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:23,791 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:23,791 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:23,808 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:23,808 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:28,751 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:28,751 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:28,791 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:28,791 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:28,792 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:28,792 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:28,808 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:28,808 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:33,752 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:33,752 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:33,792 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:33,792 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:33,792 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:33,792 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:33,809 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:33,809 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:38,752 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:38,752 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:38,792 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:38,792 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:38,792 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:38,793 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:38,809 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:38,809 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:43,753 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:43,753 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:43,793 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:43,793 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:43,793 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:43,793 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:43,809 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:43,810 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:48,753 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:48,753 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:48,793 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:48,793 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:48,793 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:48,793 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:48,810 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:48,810 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:53,754 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:53,754 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:53,794 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:53,794 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:53,794 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:53,794 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:53,810 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:53,810 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:58,754 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:58,755 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:58,794 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:58,794 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:58,794 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:58,795 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:58,811 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:31:58,811 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:03,755 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:03,756 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:03,795 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:03,795 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:03,795 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:03,795 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:03,812 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:03,812 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:08,756 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:08,756 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:08,795 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:08,795 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:08,795 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:08,796 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:08,812 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:08,812 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:13,756 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:13,757 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:13,796 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing 
offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:13,796 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:13,797 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:13,797 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:13,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:13,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:18,757 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:18,757 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:18,797 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:18,797 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:18,797 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:18,797 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:18,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:18,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:23,758 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:23,758 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:23,797 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:23,797 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:23,798 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:23,798 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:23,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:23,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:28,758 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:28,759 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:28,798 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:28,798 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:28,798 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:28,798 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:28,814 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:28,814 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:33,759 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:33,760 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:33,798 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:33,799 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:33,799 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:33,799 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:33,814 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:33,814 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:38,760 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:38,760 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:38,799 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:38,799 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:38,799 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:38,799 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 
2020-10-13 08:32:38,815 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:38,815 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:43,761 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:43,761 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:43,800 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:43,800 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:43,800 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:43,800 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:43,815 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:43,816 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:48,762 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:48,762 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:48,800 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:48,801 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:48,801 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:48,801 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:48,816 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:48,817 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:53,763 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:53,763 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:53,801 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:53,801 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:53,802 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:53,802 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:53,817 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:53,818 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:58,763 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:58,763 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:58,802 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:58,802 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:58,802 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:58,802 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:58,818 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:32:58,818 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:03,764 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:03,764 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:03,802 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:03,802 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:03,803 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:03,803 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:03,819 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:03,819 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:08,764 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:08,764 
INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:08,803 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:08,803 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:08,804 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:08,804 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:08,820 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:08,820 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:13,765 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:13,765 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:13,804 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:13,804 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:13,804 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:13,804 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:13,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:13,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:18,765 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:18,765 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:18,804 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:18,804 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:18,804 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:18,804 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:18,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:18,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:23,765 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:23,766 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:23,804 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:23,805 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:23,805 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:23,805 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:23,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:23,822 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:28,766 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:28,766 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:28,805 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:28,805 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:28,805 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:28,806 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:28,822 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:28,822 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:33,767 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:33,767 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:33,806 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:33,806 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:33,806 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:33,806 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:33,823 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:33,823 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:38,767 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:38,768 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:38,806 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:38,806 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:38,806 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:38,806 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:38,823 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:38,824 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:43,768 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:43,768 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:43,807 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:43,807 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:43,807 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:43,807 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:43,824 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:43,824 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:48,768 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:48,768 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:48,807 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:48,807 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:48,807 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:48,807 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:48,824 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:48,824 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:53,768 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:53,769 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:53,807 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:53,807 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:53,807 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:53,808 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:53,825 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:53,825 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:58,769 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:58,769 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:58,808 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:58,808 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:58,808 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:58,808 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:58,825 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:33:58,825 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:03,770 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:03,770 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:03,808 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:03,808 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:03,808 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:03,808 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:03,826 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:03,826 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:08,770 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:08,770 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:08,809 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:08,809 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:08,809 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:08,809 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:08,826 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:08,826 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:13,770 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:13,771 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:13,809 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:13,809 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:13,809 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:13,809 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:13,827 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:13,827 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:18,771 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:18,771 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:18,809 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:18,809 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:18,809 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:18,809 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:18,827 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:18,827 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:23,771 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:23,771 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:23,810 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:23,810 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:23,810 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:23,810 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:23,827 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:23,827 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:28,771 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:28,772 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:28,810 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing 
offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:28,810 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:28,811 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:28,811 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:28,828 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:28,828 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:33,772 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:33,773 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:33,811 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:33,811 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:33,812 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:33,812 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:33,828 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:33,828 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:38,773 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:38,773 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:38,812 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:38,812 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:38,812 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:38,812 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:38,829 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:38,829 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:43,774 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:43,774 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:43,812 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:43,812 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:43,812 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:43,812 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:43,829 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:43,829 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:48,774 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:48,775 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:48,813 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:48,813 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:48,813 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:48,813 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:48,830 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:48,830 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:53,775 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:53,775 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:53,813 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:53,813 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:53,814 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:53,814 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 
2020-10-13 08:34:53,830 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:53,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:58,777 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:58,777 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:58,814 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:58,814 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:58,815 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:58,815 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:58,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:34:58,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:03,778 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:03,778 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:03,815 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:03,815 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:03,815 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:03,815 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:03,832 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:03,832 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:08,779 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:08,779 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:08,815 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:08,815 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:08,815 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:08,816 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:08,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:08,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:13,779 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:13,779 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:13,816 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:13,816 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:13,816 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:13,816 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:13,833 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:13,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:18,779 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:18,779 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:18,816 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:18,816 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:18,816 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:18,816 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:18,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:18,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:23,780 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:23,780 
INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:23,817 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:23,817 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:23,817 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:23,817 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:23,834 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:23,834 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:28,781 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:28,781 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:28,817 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:28,818 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:28,818 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:28,818 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:28,834 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:28,835 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:33,781 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:33,782 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:33,818 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:33,818 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:33,818 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:33,819 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:33,835 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:33,835 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:38,782 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:38,782 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:38,819 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:38,819 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:38,819 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:38,819 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:38,836 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:38,836 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:43,783 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:43,783 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:43,819 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:43,819 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:43,819 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:43,819 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:43,836 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:43,836 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:48,783 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:48,783 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:48,819 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:48,819 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:48,819 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:48,820 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:48,837 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:48,837 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:53,783 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:53,784 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:53,820 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:53,820 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:53,820 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:53,820 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:53,837 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:53,837 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:58,784 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:58,784 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:58,820 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:58,820 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:58,820 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:58,820 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:58,838 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:35:58,838 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:03,784 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:03,785 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:03,821 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:03,821 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:03,821 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:03,821 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:03,838 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:03,839 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:08,785 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:08,785 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:08,821 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:08,821 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:08,822 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:08,822 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:08,839 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:08,839 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:13,786 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:13,786 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:13,822 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:13,822 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:13,822 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:13,822 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:13,840 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:13,840 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:18,786 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:18,786 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:18,822 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:18,822 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:18,822 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:18,822 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:18,840 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:18,840 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:23,787 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:23,787 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:23,823 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:23,823 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:23,823 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:23,823 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:23,840 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:23,841 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:28,787 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:28,787 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:28,823 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:28,823 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:28,823 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:28,823 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:28,841 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:28,841 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:33,788 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:33,788 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:33,824 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:33,824 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:33,824 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:33,824 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:33,841 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:33,842 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:38,788 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:38,788 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:38,824 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:38,824 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:38,824 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:38,824 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:38,842 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:38,842 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:43,789 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:43,789 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:43,824 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing 
offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:43,825 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:43,825 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:43,825 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:43,842 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:43,843 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:48,789 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:48,789 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:48,825 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:48,825 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:48,825 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:48,825 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:48,843 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:48,843 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:53,789 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:53,790 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:53,825 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:53,826 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:53,826 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:53,826 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:53,843 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:53,843 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:58,790 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:58,790 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:58,826 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:58,826 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:58,826 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:58,826 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:58,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:36:58,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:03,790 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:03,791 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:03,826 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:03,826 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:03,826 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:03,826 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:03,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:03,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:08,791 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:08,791 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:08,827 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:08,827 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:08,827 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:08,827 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 
2020-10-13 08:37:08,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:08,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:13,791 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:13,791 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:13,827 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:13,827 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:13,827 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:13,827 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:13,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:13,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:18,792 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:18,792 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:18,827 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:18,827 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:18,827 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:18,827 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:18,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:18,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:23,792 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:23,792 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:23,827 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:23,828 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:23,828 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:23,828 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:23,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:23,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:28,792 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:28,792 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:28,828 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:28,828 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:28,828 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:28,828 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:28,846 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:28,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:33,793 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:33,793 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:33,828 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:33,828 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:33,828 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:33,828 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:33,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:33,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:38,793 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:38,793 
INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:38,829 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:38,829 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:38,829 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:38,829 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:38,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:38,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:43,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:43,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:43,829 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:43,829 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:43,829 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:43,829 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:43,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:43,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:48,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:48,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:48,829 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:48,830 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:48,830 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:48,830 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:48,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:48,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:53,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:53,794 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:53,830 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:53,830 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:53,830 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:53,830 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:53,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:53,848 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:58,795 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:58,795 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:58,830 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:58,830 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:58,830 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:58,830 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:58,848 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:37:58,848 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:03,795 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:03,795 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:03,830 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:03,831 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:03,831 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:03,831 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:03,848 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:03,848 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:08,796 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:08,796 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:08,831 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:08,831 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:08,831 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:08,831 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:08,849 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:08,849 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:13,796 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:13,796 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:13,831 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:13,831 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:13,832 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:13,832 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:13,849 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:13,849 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:18,796 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:18,797 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:18,832 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:18,832 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:18,832 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:18,832 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:18,850 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:18,850 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:23,797 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:23,797 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:23,832 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:23,832 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:23,832 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:23,832 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:23,850 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:23,850 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:28,798 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:28,798 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:28,832 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:28,832 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:28,833 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:28,833 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:28,850 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:28,850 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:33,798 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:33,798 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:33,833 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:33,833 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:33,833 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:33,833 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:33,851 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:33,851 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:38,798 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:38,799 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:38,833 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:38,833 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:38,833 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:38,833 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:38,851 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:38,851 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:43,799 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:43,799 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:43,834 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:43,834 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:43,834 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:43,834 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:43,851 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:43,852 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:48,800 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:48,800 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:48,834 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:48,834 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:48,834 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:48,834 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:48,852 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:48,852 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:53,800 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:53,800 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:53,835 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:53,835 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:53,835 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:53,835 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:53,852 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:53,852 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:58,800 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:58,801 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:58,835 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing 
offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:58,835 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:58,835 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:58,835 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:58,853 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:38:58,853 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:03,801 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:03,801 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:03,835 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:03,835 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:03,835 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:03,835 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:03,853 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:03,853 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:08,801 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:08,801 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:08,836 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:08,836 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:08,836 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:08,836 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:08,853 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:08,853 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:13,801 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:13,802 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:13,836 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:13,836 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:13,836 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:13,836 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:13,854 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:13,854 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:18,802 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:18,802 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:18,836 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:18,836 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:18,836 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:18,837 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:18,854 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:18,854 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:23,802 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:23,802 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:23,837 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:23,837 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:23,837 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:23,837 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 
2020-10-13 08:39:23,855 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:23,855 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:28,803 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:28,803 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:28,837 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:28,837 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:28,837 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:28,837 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:28,855 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:28,855 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:33,803 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:33,804 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:33,837 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:33,838 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:33,838 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:33,838 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:33,855 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:33,855 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:38,804 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:38,804 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:38,838 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:38,838 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:38,838 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:38,838 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:38,856 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:38,856 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:43,804 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:43,805 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:43,838 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:43,838 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:43,839 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:43,839 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:43,856 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:43,856 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:48,805 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:48,805 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:48,839 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:48,839 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:48,839 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:48,839 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:48,857 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:48,857 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:53,805 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:53,806 
INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:53,839 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:53,839 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:53,839 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:53,840 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:53,857 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:53,857 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:58,806 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:58,806 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:58,840 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:58,840 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:58,840 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:58,840 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:58,857 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:39:58,857 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:03,806 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:03,807 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:03,840 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:03,840 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:03,840 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:03,840 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:03,858 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:03,858 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:08,807 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:08,807 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:08,840 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:08,840 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:08,841 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:08,841 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:08,858 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:08,858 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:13,807 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:13,807 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:13,841 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:13,841 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:13,841 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:13,841 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:13,858 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:13,858 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:18,808 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:18,808 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:18,841 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:18,841 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:18,841 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:18,841 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:18,859 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:18,859 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:23,808 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:23,808 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:23,841 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:23,841 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:23,842 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:23,842 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:23,859 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:23,859 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:28,809 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:28,809 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:28,842 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:28,842 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:28,842 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:28,842 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:28,859 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:28,859 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:33,809 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:33,809 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:33,842 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:33,842 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:33,842 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:33,842 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:33,860 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:33,860 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:38,810 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:38,810 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:38,842 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:38,843 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:38,843 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:38,843 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:38,860 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:38,860 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:43,810 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:43,811 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:43,843 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:43,843 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:43,843 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:43,843 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:43,861 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:43,861 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:48,811 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:48,812 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:48,843 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:48,843 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:48,843 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:48,843 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:48,861 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:48,861 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:53,812 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:53,812 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:53,844 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:53,844 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:53,844 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:53,844 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:53,861 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:53,861 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:58,812 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:58,812 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:58,844 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:58,844 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:58,844 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:58,844 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:58,862 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:40:58,862 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:03,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:03,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:03,844 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:03,844 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:03,844 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:03,844 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:03,862 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:03,862 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:08,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:08,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:08,845 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:08,845 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:08,845 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:08,845 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:08,862 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:08,863 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:13,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:13,813 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:13,845 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing 
offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:13,845 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:13,845 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:13,845 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:13,863 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:13,863 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:18,814 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:18,814 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:18,845 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:18,845 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:18,845 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:18,846 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:18,863 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:18,863 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:23,814 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:23,814 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:23,846 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:23,846 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:23,846 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:23,846 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:23,863 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:23,863 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:28,814 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:28,814 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:28,846 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:28,846 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:28,846 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:28,846 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:28,864 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:28,864 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:33,815 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:33,815 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:33,846 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:33,846 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:33,846 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:33,846 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:33,864 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:33,864 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:38,815 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:38,815 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:38,847 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:38,847 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:38,847 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:38,847 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 
2020-10-13 08:41:38,864 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:38,865 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:43,815 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:43,816 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:43,847 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:43,847 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:43,847 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:43,847 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:43,865 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:43,865 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:48,816 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:48,816 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:48,848 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:48,848 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:48,848 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:48,848 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:48,865 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:48,866 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:53,816 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:53,816 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:53,848 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:53,848 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:53,848 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:53,848 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:53,866 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:53,866 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:58,817 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:58,817 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:58,848 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:58,848 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:58,848 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:58,849 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:58,866 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:41:58,866 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:03,817 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:03,817 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:03,849 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:03,849 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:03,849 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:03,849 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:03,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:03,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:08,817 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:08,817 
INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:08,849 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:08,849 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:08,849 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:08,849 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:08,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:08,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:13,818 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:13,818 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:13,850 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:13,850 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:13,850 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:13,850 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:13,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:13,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:18,818 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:18,818 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:18,850 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:18,850 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:18,850 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:18,850 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:18,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:18,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:23,818 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:23,818 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:23,851 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:23,851 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:23,851 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:23,851 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:23,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:23,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:28,819 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:28,819 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:28,851 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:28,851 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:28,851 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:28,851 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:28,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:28,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:33,819 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:33,819 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:33,851 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:33,851 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:33,851 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:33,851 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:33,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:33,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:38,819 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:38,819 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:38,852 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:38,852 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:38,852 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:38,852 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:38,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:38,869 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:43,820 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:43,820 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:43,852 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:43,852 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:43,852 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:43,852 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:43,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:43,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:48,820 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:48,820 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:48,852 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:48,853 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:48,853 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:48,853 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:48,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:48,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:53,370 ERROR || Producer failure [io.debezium.pipeline.ErrorHandler], com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. 
The connection has been closed., at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892), at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881), at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425), at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579), at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866), at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768), at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194), at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128), at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at 
java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:42:53,370 INFO || Finished streaming [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:42:53,371 INFO || Connected metrics set to 'false' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics], 2020-10-13 08:42:53,460 ERROR || Producer failure [io.debezium.pipeline.ErrorHandler], com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. The connection has been closed., at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892), at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881), at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425), at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579), at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866), at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768), at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194), at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128), at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140), at 
io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:42:53,461 INFO || Finished streaming [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:42:53,461 INFO || Connected metrics set to 'false' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics], 2020-10-13 08:42:53,509 ERROR || Producer failure [io.debezium.pipeline.ErrorHandler], com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. The connection has been closed., at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892), at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881), at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425), at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579), at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866), at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768), at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194), at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693), at 
io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128), at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:42:53,510 INFO || Finished streaming [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:42:53,510 INFO || Connected metrics set to 'false' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics], 2020-10-13 08:42:53,667 WARN || Going to restart connector after 10 sec. after a retriable exception [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:53,671 INFO || [Producer clientId=inv-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer], 2020-10-13 08:42:53,673 WARN || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} failed to poll records from SourceTask. Will retry operation. [org.apache.kafka.connect.runtime.WorkerSourceTask], org.apache.kafka.connect.errors.RetriableException: An exception occurred in the change event producer. 
This connector will be restarted., at io.debezium.pipeline.ErrorHandler.setProducerThrowable(ErrorHandler.java:38), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:283), at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. The connection has been closed., at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892), at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881), at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425), at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579), at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866), at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768), at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194), at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693), at 
io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128), ... 7 more, 2020-10-13 08:42:53,674 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:53,683 ERROR || Producer failure [io.debezium.pipeline.ErrorHandler], com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. The connection has been closed., at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892), at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881), at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425), at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579), at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866), at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768), at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194), at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149), at 
io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128), at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), 2020-10-13 08:42:53,683 INFO || Finished streaming [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:42:53,684 INFO || Connected metrics set to 'false' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics], 2020-10-13 08:42:53,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:53,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:53,853 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:53,853 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:53,853 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:53,853 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for 
offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:53,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:53,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:53,932 WARN || Going to restart connector after 10 sec. after a retriable exception [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:53,934 INFO || [Producer clientId=proc-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer], 2020-10-13 08:42:53,936 WARN || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} failed to poll records from SourceTask. Will retry operation. [org.apache.kafka.connect.runtime.WorkerSourceTask], org.apache.kafka.connect.errors.RetriableException: An exception occurred in the change event producer. 
This connector will be restarted., at io.debezium.pipeline.ErrorHandler.setProducerThrowable(ErrorHandler.java:38), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:283), at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. The connection has been closed., at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892), at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881), at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425), at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579), at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866), at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768), at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194), at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693), at 
io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128), ... 7 more, 2020-10-13 08:42:53,936 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:53,992 WARN || Going to restart connector after 10 sec. after a retriable exception [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:53,997 INFO || [Producer clientId=dbtech-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer], 2020-10-13 08:42:53,998 WARN || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} failed to poll records from SourceTask. Will retry operation. [org.apache.kafka.connect.runtime.WorkerSourceTask], org.apache.kafka.connect.errors.RetriableException: An exception occurred in the change event producer. 
This connector will be restarted., at io.debezium.pipeline.ErrorHandler.setProducerThrowable(ErrorHandler.java:38), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:283), at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. The connection has been closed., at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892), at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881), at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425), at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579), at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866), at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768), at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194), at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693), at 
io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128), ... 7 more, 2020-10-13 08:42:53,999 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:54,169 WARN || Going to restart connector after 10 sec. after a retriable exception [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:54,172 INFO || [Producer clientId=dbtech-dbhistory] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer], 2020-10-13 08:42:54,174 WARN || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} failed to poll records from SourceTask. Will retry operation. [org.apache.kafka.connect.runtime.WorkerSourceTask], org.apache.kafka.connect.errors.RetriableException: An exception occurred in the change event producer. 
This connector will be restarted., at io.debezium.pipeline.ErrorHandler.setProducerThrowable(ErrorHandler.java:38), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:283), at io.debezium.pipeline.ChangeEventSourceCoordinator.streamEvents(ChangeEventSourceCoordinator.java:140), at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:113), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: SQL Server did not return a response. The connection has been closed., at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2892), at com.microsoft.sqlserver.jdbc.SQLServerConnection.terminate(SQLServerConnection.java:2881), at com.microsoft.sqlserver.jdbc.TDSReader.readPacket(IOBuffer.java:6425), at com.microsoft.sqlserver.jdbc.TDSCommand.startResponse(IOBuffer.java:7579), at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:866), at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:768), at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7194), at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:2935), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:248), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:223), at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQuery(SQLServerStatement.java:693), at 
io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:623), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.getMaxLsn(SqlServerConnection.java:149), at io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource.execute(SqlServerStreamingChangeEventSource.java:128), ... 7 more, 2020-10-13 08:42:54,175 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:55,676 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:55,937 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:56,000 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:56,175 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:57,676 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:57,937 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:58,000 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:58,175 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:58,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:58,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:58,853 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:58,854 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:58,854 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:58,854 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:58,871 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:58,871 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:42:59,677 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:42:59,937 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:00,000 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:00,176 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:01,677 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:01,938 INFO || Awaiting end of restart backoff period after a retriable error 
[io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:02,001 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:02,176 INFO || Awaiting end of restart backoff period after a retriable error [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,678 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.inv [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || table.whitelist = dbo.InvoiceData,dbo.InvoiceDataOrder,dbo.InvoiceDataOrder_Order 
[io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,678 INFO || database.dbname = inv [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || database.server.name = inv [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || snapshot.isolation.mode = read_committed 
[io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || name = sysint-sqlserver-inv-runonly-connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || retriable.restart.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || snapshot.mode = schema_only [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,679 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:03,679 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,679 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,679 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,679 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,679 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,679 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:03,821 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:03,822 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:03,854 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:03,854 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:03,854 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,854 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:03,854 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:03,871 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:03,871 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:03,938 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:03,938 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.proc [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || table.whitelist = dbo.ImpostaPagato_Configurazione [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || 
decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || database.dbname = proc [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || database.server.name = proc [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || key.converter.schemas.enable = false 
[io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || name = sysint-sqlserver-proc-runinit-connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || retriable.restart.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || snapshot.mode = initial [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:03,939 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,939 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:03,940 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,940 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,940 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:03,940 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,001 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,001 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,001 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,001 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,001 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:04,001 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,002 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,002 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,002 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,002 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,002 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,002 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:04,002 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || table.whitelist = dbo.VatType,dbo.Registry [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter 
[io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || database.dbname = dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || database.server.name = dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || 
value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || name = sysint-sqlserver-dbtech-runinit-connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,002 INFO || retriable.restart?.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,003 INFO || snapshot.mode = initial [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,003 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,003 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,003 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,003 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,003 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,003 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,003 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. 
Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,003 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,177 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || table.whitelist = dbo.tab1,dbo.tab2 [io.debezium.connector.common.BaseSourceTask], 
2020-10-13 08:43:04,177 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || database.dbname = dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || database.server.name = dbtech [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO 
|| event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || name = sysint-sqlserver-dbtech-runonly-connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || retriable.restart?.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || snapshot.mode = schema_only [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,177 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:04,178 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,178 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. 
Please use "schema.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,178 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,178 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,178 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,178 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration], 2020-10-13 08:43:04,178 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration], 2020-10-13 08:43:08,822 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:08,822 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:08,822 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:08,854 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:08,854 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:08,854 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:08,854 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:08,854 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:08,855 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:08,872 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:08,872 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:08,872 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:13,822 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:13,822 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:13,822 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:13,855 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:13,855 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:13,855 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:13,855 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:13,855 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:13,855 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:13,872 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:13,872 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:13,872 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:17,969 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:17,969 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:17,969 ERROR || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Task threw an uncaught and unrecoverable exception [org.apache.kafka.connect.runtime.WorkerTask], java.lang.RuntimeException: Couldn't obtain database name, at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:474), at io.debezium.connector.sqlserver.SqlServerConnection.(SqlServerConnection.java:117), at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:75), at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106), at io.debezium.connector.common.BaseSourceTask.startIfNeededAndPossible(BaseSourceTask.java:161), at io.debezium.connector.common.BaseSourceTask.poll(BaseSourceTask.java:124), at org.apache.kafka.connect.runtime.WorkerSourceTask.poll(WorkerSourceTask.java:289), at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:256), at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185), at 
org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: The TCP/IP connection to the host sqlserver, port 1433 has failed. Error: "sqlserver. Verify the connection properties. Make sure that an instance of SQL Server is running on the host and accepting TCP/IP connections at the port. Make sure that TCP connections to the port are not blocked by a firewall."., at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDriverError(SQLServerException.java:234), at com.microsoft.sqlserver.jdbc.SQLServerException.ConvertConnectExceptionToSQLServerException(SQLServerException.java:285), at com.microsoft.sqlserver.jdbc.SocketFinder.findSocket(IOBuffer.java:2431), at com.microsoft.sqlserver.jdbc.TDSChannel.open(IOBuffer.java:656), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectHelper(SQLServerConnection.java:2440), at com.microsoft.sqlserver.jdbc.SQLServerConnection.login(SQLServerConnection.java:2103), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectInternal(SQLServerConnection.java:1950), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connect(SQLServerConnection.java:1162), at com.microsoft.sqlserver.jdbc.SQLServerDriver.connect(SQLServerDriver.java:735), at io.debezium.jdbc.JdbcConnection.lambda$patternBasedFactory$1(JdbcConnection.java:222), at io.debezium.jdbc.JdbcConnection$ConnectionFactoryDecorator.connect(JdbcConnection.java:107), at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:852), at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:847), 
at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:618), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:469), ... 14 more, 2020-10-13 08:43:17,969 ERROR || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Task is being killed and will not recover until manually restarted [org.apache.kafka.connect.runtime.WorkerTask], 2020-10-13 08:43:17,969 INFO || Stopping down connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:17,970 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=snapshot,server=inv': debezium.sql_server:type=connector-metrics,context=snapshot,server=inv [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:43:17,970 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=streaming,server=inv': debezium.sql_server:type=connector-metrics,context=streaming,server=inv [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:43:17,970 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=schema-history,server=inv': debezium.sql_server:type=connector-metrics,context=schema-history,server=inv [io.debezium.relational.history.DatabaseHistoryMetrics], 2020-10-13 08:43:17,970 INFO || [Producer clientId=connector-producer-sysint-sqlserver-inv-runonly-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. 
[org.apache.kafka.clients.producer.KafkaProducer], 2020-10-13 08:43:18,084 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,084 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,085 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,085 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,085 ERROR || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Task threw an uncaught and unrecoverable exception [org.apache.kafka.connect.runtime.WorkerTask], java.lang.RuntimeException: Couldn't obtain database name, at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:474), at io.debezium.connector.sqlserver.SqlServerConnection.(SqlServerConnection.java:117), at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:75), at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106), at io.debezium.connector.common.BaseSourceTask.startIfNeededAndPossible(BaseSourceTask.java:161), at io.debezium.connector.common.BaseSourceTask.poll(BaseSourceTask.java:124), at org.apache.kafka.connect.runtime.WorkerSourceTask.poll(WorkerSourceTask.java:289), at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:256), at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185), at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: The TCP/IP connection to the host sqlserver, port 1433 has failed. Error: "sqlserver. Verify the connection properties. Make sure that an instance of SQL Server is running on the host and accepting TCP/IP connections at the port. Make sure that TCP connections to the port are not blocked by a firewall."., at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDriverError(SQLServerException.java:234), at com.microsoft.sqlserver.jdbc.SQLServerException.ConvertConnectExceptionToSQLServerException(SQLServerException.java:285), at com.microsoft.sqlserver.jdbc.SocketFinder.findSocket(IOBuffer.java:2431), at com.microsoft.sqlserver.jdbc.TDSChannel.open(IOBuffer.java:656), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectHelper(SQLServerConnection.java:2440), at com.microsoft.sqlserver.jdbc.SQLServerConnection.login(SQLServerConnection.java:2103), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectInternal(SQLServerConnection.java:1950), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connect(SQLServerConnection.java:1162), at com.microsoft.sqlserver.jdbc.SQLServerDriver.connect(SQLServerDriver.java:735), at io.debezium.jdbc.JdbcConnection.lambda$patternBasedFactory$1(JdbcConnection.java:222), at io.debezium.jdbc.JdbcConnection$ConnectionFactoryDecorator.connect(JdbcConnection.java:107), at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:852), at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:847), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:618), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at 
io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:469), ... 14 more, 2020-10-13 08:43:18,085 ERROR || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Task is being killed and will not recover until manually restarted [org.apache.kafka.connect.runtime.WorkerTask], 2020-10-13 08:43:18,085 INFO || Stopping down connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:18,085 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=snapshot,server=proc': debezium.sql_server:type=connector-metrics,context=snapshot,server=proc [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:43:18,085 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=streaming,server=proc': debezium.sql_server:type=connector-metrics,context=streaming,server=proc [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:43:18,086 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=schema-history,server=proc': debezium.sql_server:type=connector-metrics,context=schema-history,server=proc [io.debezium.relational.history.DatabaseHistoryMetrics], 2020-10-13 08:43:18,086 INFO || [Producer clientId=connector-producer-sysint-sqlserver-proc-runinit-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. 
[org.apache.kafka.clients.producer.KafkaProducer], 2020-10-13 08:43:18,085 ERROR || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Task threw an uncaught and unrecoverable exception [org.apache.kafka.connect.runtime.WorkerTask], java.lang.RuntimeException: Couldn't obtain database name, at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:474), at io.debezium.connector.sqlserver.SqlServerConnection.(SqlServerConnection.java:117), at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:75), at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106), at io.debezium.connector.common.BaseSourceTask.startIfNeededAndPossible(BaseSourceTask.java:161), at io.debezium.connector.common.BaseSourceTask.poll(BaseSourceTask.java:124), at org.apache.kafka.connect.runtime.WorkerSourceTask.poll(WorkerSourceTask.java:289), at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:256), at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185), at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: The TCP/IP connection to the host sqlserver, port 1433 has failed. Error: "sqlserver. Verify the connection properties. Make sure that an instance of SQL Server is running on the host and accepting TCP/IP connections at the port. 
Make sure that TCP connections to the port are not blocked by a firewall."., at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDriverError(SQLServerException.java:234), at com.microsoft.sqlserver.jdbc.SQLServerException.ConvertConnectExceptionToSQLServerException(SQLServerException.java:285), at com.microsoft.sqlserver.jdbc.SocketFinder.findSocket(IOBuffer.java:2431), at com.microsoft.sqlserver.jdbc.TDSChannel.open(IOBuffer.java:656), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectHelper(SQLServerConnection.java:2440), at com.microsoft.sqlserver.jdbc.SQLServerConnection.login(SQLServerConnection.java:2103), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectInternal(SQLServerConnection.java:1950), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connect(SQLServerConnection.java:1162), at com.microsoft.sqlserver.jdbc.SQLServerDriver.connect(SQLServerDriver.java:735), at io.debezium.jdbc.JdbcConnection.lambda$patternBasedFactory$1(JdbcConnection.java:222), at io.debezium.jdbc.JdbcConnection$ConnectionFactoryDecorator.connect(JdbcConnection.java:107), at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:852), at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:847), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:618), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:469), ... 14 more, 2020-10-13 08:43:18,086 ERROR || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Task is being killed and will not recover until manually restarted [org.apache.kafka.connect.runtime.WorkerTask], 2020-10-13 08:43:18,086 INFO || Stopping down connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:18,086 INFO || [Producer clientId=connector-producer-sysint-sqlserver-dbtech-runinit-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. 
[org.apache.kafka.clients.producer.KafkaProducer], 2020-10-13 08:43:18,823 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,823 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,855 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,855 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,855 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,855 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,872 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,873 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:18,873 WARN || Couldn't commit processed log positions with the source database due to a concurrent connector shutdown or restart [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:19,085 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:19,085 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding 
messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:19,085 ERROR || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Task threw an uncaught and unrecoverable exception [org.apache.kafka.connect.runtime.WorkerTask], java.lang.RuntimeException: Couldn't obtain database name, at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:474), at io.debezium.connector.sqlserver.SqlServerConnection.(SqlServerConnection.java:117), at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:75), at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106), at io.debezium.connector.common.BaseSourceTask.startIfNeededAndPossible(BaseSourceTask.java:161), at io.debezium.connector.common.BaseSourceTask.poll(BaseSourceTask.java:124), at org.apache.kafka.connect.runtime.WorkerSourceTask.poll(WorkerSourceTask.java:289), at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:256), at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185), at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235), at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515), at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264), at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128), at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628), at java.base/java.lang.Thread.run(Thread.java:834), Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: The TCP/IP connection to the host sqlserver, port 1433 has failed. Error: "sqlserver. Verify the connection properties. Make sure that an instance of SQL Server is running on the host and accepting TCP/IP connections at the port. 
Make sure that TCP connections to the port are not blocked by a firewall."., at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDriverError(SQLServerException.java:234), at com.microsoft.sqlserver.jdbc.SQLServerException.ConvertConnectExceptionToSQLServerException(SQLServerException.java:285), at com.microsoft.sqlserver.jdbc.SocketFinder.findSocket(IOBuffer.java:2431), at com.microsoft.sqlserver.jdbc.TDSChannel.open(IOBuffer.java:656), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectHelper(SQLServerConnection.java:2440), at com.microsoft.sqlserver.jdbc.SQLServerConnection.login(SQLServerConnection.java:2103), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connectInternal(SQLServerConnection.java:1950), at com.microsoft.sqlserver.jdbc.SQLServerConnection.connect(SQLServerConnection.java:1162), at com.microsoft.sqlserver.jdbc.SQLServerDriver.connect(SQLServerDriver.java:735), at io.debezium.jdbc.JdbcConnection.lambda$patternBasedFactory$1(JdbcConnection.java:222), at io.debezium.jdbc.JdbcConnection$ConnectionFactoryDecorator.connect(JdbcConnection.java:107), at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:852), at io.debezium.jdbc.JdbcConnection.connection(JdbcConnection.java:847), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:618), at io.debezium.jdbc.JdbcConnection.queryAndMap(JdbcConnection.java:492), at io.debezium.connector.sqlserver.SqlServerConnection.retrieveRealDatabaseName(SqlServerConnection.java:469), ... 
14 more, 2020-10-13 08:43:19,086 ERROR || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Task is being killed and will not recover until manually restarted [org.apache.kafka.connect.runtime.WorkerTask], 2020-10-13 08:43:19,086 INFO || Stopping down connector [io.debezium.connector.common.BaseSourceTask], 2020-10-13 08:43:19,086 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=snapshot,server=dbtech': debezium.sql_server:type=connector-metrics,context=snapshot,server=dbtech [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:43:19,086 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=streaming,server=dbtech': debezium.sql_server:type=connector-metrics,context=streaming,server=dbtech [io.debezium.pipeline.ChangeEventSourceCoordinator], 2020-10-13 08:43:19,086 WARN || Unable to unregister the MBean 'debezium.sql_server:type=connector-metrics,context=schema-history,server=dbtech': debezium.sql_server:type=connector-metrics,context=schema-history,server=dbtech [io.debezium.relational.history.DatabaseHistoryMetrics], 2020-10-13 08:43:19,086 INFO || [Producer clientId=connector-producer-sysint-sqlserver-dbtech-runonly-connector-0] Closing the Kafka producer with timeoutMillis = 30000 ms. 
[org.apache.kafka.clients.producer.KafkaProducer], 2020-10-13 08:43:23,824 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:23,824 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:23,856 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:23,856 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:23,856 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:23,856 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:23,873 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:23,873 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:28,825 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:28,825 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:28,856 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:28,856 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:28,857 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:28,857 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:28,873 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:28,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:33,825 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:33,825 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:33,857 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:33,857 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:33,857 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:33,857 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:33,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:33,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:38,826 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:38,826 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:38,857 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:38,857 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:38,857 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:38,857 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:38,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:38,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:43,826 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:43,826 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:43,857 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:43,857 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:43,857 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:43,857 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:43,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:43,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:48,826 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:48,826 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:48,858 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:48,858 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:48,858 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:48,858 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:48,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:48,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:53,827 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:53,827 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:53,858 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:53,858 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:53,858 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:53,858 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 
2020-10-13 08:43:53,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:53,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:58,827 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:58,828 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:58,859 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:58,859 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:58,859 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:58,859 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:58,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:43:58,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:03,828 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:03,828 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:03,859 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:03,859 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:03,859 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:03,859 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:03,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:03,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:08,828 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:08,828 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:08,860 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:08,860 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:08,860 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:08,860 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:08,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:08,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:13,829 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:13,829 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:13,860 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:13,860 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:13,860 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:13,860 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:13,878 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:13,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:18,829 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:18,829 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:18,860 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:18,860 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:18,860 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:18,860 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:18,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:18,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:23,829 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:23,829 
INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:23,861 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:23,861 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:23,861 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:23,861 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:23,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:23,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:28,830 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:28,830 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:28,861 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:28,861 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:28,861 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:28,861 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:28,879 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:28,879 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:33,830 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:33,830 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:33,861 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:33,861 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:33,861 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:33,861 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:33,879 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:33,879 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:38,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:38,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:38,862 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:38,862 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:38,862 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:38,862 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:38,879 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:38,879 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:43,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:43,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:43,862 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:43,862 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:43,862 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:43,862 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:43,880 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:43,880 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:48,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:48,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:48,862 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:48,862 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:48,862 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:48,862 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:48,880 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:48,880 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:53,831 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:53,832 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:53,863 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:53,863 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:53,863 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:53,863 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:53,881 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:53,881 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:58,832 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:58,832 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:58,863 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:58,863 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:58,863 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:58,863 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:58,881 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:44:58,881 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:03,832 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:03,832 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:03,863 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:03,863 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:03,863 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:03,863 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:03,881 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:03,881 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:08,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:08,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:08,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:08,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:08,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:08,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:08,881 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:08,882 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:13,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:13,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:13,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:13,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:13,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:13,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:13,882 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:13,882 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:18,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:18,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:18,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:18,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:18,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:18,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:18,882 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:18,882 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:23,833 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:23,834 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:23,865 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:23,865 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:23,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:23,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:23,882 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:23,883 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:28,834 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:28,834 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:28,865 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:28,865 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:28,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:28,865 INFO || 
WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:28,883 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:28,883 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:33,834 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:33,834 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:33,865 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:33,865 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:33,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:33,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:33,883 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:33,883 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:38,835 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:38,835 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:38,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:38,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:38,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:38,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:38,883 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:38,884 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:43,835 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:43,835 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:43,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing 
offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:43,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:43,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:43,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:43,884 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:43,884 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:48,836 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:48,836 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:48,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:48,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:48,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:48,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:48,884 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:48,884 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:53,836 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:53,836 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:53,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:53,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:53,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:53,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:53,885 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:53,885 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:58,836 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:58,836 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:58,867 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:58,867 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:58,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:58,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:58,885 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:45:58,885 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:03,837 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:03,837 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:03,867 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:03,867 INFO 
|| WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:03,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:03,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:03,885 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:03,885 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:08,837 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:08,837 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:08,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:08,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:08,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:08,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 
2020-10-13 08:46:08,886 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:08,886 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:13,837 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:13,838 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:13,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:13,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:13,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:13,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:13,886 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:13,886 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:18,838 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:18,838 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:18,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:18,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:18,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:18,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:18,886 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:18,886 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:23,838 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:23,838 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:23,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:23,869 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:23,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:23,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:23,887 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:23,887 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:28,838 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:28,839 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:28,869 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:28,869 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:28,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:28,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:28,887 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:46:28,887 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask], 2020-10-13 08:47:03,841 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:03,842 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:03,872 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:03,872 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:03,872 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:03,872 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:03,890 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:03,890 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:08,842 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:08,842 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:08,872 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:08,872 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:08,872 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:08,872 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:08,890 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:08,890 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:13,842 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:13,842 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:13,873 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:13,873 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 
08:47:13,873 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:13,873 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:13,891 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:13,891 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:18,843 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:18,843 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:18,873 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:18,873 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:18,873 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:18,873 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:18,891 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 
08:47:18,891 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:23,843 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:23,843 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:23,873 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:23,873 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:23,874 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:23,874 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:23,891 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:23,891 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:28,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:28,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:28,874 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:28,874 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:28,874 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:28,874 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:28,892 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:28,892 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:33,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:33,844 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:33,874 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:33,874 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:33,874 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:33,874 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:33,892 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:33,892 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:38,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:38,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:38,874 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:38,875 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:38,875 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:38,875 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:38,893 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:38,893 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding 
messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:43,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:43,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:43,875 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:43,875 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:43,875 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:43,875 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:43,893 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:43,893 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:48,845 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:48,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:48,875 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} 
Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:48,876 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:48,876 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:48,876 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:48,894 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:48,894 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:53,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:53,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:53,876 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:53,876 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:53,876 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:53,876 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:53,894 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:53,894 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:58,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:58,846 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:58,876 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:58,876 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:58,876 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:58,876 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:58,894 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:47:58,894 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:03,846 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:03,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:03,877 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:03,877 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:03,877 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:03,877 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:03,895 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:03,895 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:08,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:08,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:08,877 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:08,877 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:08,877 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:08,877 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:08,895 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:08,895 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:13,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:13,847 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:13,877 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:13,877 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:13,877 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:13,877 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 
08:48:13,895 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:13,895 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:18,848 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:18,848 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:18,878 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:18,878 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:18,878 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:18,878 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:18,896 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:18,896 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:48:19,553 INFO || Kafka Connect stopping [org.apache.kafka.connect.runtime.Connect] 2020-10-13 08:48:19,553 INFO || Stopping REST server 
[org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:48:19,556 INFO || Stopped http_172.18.0.48083@12b5454f{HTTP/1.1,[http/1.1]}{172.18.0.4:8083} [org.eclipse.jetty.server.AbstractConnector] 2020-10-13 08:48:19,557 INFO || node0 Stopped scavenging [org.eclipse.jetty.server.session] 2020-10-13 08:48:19,559 INFO || REST server stopped [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:48:19,559 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder stopping [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:48:19,559 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Stopping connectors and tasks that are still assigned to this worker. [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:48:19,560 INFO || Stopping connector sysint-sqlserver-inv-runonly-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,560 INFO || Scheduled shutdown for WorkerConnector{id=sysint-sqlserver-inv-runonly-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-13 08:48:19,560 INFO || Stopping connector sysint-sqlserver-dbtech-runinit-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,560 INFO || Stopping task sysint-sqlserver-proc-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,560 INFO || Stopping task sysint-sqlserver-inv-runonly-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,560 INFO || Stopping task sysint-sqlserver-dbtech-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,560 INFO || Stopping connector sysint-sqlserver-proc-runinit-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,560 INFO || Stopping connector sysint-sqlserver-dbtech-runonly-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,560 INFO || Stopping task sysint-sqlserver-dbtech-runonly-connector-0 
[org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,560 INFO || Scheduled shutdown for WorkerConnector{id=sysint-sqlserver-dbtech-runinit-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-13 08:48:19,560 INFO || Completed shutdown for WorkerConnector{id=sysint-sqlserver-inv-runonly-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-13 08:48:19,560 INFO || Scheduled shutdown for WorkerConnector{id=sysint-sqlserver-proc-runinit-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-13 08:48:19,560 INFO || Scheduled shutdown for WorkerConnector{id=sysint-sqlserver-dbtech-runonly-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-13 08:48:19,562 INFO || Completed shutdown for WorkerConnector{id=sysint-sqlserver-dbtech-runinit-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-13 08:48:19,562 INFO || Completed shutdown for WorkerConnector{id=sysint-sqlserver-dbtech-runonly-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-13 08:48:19,564 INFO || Completed shutdown for WorkerConnector{id=sysint-sqlserver-proc-runinit-connector} [org.apache.kafka.connect.runtime.WorkerConnector] 2020-10-13 08:48:19,568 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Member connect-1-e0879b42-5f99-4d46-ac74-c892007b25eb sending LeaveGroup request to coordinator kafka:29093 (id: 2147483646 rack: null) due to the consumer is being closed [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:48:19,572 WARN || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Close timed out with 1 pending requests to coordinator, terminating client connections [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:48:19,574 INFO || Stopping KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:48:19,574 INFO || [Producer clientId=producer-2] Closing the Kafka 
producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-13 08:48:19,576 INFO || Stopped KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:48:19,576 INFO || Closing KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-13 08:48:19,576 INFO || Stopping KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:48:19,577 INFO || [Producer clientId=producer-3] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. [org.apache.kafka.clients.producer.KafkaProducer] 2020-10-13 08:48:19,578 INFO || Stopped KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:48:19,579 INFO || Closed KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-13 08:48:19,579 INFO || Worker stopping [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,579 INFO || Stopping KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-13 08:48:19,580 INFO || Stopping KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:48:19,580 INFO || [Producer clientId=producer-1] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms. 
[org.apache.kafka.clients.producer.KafkaProducer] 2020-10-13 08:48:19,581 INFO || Stopped KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:48:19,581 INFO || Stopped KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-13 08:48:19,582 INFO || Worker stopped [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:48:19,582 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder stopped [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:48:19,584 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder stopped [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:48:19,584 INFO || Kafka Connect stopped [org.apache.kafka.connect.runtime.Connect] Plugins are loaded from /kafka/connect Using the following environment variables: GROUP_ID=sysint-kafka-connect CONFIG_STORAGE_TOPIC=_sysint_connect_configs OFFSET_STORAGE_TOPIC=_sysint_connect_offsets STATUS_STORAGE_TOPIC=_sysint_connect_status BOOTSTRAP_SERVERS=kafka:29093 REST_HOST_NAME=172.18.0.4 REST_PORT=8083 ADVERTISED_HOST_NAME=172.18.0.4 ADVERTISED_PORT=8083 KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter INTERNAL_KEY_CONVERTER=org.apache.kafka.connect.json.JsonConverter INTERNAL_VALUE_CONVERTER=org.apache.kafka.connect.json.JsonConverter OFFSET_FLUSH_INTERVAL_MS=5000 OFFSET_FLUSH_TIMEOUT_MS=5000 SHUTDOWN_TIMEOUT=10000 --- Setting property from CONNECT_INTERNAL_VALUE_CONVERTER: internal.value.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_VALUE_CONVERTER: value.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_REST_ADVERTISED_HOST_NAME: rest.advertised.host.name=172.18.0.4 --- Setting property from CONNECT_OFFSET_FLUSH_INTERVAL_MS: offset.flush.interval.ms=5000 --- Setting property from 
CONNECT_GROUP_ID: group.id=sysint-kafka-connect --- Setting property from CONNECT_BOOTSTRAP_SERVERS: bootstrap.servers=kafka:29093 --- Setting property from CONNECT_KEY_CONVERTER: key.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS: task.shutdown.graceful.timeout.ms=10000 --- Setting property from CONNECT_REST_HOST_NAME: rest.host.name=172.18.0.4 --- Setting property from CONNECT_PLUGIN_PATH: plugin.path=/kafka/connect --- Setting property from CONNECT_REST_PORT: rest.port=8083 --- Setting property from CONNECT_OFFSET_FLUSH_TIMEOUT_MS: offset.flush.timeout.ms=5000 --- Setting property from CONNECT_STATUS_STORAGE_TOPIC: status.storage.topic=_sysint_connect_status --- Setting property from CONNECT_INTERNAL_KEY_CONVERTER: internal.key.converter=org.apache.kafka.connect.json.JsonConverter --- Setting property from CONNECT_CONFIG_STORAGE_TOPIC: config.storage.topic=_sysint_connect_configs --- Setting property from CONNECT_REST_ADVERTISED_PORT: rest.advertised.port=8083 --- Setting property from CONNECT_OFFSET_STORAGE_TOPIC: offset.storage.topic=_sysint_connect_offsets 2020-10-13 08:50:57,661 INFO || WorkerInfo values: jvm.args = -Xms256M, -Xmx2G, -XX:+UseG1GC, -XX:MaxGCPauseMillis=20, -XX:InitiatingHeapOccupancyPercent=35, -XX:+ExplicitGCInvokesConcurrent, -XX:MaxInlineLevel=15, -Djava.awt.headless=true, -Dcom.sun.management.jmxremote, -Dcom.sun.management.jmxremote.authenticate=false, -Dcom.sun.management.jmxremote.ssl=false, -Dcom.sun.management.jmxremote.port=1976, -Dkafka.logs.dir=/kafka/bin/../logs, -Dlog4j.configuration=file:/kafka/config/log4j.properties, -javaagent:/kafka/jmx_prometheus_javaagent.jar=8080:/kafka/config.yml jvm.spec = Oracle Corporation, OpenJDK 64-Bit Server VM, 11.0.8, 11.0.8+10-LTS jvm.classpath = 
/kafka/bin/../libs/activation-1.1.1.jar:/kafka/bin/../libs/aopalliance-repackaged-2.5.0.jar:/kafka/bin/../libs/argparse4j-0.7.0.jar:/kafka/bin/../libs/audience-annotations-0.5.0.jar:/kafka/bin/../libs/avro-1.9.2.jar:/kafka/bin/../libs/common-config-5.5.1.jar:/kafka/bin/../libs/common-utils-5.5.1.jar:/kafka/bin/../libs/commons-cli-1.4.jar:/kafka/bin/../libs/commons-lang3-3.8.1.jar:/kafka/bin/../libs/connect-api-2.6.0.jar:/kafka/bin/../libs/connect-basic-auth-extension-2.6.0.jar:/kafka/bin/../libs/connect-file-2.6.0.jar:/kafka/bin/../libs/connect-json-2.6.0.jar:/kafka/bin/../libs/connect-mirror-2.6.0.jar:/kafka/bin/../libs/connect-mirror-client-2.6.0.jar:/kafka/bin/../libs/connect-runtime-2.6.0.jar:/kafka/bin/../libs/connect-transforms-2.6.0.jar:/kafka/bin/../libs/hk2-api-2.5.0.jar:/kafka/bin/../libs/hk2-locator-2.5.0.jar:/kafka/bin/../libs/hk2-utils-2.5.0.jar:/kafka/bin/../libs/jackson-annotations-2.10.2.jar:/kafka/bin/../libs/jackson-core-2.10.2.jar:/kafka/bin/../libs/jackson-databind-2.10.2.jar:/kafka/bin/../libs/jackson-dataformat-csv-2.10.2.jar:/kafka/bin/../libs/jackson-datatype-jdk8-2.10.2.jar:/kafka/bin/../libs/jackson-jaxrs-base-2.10.2.jar:/kafka/bin/../libs/jackson-jaxrs-json-provider-2.10.2.jar:/kafka/bin/../libs/jackson-module-jaxb-annotations-2.10.2.jar:/kafka/bin/../libs/jackson-module-paranamer-2.10.2.jar:/kafka/bin/../libs/jackson-module-scala_2.12-2.10.2.jar:/kafka/bin/../libs/jakarta.activation-api-1.2.1.jar:/kafka/bin/../libs/jakarta.annotation-api-1.3.4.jar:/kafka/bin/../libs/jakarta.inject-2.5.0.jar:/kafka/bin/../libs/jakarta.ws.rs-api-2.1.5.jar:/kafka/bin/../libs/jakarta.xml.bind-api-2.3.2.jar:/kafka/bin/../libs/javassist-3.22.0-CR2.jar:/kafka/bin/../libs/javassist-3.26.0-GA.jar:/kafka/bin/../libs/javax.servlet-api-3.1.0.jar:/kafka/bin/../libs/javax.ws.rs-api-2.1.1.jar:/kafka/bin/../libs/jaxb-api-2.3.0.jar:/kafka/bin/../libs/jersey-client-2.28.jar:/kafka/bin/../libs/jersey-common-2.28.jar:/kafka/bin/../libs/jersey-container-servlet-2.28.jar:/kafk
a/bin/../libs/jersey-container-servlet-core-2.28.jar:/kafka/bin/../libs/jersey-hk2-2.28.jar:/kafka/bin/../libs/jersey-media-jaxb-2.28.jar:/kafka/bin/../libs/jersey-server-2.28.jar:/kafka/bin/../libs/jetty-client-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-continuation-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-http-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-io-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-security-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-server-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-servlet-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-servlets-9.4.24.v20191120.jar:/kafka/bin/../libs/jetty-util-9.4.24.v20191120.jar:/kafka/bin/../libs/jopt-simple-5.0.4.jar:/kafka/bin/../libs/kafka-avro-serializer-5.5.1.jar:/kafka/bin/../libs/kafka-clients-2.6.0.jar:/kafka/bin/../libs/kafka-connect-avro-converter-5.5.1.jar:/kafka/bin/../libs/kafka-connect-avro-data-5.5.1.jar:/kafka/bin/../libs/kafka-log4j-appender-2.6.0.jar:/kafka/bin/../libs/kafka-schema-registry-client-5.5.1.jar:/kafka/bin/../libs/kafka-schema-serializer-5.5.1.jar:/kafka/bin/../libs/kafka-streams-2.6.0.jar:/kafka/bin/../libs/kafka-streams-examples-2.6.0.jar:/kafka/bin/../libs/kafka-streams-scala_2.12-2.6.0.jar:/kafka/bin/../libs/kafka-streams-test-utils-2.6.0.jar:/kafka/bin/../libs/kafka-tools-2.6.0.jar:/kafka/bin/../libs/kafka_2.12-2.6.0.jar:/kafka/bin/../libs/log4j-1.2.17.jar:/kafka/bin/../libs/lz4-java-1.7.1.jar:/kafka/bin/../libs/maven-artifact-3.6.3.jar:/kafka/bin/../libs/metrics-core-2.2.0.jar:/kafka/bin/../libs/netty-buffer-4.1.50.Final.jar:/kafka/bin/../libs/netty-codec-4.1.50.Final.jar:/kafka/bin/../libs/netty-common-4.1.50.Final.jar:/kafka/bin/../libs/netty-handler-4.1.50.Final.jar:/kafka/bin/../libs/netty-resolver-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-native-epoll-4.1.50.Final.jar:/kafka/bin/../libs/netty-transport-native-unix-common-4.1.50.Final.jar:/kafka/bin/../libs/osgi-resource-locator-1.0.1.jar:/ka
fka/bin/../libs/paranamer-2.8.jar:/kafka/bin/../libs/plexus-utils-3.2.1.jar:/kafka/bin/../libs/reflections-0.9.12.jar:/kafka/bin/../libs/rocksdbjni-5.18.4.jar:/kafka/bin/../libs/scala-collection-compat_2.12-2.1.6.jar:/kafka/bin/../libs/scala-java8-compat_2.12-0.9.1.jar:/kafka/bin/../libs/scala-library-2.12.11.jar:/kafka/bin/../libs/scala-logging_2.12-3.9.2.jar:/kafka/bin/../libs/scala-reflect-2.12.11.jar:/kafka/bin/../libs/slf4j-api-1.7.30.jar:/kafka/bin/../libs/slf4j-log4j12-1.7.30.jar:/kafka/bin/../libs/snappy-java-1.1.7.3.jar:/kafka/bin/../libs/validation-api-2.0.1.Final.jar:/kafka/bin/../libs/zookeeper-3.5.8.jar:/kafka/bin/../libs/zookeeper-jute-3.5.8.jar:/kafka/bin/../libs/zstd-jni-1.4.4-7.jar os.spec = Linux, amd64, 4.19.76-linuxkit os.vcpus = 4 [org.apache.kafka.connect.runtime.WorkerInfo] 2020-10-13 08:50:57,665 INFO || Scanning for plugin classes. This might take a moment ... [org.apache.kafka.connect.cli.ConnectDistributed] 2020-10-13 08:50:57,675 INFO || Loading plugin from: /kafka/connect/kafka-connect-insert-uuid [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,725 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/kafka-connect-insert-uuid/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,725 INFO || Added plugin 'com.github.cjmatta.kafka.connect.smt.InsertUuid$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,725 INFO || Added plugin 'com.github.cjmatta.kafka.connect.smt.InsertUuid$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,725 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,725 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,725 INFO || Added plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,726 INFO || Loading plugin from: /kafka/connect/debezium-connector-mongodb [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,951 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-mongodb/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,952 INFO || Added plugin 'io.debezium.connector.mongodb.MongoDbConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,952 INFO || Added plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,952 INFO || Added plugin 'io.debezium.converters.CloudEventsConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,952 INFO || Added plugin 'io.debezium.transforms.ExtractNewRecordState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,952 INFO || Added plugin 'io.debezium.transforms.outbox.EventRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,952 INFO || Added plugin 'io.debezium.connector.mongodb.transforms.ExtractNewDocumentState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,952 INFO || Added plugin 'io.debezium.transforms.ByLogicalTableRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:57,952 INFO || Loading plugin from: /kafka/connect/debezium-connector-mysql [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:58,136 INFO || Registered loader: 
PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-mysql/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:58,136 INFO || Added plugin 'io.debezium.connector.mysql.MySqlConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:58,141 INFO || Loading plugin from: /kafka/connect/debezium-connector-sqlserver [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:58,226 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-sqlserver/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:58,226 INFO || Added plugin 'io.debezium.connector.sqlserver.SqlServerConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:58,239 INFO || Loading plugin from: /kafka/connect/debezium-connector-postgres [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:58,351 INFO || Registered loader: PluginClassLoader{pluginLocation=file:/kafka/connect/debezium-connector-postgres/} [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:58,351 INFO || Added plugin 'io.debezium.connector.postgresql.PostgresConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Registered loader: jdk.internal.loader.ClassLoaders$AppClassLoader@3d4eac69 [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.tools.MockSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.tools.MockSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,131 INFO || Added plugin 'org.apache.kafka.connect.tools.MockConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.converters.DoubleConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'io.confluent.connect.avro.AvroConverter' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,132 INFO || Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.Filter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.InsertField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Key' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.MaskField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.RegexRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.HoistField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.ValueToKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.MaskField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.Cast$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.Cast$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.Flatten$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.InsertField$Key' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.Flatten$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Value' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,133 INFO || Added plugin 'org.apache.kafka.connect.transforms.HoistField$Key' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,134 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,134 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,134 INFO || Added plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,134 INFO || Added plugin 'org.apache.kafka.common.config.provider.FileConfigProvider' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,134 INFO || Added plugin 'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,134 INFO || Added aliases 'MongoDbConnector' and 'MongoDb' to plugin 'io.debezium.connector.mongodb.MongoDbConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'MySqlConnector' and 'MySql' to plugin 'io.debezium.connector.mysql.MySqlConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'PostgresConnector' and 'Postgres' to plugin 'io.debezium.connector.postgresql.PostgresConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'SqlServerConnector' and 'SqlServer' to plugin 'io.debezium.connector.sqlserver.SqlServerConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'FileStreamSinkConnector' and 'FileStreamSink' to plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'FileStreamSourceConnector' and 'FileStreamSource' to plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'MirrorCheckpointConnector' and 'MirrorCheckpoint' to plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'MirrorHeartbeatConnector' and 'MirrorHeartbeat' to plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'MirrorSourceConnector' and 'MirrorSource' to plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'MockConnector' and 'Mock' to plugin 'org.apache.kafka.connect.tools.MockConnector' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'MockSinkConnector' and 'MockSink' to plugin 'org.apache.kafka.connect.tools.MockSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,135 INFO || Added aliases 'MockSourceConnector' and 'MockSource' to plugin 'org.apache.kafka.connect.tools.MockSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,136 INFO || Added aliases 'SchemaSourceConnector' and 'SchemaSource' to plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,136 INFO || Added aliases 'VerifiableSinkConnector' and 'VerifiableSink' to plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,136 INFO || Added aliases 'VerifiableSourceConnector' and 'VerifiableSource' to plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,136 INFO || Added aliases 'AvroConverter' and 'Avro' to plugin 'io.confluent.connect.avro.AvroConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,136 INFO || Added aliases 'ByteBufferConverter' and 'ByteBuffer' to plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,136 INFO || Added aliases 'CloudEventsConverter' and 'CloudEvents' to plugin 'io.debezium.converters.CloudEventsConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,136 INFO || Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 
2020-10-13 08:50:59,136 INFO || Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,136 INFO || Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'ByteBufferConverter' and 'ByteBuffer' to plugin 'io.debezium.converters.ByteBufferConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' 
[org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,137 INFO || Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,138 INFO || Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,138 INFO || Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,138 INFO || Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,138 INFO || Added alias 'SimpleHeaderConverter' to plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,138 INFO || Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,138 INFO || Added alias 'ExtractNewDocumentState' to plugin 'io.debezium.connector.mongodb.transforms.ExtractNewDocumentState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,138 INFO || Added alias 'ByLogicalTableRouter' to plugin 'io.debezium.transforms.ByLogicalTableRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,139 INFO || Added alias 'ExtractNewRecordState' to plugin 
'io.debezium.transforms.ExtractNewRecordState' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,139 INFO || Added alias 'EventRouter' to plugin 'io.debezium.transforms.outbox.EventRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,139 INFO || Added aliases 'PredicatedTransformation' and 'Predicated' to plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,139 INFO || Added alias 'Filter' to plugin 'org.apache.kafka.connect.transforms.Filter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,139 INFO || Added alias 'RegexRouter' to plugin 'org.apache.kafka.connect.transforms.RegexRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,139 INFO || Added alias 'TimestampRouter' to plugin 'org.apache.kafka.connect.transforms.TimestampRouter' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,139 INFO || Added alias 'ValueToKey' to plugin 'org.apache.kafka.connect.transforms.ValueToKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,140 INFO || Added alias 'HasHeaderKey' to plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,140 INFO || Added alias 'RecordIsTombstone' to plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,140 INFO || Added alias 'TopicNameMatches' to plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,140 INFO || Added alias 'BasicAuthSecurityRestExtension' to plugin 
'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,140 INFO || Added aliases 'AllConnectorClientConfigOverridePolicy' and 'All' to plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,140 INFO || Added aliases 'NoneConnectorClientConfigOverridePolicy' and 'None' to plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,140 INFO || Added aliases 'PrincipalConnectorClientConfigOverridePolicy' and 'Principal' to plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' [org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader] 2020-10-13 08:50:59,177 INFO || DistributedConfig values: access.control.allow.methods = access.control.allow.origin = admin.listeners = null bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = config.providers = [] config.storage.replication.factor = 1 config.storage.topic = _sysint_connect_configs connect.protocol = sessioned connections.max.idle.ms = 540000 connector.client.config.override.policy = None group.id = sysint-kafka-connect header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter heartbeat.interval.ms = 3000 inter.worker.key.generation.algorithm = HmacSHA256 inter.worker.key.size = null inter.worker.key.ttl.ms = 3600000 inter.worker.signature.algorithm = HmacSHA256 inter.worker.verification.algorithms = [HmacSHA256] internal.key.converter = class org.apache.kafka.connect.json.JsonConverter internal.value.converter = class org.apache.kafka.connect.json.JsonConverter key.converter = class org.apache.kafka.connect.json.JsonConverter listeners = null metadata.max.age.ms = 300000 
metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 offset.flush.interval.ms = 5000 offset.flush.timeout.ms = 5000 offset.storage.partitions = 25 offset.storage.replication.factor = 1 offset.storage.topic = _sysint_connect_offsets plugin.path = [/kafka/connect] rebalance.timeout.ms = 60000 receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 40000 response.http.headers.config = rest.advertised.host.name = 172.18.0.4 rest.advertised.listener = null rest.advertised.port = 8083 rest.extension.classes = [] rest.host.name = 172.18.0.4 rest.port = 8083 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI scheduled.rebalance.max.delay.ms = 300000 security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.client.auth = none ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS status.storage.partitions = 5 status.storage.replication.factor = 1 status.storage.topic = _sysint_connect_status 
task.shutdown.graceful.timeout.ms = 10000 topic.creation.enable = true topic.tracking.allow.reset = true topic.tracking.enable = true value.converter = class org.apache.kafka.connect.json.JsonConverter worker.sync.timeout.ms = 3000 worker.unsync.backoff.ms = 300000 [org.apache.kafka.connect.runtime.distributed.DistributedConfig] 2020-10-13 08:50:59,177 INFO || Worker configuration property 'internal.key.converter' is deprecated and may be removed in an upcoming release. The specified value 'org.apache.kafka.connect.json.JsonConverter' matches the default, so this property can be safely removed from the worker configuration. [org.apache.kafka.connect.runtime.WorkerConfig] 2020-10-13 08:50:59,177 INFO || Worker configuration property 'internal.value.converter' is deprecated and may be removed in an upcoming release. The specified value 'org.apache.kafka.connect.json.JsonConverter' matches the default, so this property can be safely removed from the worker configuration. [org.apache.kafka.connect.runtime.WorkerConfig] 2020-10-13 08:50:59,179 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,180 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null 
sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,241 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,242 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,242 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,242 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,242 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,242 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,242 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,242 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,242 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,242 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,242 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,243 INFO || Kafka startTimeMs: 1602579059242 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,486 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,501 INFO || Logging initialized @2308ms to org.eclipse.jetty.util.log.Slf4jLog [org.eclipse.jetty.util.log] 2020-10-13 08:50:59,536 INFO || Added connector for http://172.18.0.4:8083 [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:50:59,537 INFO || Initializing REST server [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:50:59,546 INFO || jetty-9.4.24.v20191120; built: 2019-11-20T21:37:49.771Z; git: 363d5f2df3a8a28de40604320230664b9c793c16; jvm 11.0.8+10-LTS [org.eclipse.jetty.server.Server] 2020-10-13 08:50:59,569 INFO || Started http_172.18.0.48083@1ffcf674{HTTP/1.1,[http/1.1]}{172.18.0.4:8083} [org.eclipse.jetty.server.AbstractConnector] 2020-10-13 08:50:59,570 INFO || Started @2378ms [org.eclipse.jetty.server.Server] 2020-10-13 08:50:59,588 INFO || Advertised URI: http://172.18.0.4:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:50:59,588 INFO || REST server listening at http://172.18.0.4:8083/, advertising URL http://172.18.0.4:8083/ 
[org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:50:59,589 INFO || Advertised URI: http://172.18.0.4:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:50:59,589 INFO || REST admin endpoints at http://172.18.0.4:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:50:59,589 INFO || Advertised URI: http://172.18.0.4:8083/ [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:50:59,590 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,590 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null 
ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,593 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,593 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,593 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,593 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,593 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,593 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,594 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,594 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,594 INFO || Kafka startTimeMs: 1602579059594 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,605 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,614 INFO || Setting up None Policy for ConnectorClientConfigOverride. This will disallow any client configuration to be overridden [org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy] 2020-10-13 08:50:59,620 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,620 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, 
TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,622 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 WARN || The configuration 'key.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,623 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,623 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,623 INFO || Kafka startTimeMs: 1602579059623 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,634 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,639 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,639 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,639 INFO || Kafka startTimeMs: 1602579059639 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,722 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:50:59,724 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:50:59,724 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,724 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 
sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,727 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,727 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,728 INFO || Kafka startTimeMs: 1602579059727 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,742 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,750 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,751 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,753 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,753 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,753 INFO || Kafka startTimeMs: 1602579059753 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,762 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,766 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,766 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,768 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,768 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,768 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,768 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,768 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,768 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,768 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,769 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,769 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,769 INFO || Kafka startTimeMs: 1602579059769 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,776 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,787 INFO || Creating Kafka admin client [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,788 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,789 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,789 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,789 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,789 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,789 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,789 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,789 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,790 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,790 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,790 INFO || Kafka startTimeMs: 1602579059790 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,795 INFO || Kafka cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.connect.util.ConnectUtils] 2020-10-13 08:50:59,811 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,811 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,812 INFO || Kafka startTimeMs: 1602579059811 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,814 INFO || Kafka Connect distributed worker initialization took 2149ms [org.apache.kafka.connect.cli.ConnectDistributed] 2020-10-13 08:50:59,814 INFO || Kafka Connect starting [org.apache.kafka.connect.runtime.Connect] 2020-10-13 08:50:59,815 INFO || Initializing REST resources [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:50:59,815 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder starting [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 
08:50:59,818 INFO || Worker starting [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:50:59,818 INFO || Starting KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-13 08:50:59,818 INFO || Starting KafkaBasedLog with topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:50:59,819 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 
2020-10-13 08:50:59,821 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:50:59,821 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,822 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,822 INFO || Kafka startTimeMs: 1602579059821 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,852 INFO || Adding admin resources to main listener [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:50:59,880 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = producer-1 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism 
= GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,894 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,894 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,894 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,894 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,894 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,894 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,894 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,894 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,895 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:50:59,896 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,896 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,896 INFO || Kafka startTimeMs: 1602579059895 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,902 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumer-sysint-kafka-connect-1 client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = sysint-kafka-connect group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters 
= [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,919 INFO || [Producer clientId=producer-1] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:50:59,927 INFO || DefaultSessionIdManager workerName=node0 [org.eclipse.jetty.server.session] 2020-10-13 08:50:59,928 INFO || No SessionScavenger set, using defaults [org.eclipse.jetty.server.session] 2020-10-13 08:50:59,929 INFO || node0 Scavenging every 600000ms 
[org.eclipse.jetty.server.session] 2020-10-13 08:50:59,932 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,932 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,932 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,932 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:50:59,933 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,933 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,933 INFO || Kafka startTimeMs: 1602579059933 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:50:59,940 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:50:59,966 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_offsets-0, _sysint_connect_offsets-5, _sysint_connect_offsets-10, _sysint_connect_offsets-20, _sysint_connect_offsets-15, _sysint_connect_offsets-9, _sysint_connect_offsets-11, _sysint_connect_offsets-4, _sysint_connect_offsets-16, _sysint_connect_offsets-17, _sysint_connect_offsets-3, _sysint_connect_offsets-24, _sysint_connect_offsets-23, _sysint_connect_offsets-13, _sysint_connect_offsets-18, _sysint_connect_offsets-22, _sysint_connect_offsets-2, _sysint_connect_offsets-8, _sysint_connect_offsets-12, _sysint_connect_offsets-19, _sysint_connect_offsets-14, _sysint_connect_offsets-1, _sysint_connect_offsets-6, _sysint_connect_offsets-7, _sysint_connect_offsets-21 [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-13 08:50:59,969 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,969 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-5 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer 
clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-10 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-20 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-15 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-9 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-11 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-4 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-16 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-17 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, 
groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-3 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-24 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-23 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-13 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-18 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-22 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-2 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-8 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to 
EARLIEST offset of partition _sysint_connect_offsets-12 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-19 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-14 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-1 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-6 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-7 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:50:59,972 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_offsets-21 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,000 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-24 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,001 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-18 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,001 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-16 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,001 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-22 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,001 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-20 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,001 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-9 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,001 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-7 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-13 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-11 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-1 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-5 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-3 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-23 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-17 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-15 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-21 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-19 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-10 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-8 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-14 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,002 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-12 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,003 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-2 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,003 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,003 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-6 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,003 INFO || [Consumer clientId=consumer-sysint-kafka-connect-1, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_offsets-4 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,045 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:51:00,045 INFO || Started KafkaBasedLog for topic _sysint_connect_offsets [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:51:00,045 INFO || Finished reading offsets topic and starting KafkaOffsetBackingStore [org.apache.kafka.connect.storage.KafkaOffsetBackingStore] 2020-10-13 08:51:00,051 INFO || Worker started [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,051 INFO || Starting KafkaBasedLog with topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:51:00,051 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = 
null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,053 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,054 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,054 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,054 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,054 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,055 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,055 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,055 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,055 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,055 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,055 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,055 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,055 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,055 INFO || Kafka startTimeMs: 1602579060055 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,078 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = producer-2 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 0 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 
sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,081 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,081 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,081 INFO || Kafka startTimeMs: 1602579060081 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,085 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumer-sysint-kafka-connect-2 client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = sysint-kafka-connect group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = 
[] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,087 INFO || [Producer clientId=producer-2] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:00,091 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,097 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,097 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,097 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,098 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,099 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,099 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,099 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,099 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,099 INFO || Kafka startTimeMs: 1602579060099 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,102 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:00,106 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_status-0, _sysint_connect_status-4, _sysint_connect_status-1, _sysint_connect_status-2, _sysint_connect_status-3 [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-13 08:51:00,106 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,106 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-4 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,106 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-1 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,106 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_status-2 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,106 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of 
partition _sysint_connect_status-3 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,116 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-2 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,116 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-1 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,116 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-0 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,116 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-4 to offset 0. [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,116 INFO || [Consumer clientId=consumer-sysint-kafka-connect-2, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_status-3 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,156 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:51:00,156 INFO || Started KafkaBasedLog for topic _sysint_connect_status [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:51:00,157 INFO || Starting KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-13 08:51:00,157 INFO || Starting KafkaBasedLog with topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:51:00,157 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS 
ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:00,159 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,159 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,159 INFO || Kafka startTimeMs: 1602579060159 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,182 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = producer-3 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 
sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,184 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,185 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,185 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,185 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,185 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,185 INFO || Kafka startTimeMs: 1602579060185 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,185 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = consumer-sysint-kafka-connect-3 client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = sysint-kafka-connect group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = 
[] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,188 WARN || The configuration 'rest.advertised.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'task.shutdown.graceful.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'plugin.path' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'status.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'offset.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'config.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'rest.advertised.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'status.storage.topic' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'rest.host.name' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'offset.flush.timeout.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'config.storage.replication.factor' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'rest.port' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,189 WARN || The configuration 'internal.key.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,190 WARN || The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,190 WARN || The configuration 'internal.value.converter' was supplied but isn't a known config. [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,190 WARN || The configuration 'offset.storage.replication.factor' was supplied but isn't a known config. 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:00,190 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,190 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,190 INFO || Kafka startTimeMs: 1602579060190 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,193 INFO || [Producer clientId=producer-3] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:00,196 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:00,203 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Subscribed to partition(s): _sysint_connect_configs-0 [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-13 08:51:00,203 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Seeking to EARLIEST offset of partition _sysint_connect_configs-0 [org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,211 INFO || [Consumer clientId=consumer-sysint-kafka-connect-3, groupId=sysint-kafka-connect] Resetting offset for partition _sysint_connect_configs-0 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:00,236 INFO || Finished reading KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:51:00,236 INFO || Started KafkaBasedLog for topic _sysint_connect_configs [org.apache.kafka.connect.util.KafkaBasedLog] 2020-10-13 08:51:00,236 INFO || Started KafkaConfigBackingStore [org.apache.kafka.connect.storage.KafkaConfigBackingStore] 2020-10-13 08:51:00,236 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Herder started [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,249 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:00,249 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Discovered group coordinator kafka:29093 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:00,254 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Rebalance started [org.apache.kafka.connect.runtime.distributed.WorkerCoordinator] 2020-10-13 08:51:00,255 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:00,262 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group. 
[org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:00,263 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:00,286 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Successfully joined group with generation 12 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:00,287 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Joined group at generation 12 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-c50fa031-cb7e-4e7e-b361-fdea51b513bf', leaderUrl='http://172.18.0.4:8083/', offset=20, connectorIds=[sysint-sqlserver-proc-runinit-connector, sysint-sqlserver-inv-runonly-connector, sysint-sqlserver-dbtech-runonly-connector, sysint-sqlserver-dbtech-runinit-connector], taskIds=[sysint-sqlserver-proc-runinit-connector-0, sysint-sqlserver-inv-runonly-connector-0, sysint-sqlserver-dbtech-runonly-connector-0, sysint-sqlserver-dbtech-runinit-connector-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,288 WARN || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Catching up to assignment's config offset. [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,289 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Current config state offset -1 is behind group assignment 20, reading to end of config log [org.apache.kafka.connect.runtime.distributed.DistributedHerder] Oct 13, 2020 8:51:00 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. 
Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource will be ignored. Oct 13, 2020 8:51:00 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource will be ignored. Oct 13, 2020 8:51:00 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource will be ignored. Oct 13, 2020 8:51:00 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.RootResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.RootResource will be ignored. Oct 13, 2020 8:51:00 AM org.glassfish.jersey.internal.Errors logErrors WARNING: The following warnings have been detected: WARNING: The (sub)resource method listLoggers in org.apache.kafka.connect.runtime.rest.resources.LoggingResource contains empty path annotation. WARNING: The (sub)resource method createConnector in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation. WARNING: The (sub)resource method listConnectors in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation. 
WARNING: The (sub)resource method listConnectorPlugins in org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource contains empty path annotation. WARNING: The (sub)resource method serverInfo in org.apache.kafka.connect.runtime.rest.resources.RootResource contains empty path annotation. 2020-10-13 08:51:00,446 INFO || Started o.e.j.s.ServletContextHandler@6e041285{/,null,AVAILABLE} [org.eclipse.jetty.server.handler.ContextHandler] 2020-10-13 08:51:00,447 INFO || REST resources initialized; server is started and ready to handle requests [org.apache.kafka.connect.runtime.rest.RestServer] 2020-10-13 08:51:00,447 INFO || Kafka Connect started [org.apache.kafka.connect.runtime.Connect] 2020-10-13 08:51:00,717 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished reading to end of log and updated config snapshot, new config log offset: 20 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,718 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connectors and tasks using config offset 20 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,719 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-proc-runinit-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,719 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-dbtech-runonly-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,719 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-inv-runonly-connector [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,720 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting connector sysint-sqlserver-dbtech-runinit-connector 
[org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,721 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-dbtech-runinit-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,720 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-dbtech-runonly-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,720 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-inv-runonly-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,720 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Starting task sysint-sqlserver-proc-runinit-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,725 INFO || Creating connector sysint-sqlserver-dbtech-runinit-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,725 INFO || Creating task sysint-sqlserver-dbtech-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,725 INFO || Creating task sysint-sqlserver-inv-runonly-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,725 INFO || Creating task sysint-sqlserver-proc-runinit-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,725 INFO || Creating connector sysint-sqlserver-inv-runonly-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,726 INFO || Creating task sysint-sqlserver-dbtech-runonly-connector-0 [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,726 INFO || Creating connector sysint-sqlserver-proc-runinit-connector of type io.debezium.connector.sqlserver.SqlServerConnector 
[org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,726 INFO || Creating connector sysint-sqlserver-dbtech-runonly-connector of type io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,733 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-inv-runonly-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig] 2020-10-13 08:51:00,733 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:00,733 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-proc-runinit-connector predicates = [] tasks.max = 1 
transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig] 2020-10-13 08:51:00,734 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runinit-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig] 2020-10-13 08:51:00,734 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-proc-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:00,734 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] 
value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:00,734 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-inv-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:00,734 INFO || ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runonly-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig] 2020-10-13 08:51:00,755 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-inv-runonly-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate 
= transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,757 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, 
table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,756 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runonly-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class 
org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,762 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,758 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance 
= none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-inv-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,764 INFO || Instantiated connector sysint-sqlserver-dbtech-runinit-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,767 INFO || Instantiated connector sysint-sqlserver-dbtech-runonly-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,762 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 
errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runinit-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,767 INFO || Instantiated connector sysint-sqlserver-inv-runonly-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,771 INFO || Finished creating connector sysint-sqlserver-dbtech-runonly-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,772 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null 
key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-proc-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,772 INFO || Instantiated connector sysint-sqlserver-proc-runinit-connector with version 1.3.0.Final of type class io.debezium.connector.sqlserver.SqlServerConnector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,772 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-proc-runinit-connector predicates = [] tasks.max = 1 transforms = [unwrap, route, insertuuid] 
transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,773 INFO || Finished creating connector sysint-sqlserver-proc-runinit-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,773 INFO || Finished creating connector sysint-sqlserver-dbtech-runinit-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,773 INFO || TaskConfig values: task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.TaskConfig] 2020-10-13 08:51:00,773 INFO || TaskConfig values: task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.TaskConfig] 2020-10-13 08:51:00,773 INFO || TaskConfig values: task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.TaskConfig] 2020-10-13 08:51:00,773 INFO || TaskConfig values: task.class = class io.debezium.connector.sqlserver.SqlServerConnectorTask 
[org.apache.kafka.connect.runtime.TaskConfig] 2020-10-13 08:51:00,776 INFO || Finished creating connector sysint-sqlserver-inv-runonly-connector [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,777 INFO || Instantiated task sysint-sqlserver-inv-runonly-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,777 INFO || Instantiated task sysint-sqlserver-dbtech-runinit-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,777 INFO || Instantiated task sysint-sqlserver-proc-runinit-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,777 INFO || Instantiated task sysint-sqlserver-dbtech-runonly-connector-0 with version 1.3.0.Final of type io.debezium.connector.sqlserver.SqlServerConnectorTask [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,779 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:51:00,778 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:51:00,779 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:51:00,779 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:51:00,779 INFO || Set up the key converter class 
org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-proc-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,779 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-proc-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,779 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:51:00,779 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-dbtech-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,779 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-dbtech-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,777 INFO || JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:51:00,779 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-proc-runinit-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,779 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-dbtech-runonly-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,779 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false [org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:51:00,779 INFO || Set up the key 
converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-inv-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,779 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-inv-runonly-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,780 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-inv-runonly-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,782 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:00,782 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-proc-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:00,782 
INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-inv-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:00,783 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-proc-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = 
transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,783 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-inv-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,786 INFO || JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false 
[org.apache.kafka.connect.json.JsonConverterConfig] 2020-10-13 08:51:00,786 INFO || Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-dbtech-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,786 INFO || Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task sysint-sqlserver-dbtech-runinit-connector-0 using the connector config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,786 INFO || Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sysint-sqlserver-dbtech-runinit-connector-0 using the worker config [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,787 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite 
transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,789 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:00,796 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class 
org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:00,804 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,804 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,809 INFO || Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,806 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-producer-sysint-sqlserver-inv-runonly-connector-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class 
org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,809 INFO || Initializing: 
org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, com.github.cjmatta.kafka.connect.smt.InsertUuid$Value} [org.apache.kafka.connect.runtime.Worker] 2020-10-13 08:51:00,814 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-producer-sysint-sqlserver-proc-runinit-connector-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] 
ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,813 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-producer-sysint-sqlserver-dbtech-runinit-connector-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null 
sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,814 INFO || ProducerConfig values: acks = -1 batch.size = 16384 bootstrap.servers = [kafka:29093] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-producer-sysint-sqlserver-dbtech-runonly-connector-0 compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 2147483647 retries = 
2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,830 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,830 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,830 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,830 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,830 INFO || Kafka startTimeMs: 1602579060830 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,838 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,838 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,838 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,838 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,839 INFO || Kafka startTimeMs: 1602579060838 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,839 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,839 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,839 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,839 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,839 INFO || Kafka startTimeMs: 1602579060839 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,840 WARN || The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,840 WARN || The configuration 'metrics.context.connect.group.id' was supplied but isn't a known config. [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:00,840 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,840 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,840 INFO || Kafka startTimeMs: 1602579060840 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:00,844 INFO || [Producer clientId=connector-producer-sysint-sqlserver-inv-runonly-connector-0] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:00,853 INFO || [Producer clientId=connector-producer-sysint-sqlserver-proc-runinit-connector-0] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:00,853 INFO || [Producer clientId=connector-producer-sysint-sqlserver-dbtech-runonly-connector-0] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:00,855 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Finished starting connectors and tasks [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,869 INFO || [Worker clientId=connect-1, groupId=sysint-kafka-connect] Session key updated [org.apache.kafka.connect.runtime.distributed.DistributedHerder] 2020-10-13 08:51:00,878 INFO || [Producer clientId=connector-producer-sysint-sqlserver-dbtech-runinit-connector-0] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. 
Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,900 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,900 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,900 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. 
[io.debezium.config.Configuration] 2020-10-13 08:51:00,900 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,901 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.inv [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || table.whitelist = dbo.InvoiceData,dbo.InvoiceDataOrder,dbo.InvoiceDataOrder_Order [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 
08:51:00,902 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || database.dbname = inv [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || database.server.name = inv [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || 
database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || name = sysint-sqlserver-inv-runonly-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,902 INFO || retriable.restart?.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,903 INFO || snapshot.mode = schema_only [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,903 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,904 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,904 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,905 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration] 2020-10-13 08:51:00,897 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,905 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,904 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,903 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,906 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,906 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,905 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,907 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,907 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. 
[io.debezium.config.Configuration] 2020-10-13 08:51:00,908 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,906 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,908 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,908 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,907 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,908 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,908 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,908 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,909 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,908 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. 
Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,910 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,910 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,910 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,910 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,910 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,910 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,910 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,910 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,910 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.proc [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,910 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,910 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || table.whitelist = dbo.ImpostaPagato_Configurazione [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || tombstones.on.delete = false 
[io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || database.dbname = proc [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,911 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || database.server.name = proc [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || event.processing.failure.handling.mode = warn 
[io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || name = sysint-sqlserver-proc-runinit-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || retriable.restart?.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,912 INFO || snapshot.mode = initial [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,913 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,913 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,913 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-13 08:51:00,913 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,914 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,914 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,914 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,914 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,914 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,914 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,910 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,915 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-13 08:51:00,915 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,915 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,915 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,914 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,915 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,915 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,915 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,916 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,916 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,916 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. 
Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,915 INFO || Starting SqlServerConnectorTask with configuration: [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,915 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,915 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,917 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,917 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-13 08:51:00,917 INFO || connector.class = io.debezium.connector.sqlserver.SqlServerConnector [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || database.history.kafka.topic = it.company.sysint.data.cdc.db.history.dbtech [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || errors.log.include.messages = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || table.whitelist = dbo.tab1,dbo.tab2 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || errors.log.enable = true 
[io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || database.dbname = dbtech [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || database.history.kafka.bootstrap.servers = kafka:29093 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.server.name = dbtech [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,917 INFO || transforms.unwrap.delete.handling.mode = rewrite [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || tasks.max = 1 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.history.kafka.topic = 
it.company.sysint.data.cdc.db.history.dbtech [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || transforms = unwrap,route,insertuuid [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || provide.transaction.metadata = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || table.whitelist = dbo.VatType,dbo.Registry [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || tombstones.on.delete = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || transforms.route.type = org.apache.kafka.connect.transforms.RegexRouter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || transforms.route.regex = (.*) [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || decimal.handling.mode = string [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || transforms.unwrap.drop.tombstones = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || value.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || errors.log.enable = true [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || key.converter = org.apache.kafka.connect.json.JsonConverter [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.user = sa [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.dbname = dbtech [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.history.kafka.bootstrap.servers = kafka:29093 
[io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || transforms.insertuuid.uuid.field.name = __uuid [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.server.name = dbtech [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || snapshot.isolation.mode = read_committed [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || event.processing.failure.handling.mode = warn [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || transforms.unwrap.add.headers = version,connector,name [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.port = 1433 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || key.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || task.class = io.debezium.connector.sqlserver.SqlServerConnectorTask [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.hostname = sqlserver [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || database.password = ******** [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,918 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,919 INFO || name = sysint-sqlserver-dbtech-runinit-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,919 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,919 INFO || retriable.restart?.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,919 INFO || snapshot.mode = initial [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,919 INFO || transforms.insertuuid.type = 
com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,919 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,919 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,919 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,919 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,919 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,919 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,919 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-13 08:51:00,918 INFO || value.converter.schemas.enable = false [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,919 INFO || name = sysint-sqlserver-dbtech-runonly-connector [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,920 INFO || transforms.unwrap.add.fields = schema,db,table,op,ts_ms,change_lsn,commit_lsn,event_serial_no,data_collection_order [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,920 INFO || retriable.restart?.connector.wait.ms = 10000 [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,920 INFO || snapshot.mode = schema_only [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,920 INFO || transforms.insertuuid.type = com.github.cjmatta.kafka.connect.smt.InsertUuid$Value [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:00,920 WARN || Using configuration property "schema.whitelist" is deprecated and will be removed in future versions. Please use "schema.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,920 WARN || Using configuration property "schema.blacklist" is deprecated and will be removed in future versions. Please use "schema.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,920 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,920 WARN || Using configuration property "table.blacklist" is deprecated and will be removed in future versions. Please use "table.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,920 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. 
[io.debezium.config.Configuration] 2020-10-13 08:51:00,920 WARN || Using configuration property "column.whitelist" is deprecated and will be removed in future versions. Please use "column.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:00,920 WARN || Using configuration property "column.blacklist" is deprecated and will be removed in future versions. Please use "column.exclude.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:01,253 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=dbtech-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=dbtech-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,253 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=dbtech-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=dbtech-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,253 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=proc-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=proc-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,254 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, 
acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=dbtech-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,254 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=proc-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,254 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=dbtech-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,255 INFO || Requested thread factory for connector SqlServerConnector, id = dbtech named = db-history-config-check [io.debezium.util.Threads] 2020-10-13 08:51:01,255 INFO || Requested thread factory for connector SqlServerConnector, id = dbtech named = db-history-config-check [io.debezium.util.Threads] 2020-10-13 08:51:01,256 INFO || KafkaDatabaseHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=inv-dbhistory, bootstrap.servers=kafka:29093, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=inv-dbhistory} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,256 INFO || Requested thread factory for connector SqlServerConnector, id = proc named = 
db-history-config-check [io.debezium.util.Threads] 2020-10-13 08:51:01,256 INFO || KafkaDatabaseHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=kafka:29093, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=inv-dbhistory, linger.ms=0} [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,256 INFO || Requested thread factory for connector SqlServerConnector, id = inv named = db-history-config-check [io.debezium.util.Threads] 2020-10-13 08:51:01,257 INFO || ProducerConfig values:  1 batch.size = 32768 bootstrap.servers = [kafka:29093] buffer.memory = 1048576 client.dns.lookup = use_all_dns_ips client.id = inv-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 
sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:01,258 INFO || ProducerConfig values:  1 batch.size = 32768 bootstrap.servers = [kafka:29093] buffer.memory = 1048576 client.dns.lookup = use_all_dns_ips client.id = proc-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit 
sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:01,258 INFO || ProducerConfig values:  1 batch.size = 32768 bootstrap.servers = [kafka:29093] buffer.memory = 1048576 client.dns.lookup = use_all_dns_ips client.id = dbtech-dbhistory compression.type = none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class 
org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer [org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:01,258 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=schema-history,server=dbtech': debezium.sql_server:type=connector-metrics,context=schema-history,server=dbtech [io.debezium.relational.history.DatabaseHistoryMetrics] 2020-10-13 08:51:01,260 INFO || ProducerConfig values:  1 batch.size = 32768 bootstrap.servers = [kafka:29093] buffer.memory = 1048576 client.dns.lookup = use_all_dns_ips client.id = dbtech-dbhistory compression.type = 
none connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] internal.auto.downgrade.txn.commit = false key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer 
[org.apache.kafka.clients.producer.ProducerConfig] 2020-10-13 08:51:01,265 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,265 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,265 INFO || Kafka startTimeMs: 1602579061265 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,268 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,268 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,268 INFO || Kafka startTimeMs: 1602579061266 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,268 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = inv-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = inv-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null 
sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,270 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,270 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,271 INFO || Kafka startTimeMs: 1602579061266 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,272 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = dbtech-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = 
dbtech-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer 
[org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,273 INFO || [Producer clientId=inv-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,275 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,275 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,275 INFO || Kafka startTimeMs: 1602579061265 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,275 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = proc-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = proc-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 
sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,275 WARN || Error registering AppInfo mbean [org.apache.kafka.common.utils.AppInfoParser] javax.management.InstanceAlreadyExistsException: kafka.producer:type=app-info,id=dbtech-dbhistory at java.management/com.sun.jmx.mbeanserver.Repository.addMBean(Repository.java:436) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerWithRepository(DefaultMBeanServerInterceptor.java:1855) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerDynamicMBean(DefaultMBeanServerInterceptor.java:955) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerObject(DefaultMBeanServerInterceptor.java:890) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:320) at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522) at 
org.apache.kafka.common.utils.AppInfoParser.registerAppInfo(AppInfoParser.java:64) at org.apache.kafka.clients.producer.KafkaProducer.(KafkaProducer.java:435) at org.apache.kafka.clients.producer.KafkaProducer.(KafkaProducer.java:301) at io.debezium.relational.history.KafkaDatabaseHistory.start(KafkaDatabaseHistory.java:235) at io.debezium.relational.HistorizedRelationalDatabaseSchema.(HistorizedRelationalDatabaseSchema.java:40) at io.debezium.connector.sqlserver.SqlServerDatabaseSchema.(SqlServerDatabaseSchema.java:34) at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:83) at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:232) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-13 08:51:01,280 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = dbtech-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = dbtech-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false 
isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,281 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,282 INFO || Kafka commitId: 62abe01bee039651 
[org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,282 INFO || Kafka startTimeMs: 1602579061281 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,279 INFO || [Producer clientId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,279 INFO || [Producer clientId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,282 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,283 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,283 INFO || Kafka startTimeMs: 1602579061279 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,284 WARN || Error registering AppInfo mbean [org.apache.kafka.common.utils.AppInfoParser] javax.management.InstanceAlreadyExistsException: kafka.consumer:type=app-info,id=dbtech-dbhistory at java.management/com.sun.jmx.mbeanserver.Repository.addMBean(Repository.java:436) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerWithRepository(DefaultMBeanServerInterceptor.java:1855) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerDynamicMBean(DefaultMBeanServerInterceptor.java:955) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerObject(DefaultMBeanServerInterceptor.java:890) at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:320) at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522) at org.apache.kafka.common.utils.AppInfoParser.registerAppInfo(AppInfoParser.java:64) at org.apache.kafka.clients.consumer.KafkaConsumer.(KafkaConsumer.java:814) at org.apache.kafka.clients.consumer.KafkaConsumer.(KafkaConsumer.java:667) at org.apache.kafka.clients.consumer.KafkaConsumer.(KafkaConsumer.java:647) at 
io.debezium.relational.history.KafkaDatabaseHistory.storageExists(KafkaDatabaseHistory.java:352) at io.debezium.relational.HistorizedRelationalDatabaseSchema.initializeStorage(HistorizedRelationalDatabaseSchema.java:67) at io.debezium.connector.sqlserver.SqlServerConnectorTask.start(SqlServerConnectorTask.java:84) at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:106) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:232) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:185) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:235) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-13 08:51:01,283 INFO || [Producer clientId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,286 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,287 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,287 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,287 INFO || Kafka startTimeMs: 1602579061287 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,288 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,294 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,294 INFO || Kafka commitId: 62abe01bee039651 
[org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,294 INFO || Kafka startTimeMs: 1602579061294 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,295 INFO || [Consumer clientId=inv-dbhistory, groupId=inv-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,308 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,371 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-inv-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:01,381 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-inv-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) 
transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:01,382 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:01,383 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runonly-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false 
transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:01,383 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:01,384 INFO || EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null 
key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-dbtech-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:01,385 INFO || SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-proc-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.SourceConnectorConfig] 2020-10-13 08:51:01,385 INFO || EnrichedConnectorConfig values: config.action.reload 
= restart connector.class = io.debezium.connector.sqlserver.SqlServerConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = none header.converter = null key.converter = class org.apache.kafka.connect.json.JsonConverter name = sysint-sqlserver-proc-runinit-connector predicates = [] tasks.max = 1 topic.creation.groups = [] transforms = [unwrap, route, insertuuid] transforms.insertuuid.negate = false transforms.insertuuid.predicate = transforms.insertuuid.type = class com.github.cjmatta.kafka.connect.smt.InsertUuid$Value transforms.insertuuid.uuid.field.name = __uuid transforms.route.negate = false transforms.route.predicate = transforms.route.regex = (.*) transforms.route.replacement = it.company.sysint.data.cdc.tables.$1 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [schema, db, table, op, ts_ms, change_lsn, commit_lsn, event_serial_no, data_collection_order] transforms.unwrap.add.headers = [version, connector, name] transforms.unwrap.delete.handling.mode = rewrite transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class org.apache.kafka.connect.json.JsonConverter [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig] 2020-10-13 08:51:01,554 INFO || Found previous offset SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=proc, changeLsn=NULL, commitLsn=00000029:00000438:0003, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=proc}, snapshotCompleted=true, eventSerialNo=0] [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:01,554 INFO || Found previous offset SqlServerOffsetContext 
[sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=dbtech, changeLsn=NULL, commitLsn=0000003f:00001038:0010, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=dbtech}, snapshotCompleted=true, eventSerialNo=0] [io.debezium.connector.common.BaseSourceTask] 2020-10-13 08:51:01,554 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = dbtech-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = dbtech-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 
sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,554 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = proc-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = proc-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 
1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,559 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,560 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,560 INFO || Kafka startTimeMs: 1602579061559 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,561 INFO || Requested thread factory for connector SqlServerConnector, id = dbtech named = change-event-source-coordinator [io.debezium.util.Threads] 2020-10-13 08:51:01,561 INFO || Requested thread factory for connector SqlServerConnector, id = inv named = change-event-source-coordinator 
[io.debezium.util.Threads] 2020-10-13 08:51:01,561 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,561 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,561 INFO || Kafka startTimeMs: 1602579061561 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,563 INFO || Creating thread debezium-sqlserverconnector-dbtech-change-event-source-coordinator [io.debezium.util.Threads] 2020-10-13 08:51:01,564 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,564 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,564 INFO || Creating thread debezium-sqlserverconnector-inv-change-event-source-coordinator [io.debezium.util.Threads] 2020-10-13 08:51:01,565 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:01,566 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = proc-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = proc-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 
300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,568 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = dbtech-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 
enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = dbtech-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = 
null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,568 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:01,567 INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,568 INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,569 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,569 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,571 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,571 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,571 INFO || Kafka startTimeMs: 1602579061571 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,571 INFO || Creating thread debezium-sqlserverconnector-dbtech-db-history-config-check [io.debezium.util.Threads] 2020-10-13 08:51:01,573 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,573 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,573 INFO || Kafka startTimeMs: 1602579061573 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,573 INFO || Creating thread debezium-sqlserverconnector-proc-db-history-config-check [io.debezium.util.Threads] 2020-10-13 08:51:01,577 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = dbtech-dbhistory-topic-check connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 
metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,577 INFO || No previous offset has been found [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,577 INFO || According to the connector configuration only schema will be snapshotted [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,578 INFO || Snapshot step 1 - Preparing [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,577 INFO || No previous offset has been found [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,578 INFO || According to the connector 
configuration only schema will be snapshotted [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,578 INFO || Snapshot step 1 - Preparing [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,579 INFO || Snapshot step 2 - Determining captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,579 INFO || Snapshot step 2 - Determining captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,580 WARN || The configuration 'value.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,580 WARN || The configuration 'acks' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,580 WARN || The configuration 'batch.size' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,580 WARN || The configuration 'max.block.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,580 WARN || The configuration 'buffer.memory' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,580 WARN || The configuration 'key.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,580 WARN || The configuration 'linger.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,580 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,580 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,580 INFO || Kafka startTimeMs: 1602579061580 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,580 INFO || AdminClientConfig values: bootstrap.servers = [kafka:29093] client.dns.lookup = use_all_dns_ips client.id = proc-dbhistory-topic-check connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000  retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,581 WARN || The configuration 'value.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,581 WARN || The configuration 'acks' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,581 WARN || The configuration 'batch.size' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,581 WARN || The configuration 'max.block.ms' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,581 WARN || The configuration 'buffer.memory' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,581 WARN || The configuration 'key.serializer' was supplied but isn't a known config. [org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,581 WARN || The configuration 'linger.ms' was supplied but isn't a known config. 
[org.apache.kafka.clients.admin.AdminClientConfig] 2020-10-13 08:51:01,581 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,581 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,581 INFO || Kafka startTimeMs: 1602579061581 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,583 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,583 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,590 INFO || Started database history recovery [io.debezium.relational.history.DatabaseHistoryMetrics] 2020-10-13 08:51:01,597 INFO || Started database history recovery [io.debezium.relational.history.DatabaseHistoryMetrics] 2020-10-13 08:51:01,618 INFO || Database history topic 'it.company.sysint.data.cdc.db.history.dbtech' has correct settings [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,649 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = dbtech-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = dbtech-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 
metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,649 INFO || ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.offset.reset = earliest bootstrap.servers = [kafka:29093] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = proc-dbhistory client.rack = connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false 
exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = proc-dbhistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.mechanism = GSSAPI security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS 
value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer [org.apache.kafka.clients.consumer.ConsumerConfig] 2020-10-13 08:51:01,653 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,654 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,654 INFO || Kafka startTimeMs: 1602579061653 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,654 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Subscribed to topic(s): it.company.sysint.data.cdc.db.history.dbtech [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-13 08:51:01,654 INFO || Kafka version: 2.6.0 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,656 INFO || Kafka commitId: 62abe01bee039651 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,656 INFO || Kafka startTimeMs: 1602579061653 [org.apache.kafka.common.utils.AppInfoParser] 2020-10-13 08:51:01,657 INFO || Database history topic 'it.company.sysint.data.cdc.db.history.proc' has correct settings [io.debezium.relational.history.KafkaDatabaseHistory] 2020-10-13 08:51:01,656 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Subscribed to topic(s): it.company.sysint.data.cdc.db.history.proc [org.apache.kafka.clients.consumer.KafkaConsumer] 2020-10-13 08:51:01,661 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,663 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Cluster ID: q37BlmbORhWBfukJ41nUiw [org.apache.kafka.clients.Metadata] 2020-10-13 08:51:01,668 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Discovered group coordinator kafka:29093 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,670 INFO || [Consumer clientId=proc-dbhistory, 
groupId=proc-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,669 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Discovered group coordinator kafka:29093 (id: 2147483646 rack: null) [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,673 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,677 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group. [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,677 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Join group failed with org.apache.kafka.common.errors.MemberIdRequiredException: The group member needs to have a valid member id before actually entering a consumer group. 
[org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,678 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,678 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] (Re-)joining group [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,684 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Finished assignment for group at generation 1: {dbtech-dbhistory-0740c981-d315-4962-a104-c313fe68e534=Assignment(partitions=[it.company.sysint.data.cdc.db.history.dbtech-0])} [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,686 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:01,687 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Successfully joined group with generation 1 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,691 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Notifying assignor about the new Assignment(partitions=[it.company.sysint.data.cdc.db.history.dbtech-0]) [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,691 INFO || Snapshot step 3 - Locking captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,692 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Adding newly assigned partitions: it.company.sysint.data.cdc.db.history.dbtech-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,694 INFO || Schema locking was disabled in connector configuration [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,694 INFO 
|| Snapshot step 4 - Determining snapshot offset [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,696 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Found no committed offset for partition it.company.sysint.data.cdc.db.history.dbtech-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,691 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Finished assignment for group at generation 1: {proc-dbhistory-8b4e8d37-f5e2-49e4-a652-6081ae8c1aca=Assignment(partitions=[it.company.sysint.data.cdc.db.history.proc-0])} [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,699 INFO || Snapshot step 5 - Reading structure of captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,700 INFO || Reading structure of schema 'inv' [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,700 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Resetting offset for partition it.company.sysint.data.cdc.db.history.dbtech-0 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:01,710 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Successfully joined group with generation 1 [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,711 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Notifying assignor about the new Assignment(partitions=[it.company.sysint.data.cdc.db.history.proc-0]) [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,712 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Adding newly assigned partitions: it.company.sysint.data.cdc.db.history.proc-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,715 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Found no committed offset for partition it.company.sysint.data.cdc.db.history.proc-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,716 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Resetting offset for partition it.company.sysint.data.cdc.db.history.proc-0 to offset 0. 
[org.apache.kafka.clients.consumer.internals.SubscriptionState] 2020-10-13 08:51:01,729 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Revoke previously assigned partitions it.company.sysint.data.cdc.db.history.proc-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,729 INFO || [Consumer clientId=proc-dbhistory, groupId=proc-dbhistory] Member proc-dbhistory-8b4e8d37-f5e2-49e4-a652-6081ae8c1aca sending LeaveGroup request to coordinator kafka:29093 (id: 2147483646 rack: null) due to the consumer is being closed [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,739 INFO || Finished database history recovery of 1 change(s) in 141 ms [io.debezium.relational.history.DatabaseHistoryMetrics] 2020-10-13 08:51:01,744 WARN || Using configuration property "table.whitelist" is deprecated and will be removed in future versions. Please use "table.include.list" instead. [io.debezium.config.Configuration] 2020-10-13 08:51:01,745 INFO || Snapshot step 3 - Locking captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,745 INFO || Schema locking was disabled in connector configuration [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,745 INFO || Snapshot step 4 - Determining snapshot offset [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,747 INFO || Requested thread factory for connector SqlServerConnector, id = proc named = change-event-source-coordinator [io.debezium.util.Threads] 2020-10-13 08:51:01,747 INFO || Creating thread debezium-sqlserverconnector-proc-change-event-source-coordinator [io.debezium.util.Threads] 2020-10-13 08:51:01,754 INFO || Snapshot step 5 - Reading structure of captured tables [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:01,754 INFO || Reading structure of schema 'dbtech' 
[io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,754 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:01,761 INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,761 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,761 INFO || A previous offset indicating a completed snapshot has been found. Neither schema nor data will be snapshotted. [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,761 INFO || Snapshot ended with SnapshotResult [status=SKIPPED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=proc, changeLsn=NULL, commitLsn=00000029:00000438:0003, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=proc}, snapshotCompleted=true, eventSerialNo=0]] [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,763 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-13 08:51:01,763 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,820 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Revoke previously assigned partitions it.company.sysint.data.cdc.db.history.dbtech-0 [org.apache.kafka.clients.consumer.internals.ConsumerCoordinator] 2020-10-13 08:51:01,820 INFO || [Consumer clientId=dbtech-dbhistory, groupId=dbtech-dbhistory] Member dbtech-dbhistory-0740c981-d315-4962-a104-c313fe68e534 sending LeaveGroup request to coordinator kafka:29093 (id: 2147483646 rack: null) due to the consumer is being closed [org.apache.kafka.clients.consumer.internals.AbstractCoordinator] 2020-10-13 08:51:01,826 INFO || Finished database 
history recovery of 36 change(s) in 235 ms [io.debezium.relational.history.DatabaseHistoryMetrics] 2020-10-13 08:51:01,836 INFO || Requested thread factory for connector SqlServerConnector, id = dbtech named = change-event-source-coordinator [io.debezium.util.Threads] 2020-10-13 08:51:01,837 INFO || Creating thread debezium-sqlserverconnector-dbtech-change-event-source-coordinator [io.debezium.util.Threads] 2020-10-13 08:51:01,848 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Source task finished initialization and start [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:01,849 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=snapshot,server=dbtech': debezium.sql_server:type=connector-metrics,context=snapshot,server=dbtech [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,849 WARN || Unable to register the MBean 'debezium.sql_server:type=connector-metrics,context=streaming,server=dbtech': debezium.sql_server:type=connector-metrics,context=streaming,server=dbtech [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,849 INFO || Metrics registered [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,849 INFO || Context created [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,849 INFO || A previous offset indicating a completed snapshot has been found. Neither schema nor data will be snapshotted. 
[io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:01,849 INFO || Snapshot ended with SnapshotResult [status=SKIPPED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=dbtech, changeLsn=NULL, commitLsn=0000003f:00001038:0010, eventSerialNo=null, snapshot=FALSE, sourceTime=null], partition={server=dbtech}, snapshotCompleted=true, eventSerialNo=0]] [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,856 INFO || Last position recorded in offsets is 00000029:00000438:0003(NULL)[0] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-13 08:51:01,852 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-13 08:51:01,856 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:01,967 INFO || CDC is enabled for table Capture instance "dbo_OtherTable" [sourceTableId=dbtech.dbo.OtherTable, changeTableId=dbtech.cdc.dbo_OtherTable_CT, startLsn=00000038:000007b0:006c, changeTableObjectId=1486628339, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-13 08:51:01,967 INFO || CDC is enabled for table Capture instance "dbo_Payment" [sourceTableId=dbtech.dbo.Payment, changeTableId=dbtech.cdc.dbo_Payment_CT, startLsn=00000037:00000af0:00b1, changeTableObjectId=1294627655, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-13 08:51:01,968 INFO || Last position recorded in offsets is 0000003f:00001038:0010(NULL)[0] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-13 08:51:02,345 WARN || Cannot parse column default value '(NULL)' to type 'int'. 
[io.debezium.connector.sqlserver.SqlServerDefaultValueConverter] java.lang.NumberFormatException: For input string: "UL" at java.base/java.lang.NumberFormatException.forInputString(NumberFormatException.java:65) at java.base/java.lang.Integer.parseInt(Integer.java:652) at java.base/java.lang.Integer.parseInt(Integer.java:770) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$1(SqlServerDefaultValueConverter.java:115) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82) at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512) at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181) at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126) at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183) at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122) at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-13 08:51:02,351 WARN || Cannot parse column default value '(NULL)' to type 'int'. 
[io.debezium.connector.sqlserver.SqlServerDefaultValueConverter] java.lang.NumberFormatException: For input string: "UL" at java.base/java.lang.NumberFormatException.forInputString(NumberFormatException.java:65) at java.base/java.lang.Integer.parseInt(Integer.java:652) at java.base/java.lang.Integer.parseInt(Integer.java:770) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$1(SqlServerDefaultValueConverter.java:115) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82) at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512) at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181) at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126) at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183) at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122) at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-13 08:51:02,495 INFO || Snapshot step 6 - Persisting schema history [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:02,606 INFO || Snapshot step 7 - Skipping snapshotting of data [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 
08:51:02,613 INFO || Snapshot - Final stage [io.debezium.pipeline.source.AbstractSnapshotChangeEventSource] 2020-10-13 08:51:02,613 INFO || Removing locking timeout [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:02,616 INFO || Snapshot ended with SnapshotResult [status=COMPLETED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=dbtech, changeLsn=NULL, commitLsn=00000047:00000908:0001, eventSerialNo=null, snapshot=FALSE, sourceTime=2020-10-13T08:51:02.596Z], partition={server=dbtech}, snapshotCompleted=true, eventSerialNo=1]] [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:02,617 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-13 08:51:02,617 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:02,657 INFO || CDC is enabled for table Capture instance "dbo_VatType" [sourceTableId=dbtech.dbo.VatType, changeTableId=dbtech.cdc.dbo_VatType_CT, startLsn=0000003f:00000028:0042, changeTableObjectId=683149479, stopLsn=NULL] but the table is not whitelisted by connector [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-13 08:51:02,658 INFO || Last position recorded in offsets is 00000047:00000908:0001(NULL)[1] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-13 08:51:03,741 WARN || Cannot parse column default value '(getutcdate())' to type 'datetime2'. [io.debezium.connector.sqlserver.SqlServerDefaultValueConverter] com.microsoft.sqlserver.jdbc.SQLServerException: Error converting string value 'etutcdate(' into data type datetime2 using culture ''. 
at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDatabaseError(SQLServerException.java:262) at com.microsoft.sqlserver.jdbc.SQLServerResultSet$FetchBuffer.nextRow(SQLServerResultSet.java:5427) at com.microsoft.sqlserver.jdbc.SQLServerResultSet.fetchBufferNext(SQLServerResultSet.java:1758) at com.microsoft.sqlserver.jdbc.SQLServerResultSet.next(SQLServerResultSet.java:1016) at io.debezium.jdbc.JdbcConnection.querySingleValue(JdbcConnection.java:1299) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$19(SqlServerDefaultValueConverter.java:139) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82) at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512) at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181) at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126) at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183) at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122) at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-13 08:51:03,750 WARN || Cannot parse column default value '(getutcdate())' to type 'datetime2'. 
[io.debezium.connector.sqlserver.SqlServerDefaultValueConverter] com.microsoft.sqlserver.jdbc.SQLServerException: Error converting string value 'etutcdate(' into data type datetime2 using culture ''. at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDatabaseError(SQLServerException.java:262) at com.microsoft.sqlserver.jdbc.SQLServerResultSet$FetchBuffer.nextRow(SQLServerResultSet.java:5427) at com.microsoft.sqlserver.jdbc.SQLServerResultSet.fetchBufferNext(SQLServerResultSet.java:1758) at com.microsoft.sqlserver.jdbc.SQLServerResultSet.next(SQLServerResultSet.java:1016) at io.debezium.jdbc.JdbcConnection.querySingleValue(JdbcConnection.java:1299) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$19(SqlServerDefaultValueConverter.java:139) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82) at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512) at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181) at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126) at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183) at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122) at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at 
java.base/java.lang.Thread.run(Thread.java:834) 2020-10-13 08:51:03,756 WARN || Cannot parse column default value '(getutcdate())' to type 'datetime2'. [io.debezium.connector.sqlserver.SqlServerDefaultValueConverter] com.microsoft.sqlserver.jdbc.SQLServerException: Error converting string value 'etutcdate(' into data type datetime2 using culture ''. at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDatabaseError(SQLServerException.java:262) at com.microsoft.sqlserver.jdbc.SQLServerResultSet$FetchBuffer.nextRow(SQLServerResultSet.java:5427) at com.microsoft.sqlserver.jdbc.SQLServerResultSet.fetchBufferNext(SQLServerResultSet.java:1758) at com.microsoft.sqlserver.jdbc.SQLServerResultSet.next(SQLServerResultSet.java:1016) at io.debezium.jdbc.JdbcConnection.querySingleValue(JdbcConnection.java:1299) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.lambda$createDefaultValueMappers$19(SqlServerDefaultValueConverter.java:139) at io.debezium.connector.sqlserver.SqlServerDefaultValueConverter.parseDefaultValue(SqlServerDefaultValueConverter.java:82) at io.debezium.connector.sqlserver.SqlServerConnection.getDefaultValue(SqlServerConnection.java:512) at io.debezium.jdbc.JdbcConnection.readTableColumn(JdbcConnection.java:1181) at io.debezium.jdbc.JdbcConnection.readSchema(JdbcConnection.java:1126) at io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource.readTableStructure(SqlServerSnapshotChangeEventSource.java:183) at io.debezium.relational.RelationalSnapshotChangeEventSource.doExecute(RelationalSnapshotChangeEventSource.java:122) at io.debezium.pipeline.source.AbstractSnapshotChangeEventSource.execute(AbstractSnapshotChangeEventSource.java:63) at io.debezium.pipeline.ChangeEventSourceCoordinator.lambda$start$0(ChangeEventSourceCoordinator.java:105) at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:834) 2020-10-13 08:51:03,862 INFO || Snapshot step 6 - Persisting schema history [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:03,875 INFO || Snapshot step 7 - Skipping snapshotting of data [io.debezium.relational.RelationalSnapshotChangeEventSource] 2020-10-13 08:51:03,877 INFO || Snapshot - Final stage [io.debezium.pipeline.source.AbstractSnapshotChangeEventSource] 2020-10-13 08:51:03,878 INFO || Removing locking timeout [io.debezium.connector.sqlserver.SqlServerSnapshotChangeEventSource] 2020-10-13 08:51:03,879 INFO || Snapshot ended with SnapshotResult [status=COMPLETED, offset=SqlServerOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.sqlserver.Source:STRUCT}, sourceInfo=SourceInfo [serverName=inv, changeLsn=NULL, commitLsn=00000031:00000098:0001, eventSerialNo=null, snapshot=FALSE, sourceTime=2020-10-13T08:51:03.873Z], partition={server=inv}, snapshotCompleted=true, eventSerialNo=1]] [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:03,879 INFO || Connected metrics set to 'true' [io.debezium.pipeline.metrics.StreamingChangeEventSourceMetrics] 2020-10-13 08:51:03,879 INFO || Starting streaming [io.debezium.pipeline.ChangeEventSourceCoordinator] 2020-10-13 08:51:03,895 INFO || Last position recorded in offsets is 00000031:00000098:0001(NULL)[1] [io.debezium.connector.sqlserver.SqlServerStreamingChangeEventSource] 2020-10-13 08:51:04,871 WARN || [Producer clientId=connector-producer-sysint-sqlserver-dbtech-runinit-connector-0] Error while fetching metadata with correlation id 3 : {it.company.sysint.data.cdc.tables.dbtech.transaction=LEADER_NOT_AVAILABLE} [org.apache.kafka.clients.NetworkClient] 2020-10-13 08:51:05,851 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,852 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,863 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Finished commitOffsets successfully in 11 ms [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,863 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,864 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,864 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,866 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Finished commitOffsets successfully in 2 ms [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:05,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Finished commitOffsets successfully in 3 ms 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:10,863 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:10,863 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:10,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:10,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:10,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:10,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:10,871 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:10,871 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:15,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:15,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:15,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:15,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:15,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:15,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:15,871 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:15,871 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:20,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:20,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:20,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:20,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:20,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:20,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding 
messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:20,872 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:20,872 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:25,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:25,864 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:25,864 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:25,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:25,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:25,867 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:25,872 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:25,873 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:30,865 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} 
Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:30,865 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:30,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:30,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:30,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:30,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:30,873 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:30,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:35,865 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:35,865 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:35,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:35,865 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 
outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:35,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:35,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:35,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:35,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:40,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:40,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:40,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:40,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:40,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:40,868 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:40,874 INFO || 
WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:40,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:45,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:45,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:45,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:45,866 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:45,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:45,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:45,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:45,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:50,866 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:50,867 INFO || 
WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:50,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:50,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:50,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:50,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:50,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:50,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:55,867 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:55,867 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:55,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:55,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 
08:51:55,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:55,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:55,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:51:55,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:00,867 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:00,867 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:00,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:00,867 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:00,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:00,869 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:00,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 
08:52:00,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:05,867 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:05,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:05,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:05,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:05,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:05,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:05,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:05,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:10,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:10,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:10,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:10,868 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:10,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:10,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:10,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:10,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:15,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:15,868 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:15,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:15,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:15,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:15,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:15,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:15,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:20,869 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:20,869 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:20,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:20,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:20,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:20,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:20,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:20,877 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding 
messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:25,078 WARN || [Producer clientId=connector-producer-sysint-sqlserver-dbtech-runonly-connector-0] Error while fetching metadata with correlation id 21 : {it.company.sysint.data.cdc.tables.dbtech.dbo.Payment=LEADER_NOT_AVAILABLE} [org.apache.kafka.clients.NetworkClient] 2020-10-13 08:52:25,869 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:25,869 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:25,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:25,869 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:25,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:25,870 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:25,874 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Finished commitOffsets successfully in 4 ms [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:25,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:25,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:30,870 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:30,870 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:30,870 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:30,870 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:30,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:30,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:30,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:30,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:35,870 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:35,870 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:35,870 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets 
[org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:35,870 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:35,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:35,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:35,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:35,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:40,870 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:40,871 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:40,871 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:40,871 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:40,875 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:40,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding 
messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:40,878 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:40,879 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:45,871 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:45,871 INFO || WorkerSourceTask{id=sysint-sqlserver-inv-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:45,871 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:45,871 INFO || WorkerSourceTask{id=sysint-sqlserver-proc-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:45,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:45,876 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runonly-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:45,879 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask] 2020-10-13 08:52:45,879 INFO || WorkerSourceTask{id=sysint-sqlserver-dbtech-runinit-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]