log4j:ERROR setFile(null,true) call failed.
java.io.FileNotFoundException: /u/kafka_BI/bin/../logs/connect.log (Permission denied)
        at java.base/java.io.FileOutputStream.open0(Native Method)
        at java.base/java.io.FileOutputStream.open(FileOutputStream.java:298)
        at java.base/java.io.FileOutputStream.<init>(FileOutputStream.java:237)
        at java.base/java.io.FileOutputStream.<init>(FileOutputStream.java:158)
        at org.apache.log4j.FileAppender.setFile(FileAppender.java:272)
        at org.apache.log4j.FileAppender.activateOptions(FileAppender.java:162)
        at org.apache.log4j.DailyRollingFileAppender.activateOptions(DailyRollingFileAppender.java:208)
        at org.apache.log4j.config.PropertySetter.activate(PropertySetter.java:274)
        at org.apache.log4j.config.PropertySetter.setProperties(PropertySetter.java:154)
        at org.apache.log4j.config.PropertySetter.setProperties(PropertySetter.java:97)
        at org.apache.log4j.PropertyConfigurator.parseAppender(PropertyConfigurator.java:775)
        at org.apache.log4j.PropertyConfigurator.parseCategory(PropertyConfigurator.java:711)
        at org.apache.log4j.PropertyConfigurator.configureRootCategory(PropertyConfigurator.java:603)
        at org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:491)
        at org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:552)
        at org.apache.log4j.helpers.OptionConverter.selectAndConfigure(OptionConverter.java:485)
        at org.apache.log4j.LogManager.<clinit>(LogManager.java:115)
        at org.slf4j.impl.Reload4jLoggerFactory.<init>(Reload4jLoggerFactory.java:67)
        at org.slf4j.impl.StaticLoggerBinder.<init>(StaticLoggerBinder.java:72)
        at org.slf4j.impl.StaticLoggerBinder.<clinit>(StaticLoggerBinder.java:45)
        at org.slf4j.LoggerFactory.bind(LoggerFactory.java:150)
        at org.slf4j.LoggerFactory.performInitialization(LoggerFactory.java:124)
        at org.slf4j.LoggerFactory.getILoggerFactory(LoggerFactory.java:412)
        at org.slf4j.LoggerFactory.getLogger(LoggerFactory.java:357)
        at org.slf4j.LoggerFactory.getLogger(LoggerFactory.java:383)
        at org.apache.kafka.connect.cli.ConnectDistributed.<clinit>(ConnectDistributed.java:61)
log4j:ERROR Either File or DatePattern options are not set for appender [connectAppender].
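The two log4j:ERROR lines above are emitted while logging is being configured for the Connect worker: /u/kafka_BI/bin/../logs/connect.log cannot be opened for writing (Permission denied), and because the connectAppender never activates, log4j follows up with the "Either File or DatePattern options are not set" message for that appender. As a reference point, here is a minimal sketch of what the connectAppender definition in /etc/kafka/connect-log4j.properties (the file named by -Dlog4j.configuration below) typically looks like; the concrete values are assumed from the stock Confluent template and may differ in this installation:

    # Sketch only: assumed stock connect-log4j.properties entries for the connectAppender named in the error
    log4j.appender.connectAppender=org.apache.log4j.DailyRollingFileAppender
    # File and DatePattern are the two options the second error reports as unset
    log4j.appender.connectAppender.File=${kafka.logs.dir}/connect.log
    log4j.appender.connectAppender.DatePattern='.'yyyy-MM-dd-HH
    log4j.appender.connectAppender.layout=org.apache.log4j.PatternLayout
    log4j.appender.connectAppender.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n

If those properties are already present, the configuration itself is likely not the problem: the account running ConnectDistributed needs write access to the directory behind -Dkafka.logs.dir (here /u/kafka_BI/bin/../logs), or File / kafka.logs.dir should be pointed at a writable location.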
{"description":"Redaction rule for heap byte buffers in Jetty debug logs","trigger":"HeapByteBuffer","search":"HeapByteBuffer.*","replace":"HeapByteBuffer=[BUFFER CONTENTS REDACTED]"} [2024-02-28 13:36:17,648] INFO WorkerInfo values: jvm.args = -Xms256M, -Xmx2G, -XX:+UseG1GC, -XX:MaxGCPauseMillis=20, -XX:InitiatingHeapOccupancyPercent=35, -XX:+ExplicitGCInvokesConcurrent, -XX:MaxInlineLevel=15, -Djava.awt.headless=true, -Dcom.sun.management.jmxremote=true, -Dcom.sun.management.jmxremote.authenticate=false, -Dcom.sun.management.jmxremote.ssl=false, -Djava.net.preferIPv4Stack=true, -Dkafka.logs.dir=/u/kafka_BI/bin/../logs, -Dlog4j.configuration=file:/etc/kafka/connect-log4j.properties, -Dlog4j.config.dir=/etc/kafka jvm.spec = Red Hat, Inc., OpenJDK 64-Bit Server VM, 11.0.21, 11.0.21+9-LTS jvm.classpath = /u/kafka_BI/share/java/confluent-security/connect/google-auth-library-credentials-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/classgraph-4.8.21.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-http-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-metadata-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/jersey-common-2.36.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-haproxy-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/asm-tree-9.4.jar:/u/kafka_BI/share/java/confluent-security/connect/http2-common-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-socks-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-dataformat-cbor-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-native-kqueue-4.1.92.Final-osx-aarch_64.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-compress-1.21.jar:/u/kafka_BI/share/java/confluent-security/connect/scala-library-2.13.10.jar:/u/kafka_BI/share/java/confluent-security/connect/minimal-json-0.9.5.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-module-scala_2.13-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/confluent-licensing-new-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/confluent-serializers-new-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/javax.servlet-api-4.0.1.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka_2.13-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-stdlib-common-1.6.10.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-dataformat-properties-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-tcnative-boringssl-static-2.0.60.Final-linux-x86_64.jar:/u/kafka_BI/share/java/confluent-security/connect/aws-java-sdk-s3-1.12.268.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-datatype-jsr310-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/jakarta.inject-2.6.1.jar:/u/kafka_BI/share/java/confluent-security/connect/audience-annotations-0.5.0.jar:/u/kafka_BI/share/java/confluent-security/connect/ion-java-1.0.2.jar:/u/kafka_BI/share/java/confluent-security/connect/nimbus-jose-jwt-9.24.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-collections4-4.4.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-json-serializer-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/joda-time-2.10.8.jar:/u/kafka_BI/share/java/confluent-security/connect/jbcrypt-0.4.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-
raft-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/http2-server-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/proto-google-common-protos-2.5.1.jar:/u/kafka_BI/share/java/confluent-security/connect/validation-api-2.0.1.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-protobuf-provider-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-tcnative-boringssl-static-2.0.60.Final-windows-x86_64.jar:/u/kafka_BI/share/java/confluent-security/connect/aws-java-sdk-core-1.12.268.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-validator-1.7.jar:/u/kafka_BI/share/java/confluent-security/connect/jersey-hk2-2.36.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-tcnative-boringssl-static-2.0.60.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-sctp-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/websocket-client-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/javassist-3.25.0-GA.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-native-unix-common-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-resolver-dns-native-macos-4.1.92.Final-osx-x86_64.jar:/u/kafka_BI/share/java/confluent-security/connect/logredactor-1.0.12.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-common-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/okio-jvm-3.0.0.jar:/u/kafka_BI/share/java/confluent-security/connect/rest-authorizer-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/bc-fips-1.0.2.3.jar:/u/kafka_BI/share/java/confluent-security/connect/tink-gcpkms-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-annotations-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-dataformat-yaml-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-http2-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/asm-commons-9.4.jar:/u/kafka_BI/share/java/confluent-security/connect/snakeyaml-2.0.jar:/u/kafka_BI/share/java/confluent-security/connect/websocket-api-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-logging-1.2.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-collections-3.2.2.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-storage-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-rxtx-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-servlet-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/proto-google-iam-v1-1.3.1.jar:/u/kafka_BI/share/java/confluent-security/connect/api-common-2.1.5.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-handler-4.1.86.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-protobuf-serializer-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/google-api-client-1.34.0.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-xml-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-dns-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/api-util-2.1.2.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-udt-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/everit-json-schema-1.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/zookeeper-jute-3.6.3.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-datatype-jdk8-2.14.2.jar:/u/kafka_BI/sha
re/java/confluent-security/connect/authorizer-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/jakarta.ws.rs-api-2.1.6.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-util-ajax-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-scripting-compiler-impl-embeddable-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/common-utils-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/javax-websocket-server-impl-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-native-epoll-4.1.92.Final-linux-x86_64.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-shell-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/protobuf-java-3.19.6.jar:/u/kafka_BI/share/java/confluent-security/connect/confluent-connect-security-plugin-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-stomp-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/hibernate-validator-6.1.7.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/logredactor-metrics-1.0.12.jar:/u/kafka_BI/share/java/confluent-security/connect/auto-service-annotations-1.0-rc7.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-cli-1.4.jar:/u/kafka_BI/share/java/confluent-security/connect/bctls-fips-1.0.13.jar:/u/kafka_BI/share/java/confluent-security/connect/scala-logging_2.13-3.9.4.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-tcnative-boringssl-static-2.0.60.Final-osx-x86_64.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-classes-kqueue-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-digester-2.1.jar:/u/kafka_BI/share/java/confluent-security/connect/flatbuffers-java-2.0.3.jar:/u/kafka_BI/share/java/confluent-security/connect/org.apache.servicemix.bundles.antlr-2.7.7_5.jar:/u/kafka_BI/share/java/confluent-security/connect/jersey-container-servlet-2.36.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-native-kqueue-4.1.92.Final-osx-x86_64.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-alpn-server-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/websocket-common-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-datatype-joda-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/rbac-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-storage-api-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-buffer-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-resolver-dns-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-client-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-resolver-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/classmate-1.3.4.jar:/u/kafka_BI/share/java/confluent-security/connect/scala-reflect-2.13.10.jar:/u/kafka_BI/share/java/confluent-security/connect/auto-value-annotations-1.9.jar:/u/kafka_BI/share/java/confluent-security/connect/google-auth-library-oauth2-http-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/google-cloud-storage-2.6.1.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-jndi-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/jersey-client-2.36.jar:/u/kafka_BI/share/java/confluent-security/connect/mbknor-jackson-jsonschema_2.13-1.0.39.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-redis-4.1.92.Final.jar:/u/kafka_BI/share/jav
a/confluent-security/connect/kafka-secret-registry-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-alpn-java-server-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-protobuf-types-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/jakarta.validation-api-2.0.2.jar:/u/kafka_BI/share/java/confluent-security/connect/google-api-services-cloudkms-v1-rev108-1.25.0.jar:/u/kafka_BI/share/java/confluent-security/connect/agrona-1.15.2.jar:/u/kafka_BI/share/java/confluent-security/connect/jakarta.xml.bind-api-2.3.3.jar:/u/kafka_BI/share/java/confluent-security/connect/handy-uri-templates-2.1.8.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-memcache-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/jakarta.annotation-api-1.3.5.jar:/u/kafka_BI/share/java/confluent-security/connect/hk2-utils-2.6.1.jar:/u/kafka_BI/share/java/confluent-security/connect/cel-generated-pb-0.3.5.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-module-parameter-names-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/confluent-security-plugins-common-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-mqtt-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/javax.websocket-client-api-1.0.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-handler-proxy-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/internal-rest-server-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-dataformat-csv-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-script-runtime-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/google-http-client-appengine-1.41.7.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-xml-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/google-http-client-1.41.7.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-server-common-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/scala-java8-compat_2.13-1.0.2.jar:/u/kafka_BI/share/java/confluent-security/connect/metrics-core-4.1.12.1.jar:/u/kafka_BI/share/java/confluent-security/connect/gax-2.16.0.jar:/u/kafka_BI/share/java/confluent-security/connect/opencensus-api-0.31.0.jar:/u/kafka_BI/share/java/confluent-security/connect/metrics-core-2.2.0.jar:/u/kafka_BI/share/java/confluent-security/connect/websocket-servlet-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/httpcore-4.4.13.jar:/u/kafka_BI/share/java/confluent-security/connect/wire-runtime-jvm-4.4.3.jar:/u/kafka_BI/share/java/confluent-security/connect/json-20230227.jar:/u/kafka_BI/share/java/confluent-security/connect/j2objc-annotations-1.3.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-secret-registry-client-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/guava-30.1.1-jre.jar:/u/kafka_BI/share/java/confluent-security/connect/httpclient-4.5.13.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-classes-epoll-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-module-jaxb-annotations-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/okio-3.0.0.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-lang3-3.12.0.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-transport-native-epoll-4.1.92.Final-linux-aarch_64.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-connect-json-schema-converter-7.4.1.jar:/u/kafka_BI/share/java/confluent-secu
rity/connect/kafka-connect-avro-data-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-scripting-common-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/u/kafka_BI/share/java/confluent-security/connect/json-smart-2.4.10.jar:/u/kafka_BI/share/java/confluent-security/connect/argparse4j-0.7.0.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-io-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-codec-1.13.jar:/u/kafka_BI/share/java/confluent-security/connect/re2j-1.6.jar:/u/kafka_BI/share/java/confluent-security/connect/activation-1.1.1.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-schema-serializer-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/confluent-connect-secret-registry-plugin-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-tcnative-boringssl-static-2.0.60.Final-linux-aarch_64.jar:/u/kafka_BI/share/java/confluent-security/connect/api-ldap-model-2.1.2.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-schema-converter-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/http2-hpack-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/grpc-context-1.45.1.jar:/u/kafka_BI/share/java/confluent-security/connect/avro-1.11.0.jar:/u/kafka_BI/share/java/confluent-security/connect/rest-utils-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/scala-collection-compat_2.13-2.10.0.jar:/u/kafka_BI/share/java/confluent-security/connect/jakarta.activation-api-1.2.2.jar:/u/kafka_BI/share/java/confluent-security/connect/jersey-container-servlet-core-2.36.jar:/u/kafka_BI/share/java/confluent-security/connect/api-asn1-ber-2.1.2.jar:/u/kafka_BI/share/java/confluent-security/connect/jakarta.el-3.0.4.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-stdlib-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/google-oauth-client-1.33.3.jar:/u/kafka_BI/share/java/confluent-security/connect/annotations-13.0.jar:/u/kafka_BI/share/java/confluent-security/connect/checker-qual-3.8.0.jar:/u/kafka_BI/share/java/confluent-security/connect/aws-java-sdk-sts-1.12.268.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-scripting-jvm-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-handler-ssl-ocsp-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/google-http-client-apache-v2-1.41.7.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-json-schema-serializer-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-jmx-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/jaxb-api-2.3.0.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-resolver-dns-classes-macos-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/gson-2.9.0.jar:/u/kafka_BI/share/java/confluent-security/connect/gax-httpjson-0.101.0.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-server-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/swagger-annotations-2.1.10.jar:/u/kafka_BI/share/java/confluent-security/connect/bcpkix-fips-1.0.6.jar:/u/kafka_BI/share/java/confluent-security/connect/jsr305-3.0.2.jar:/u/kafka_BI/share/java/confluent-security/connect/auto-common-0.10.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-continuation-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/google-cloud-core-http-2.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/google-
http-client-jackson2-1.41.7.jar:/u/kafka_BI/share/java/confluent-security/connect/javax.annotation-api-1.3.2.jar:/u/kafka_BI/share/java/confluent-security/connect/javax.ws.rs-api-2.1.1.jar:/u/kafka_BI/share/java/confluent-security/connect/jose4j-0.9.3.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-reflect-1.7.0.jar:/u/kafka_BI/share/java/confluent-security/connect/javax.websocket-api-1.0.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlinpoet-1.12.0.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-avro-serializer-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/websocket-server-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/mina-core-2.2.1.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-util-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-resolver-dns-native-macos-4.1.92.Final-osx-aarch_64.jar:/u/kafka_BI/share/java/confluent-security/connect/threetenbp-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/jersey-server-2.36.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-schema-registry-client-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/wire-schema-jvm-4.4.3.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-all-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-servlets-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-client-plugins-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/auto-service-1.0-rc7.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-jaas-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/osgi-resource-locator-1.0.3.jar:/u/kafka_BI/share/java/confluent-security/connect/zookeeper-3.6.3.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-jaxrs-json-provider-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-stdlib-jdk8-1.6.10.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-security-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/aopalliance-repackaged-2.6.1.jar:/u/kafka_BI/share/java/confluent-security/connect/aws-java-sdk-kms-1.12.268.jar:/u/kafka_BI/share/java/confluent-security/connect/accessors-smart-2.4.9.jar:/u/kafka_BI/share/java/confluent-security/connect/javapoet-1.13.0.jar:/u/kafka_BI/share/java/confluent-security/connect/google-http-client-gson-1.41.7.jar:/u/kafka_BI/share/java/confluent-security/connect/google-cloud-core-2.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-annotations-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-connect-avro-converter-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/auth-metadata-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-stdlib-jdk7-1.6.10.jar:/u/kafka_BI/share/java/confluent-security/connect/javax-websocket-client-impl-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/hk2-locator-2.6.1.jar:/u/kafka_BI/share/java/confluent-security/connect/security-extensions-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-webapp-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-json-schema-provider-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/api-i18n-2.1.2.jar:/u/kafka_BI/share/java/confluent-security/connect/kotlin-scripting-compiler-embeddable-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/j
opt-simple-5.0.4.jar:/u/kafka_BI/share/java/confluent-security/connect/api-asn1-api-2.1.2.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-group-coordinator-7.4.1-ce.jar:/u/kafka_BI/share/java/confluent-security/connect/opencensus-contrib-http-util-0.31.0.jar:/u/kafka_BI/share/java/confluent-security/connect/cel-generated-antlr-0.3.5.jar:/u/kafka_BI/share/java/confluent-security/connect/hk2-api-2.6.1.jar:/u/kafka_BI/share/java/confluent-security/connect/error_prone_annotations-2.5.1.jar:/u/kafka_BI/share/java/confluent-security/connect/events-schema-0.117.0.jar:/u/kafka_BI/share/java/confluent-security/connect/asm-9.4.jar:/u/kafka_BI/share/java/confluent-security/connect/tink-1.6.0.jar:/u/kafka_BI/share/java/confluent-security/connect/commons-pool2-2.11.1.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-jaxrs-base-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/jakarta.el-api-4.0.0.jar:/u/kafka_BI/share/java/confluent-security/connect/paranamer-2.8.jar:/u/kafka_BI/share/java/confluent-security/connect/jcip-annotations-1.0-1.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-smtp-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-tcnative-boringssl-static-2.0.60.Final-osx-aarch_64.jar:/u/kafka_BI/share/java/confluent-security/connect/kafka-connect-protobuf-converter-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/protobuf-java-util-3.19.6.jar:/u/kafka_BI/share/java/confluent-security/connect/jmespath-java-1.12.268.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-datatype-guava-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/common-config-7.4.1.jar:/u/kafka_BI/share/java/confluent-security/connect/jersey-bean-validation-2.36.jar:/u/kafka_BI/share/java/confluent-security/connect/jetty-plus-9.4.51.v20230217.jar:/u/kafka_BI/share/java/confluent-security/connect/cel-core-0.3.5.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-codec-http-4.1.92.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/jackson-core-2.14.2.jar:/u/kafka_BI/share/java/confluent-security/connect/google-api-services-storage-v1-rev20220401-1.32.1.jar:/u/kafka_BI/share/java/confluent-security/connect/broker-plugins-7.4.1-ce-test.jar:/u/kafka_BI/share/java/confluent-security/connect/jboss-logging-3.3.2.Final.jar:/u/kafka_BI/share/java/confluent-security/connect/netty-tcnative-classes-2.0.60.Final.jar:/u/kafka_BI/share/java/kafka/connect-api-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/threetenbp-1.6.0.jar:/u/kafka_BI/share/java/kafka/grpc-context-1.45.1.jar:/u/kafka_BI/share/java/kafka/tink-1.6.0.jar:/u/kafka_BI/share/java/kafka/tink-gcpkms-1.6.0.jar:/u/kafka_BI/share/java/kafka/connect-ce-logs-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/trogdor-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/connect-json-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/woodstox-core-6.5.0.jar:/u/kafka_BI/share/java/kafka/groovy-3.0.19.jar:/u/kafka_BI/share/java/kafka/zookeeper-3.6.4.jar:/u/kafka_BI/share/java/kafka/connect-runtime-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/zookeeper-jute-3.6.4.jar:/u/kafka_BI/share/java/kafka/connect-mirror-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/zstd-jni-1.5.2-1.jar:/u/kafka_BI/share/java/kafka/accessors-smart-2.4.9.jar:/u/kafka_BI/share/java/kafka/activation-1.1.1.jar:/u/kafka_BI/share/java/kafka/groovy-jsr223-3.0.19.jar:/u/kafka_BI/share/java/kafka/agrona-1.15.2.jar:/u/kafka_BI/share/java/kafka/annotations-3.0.1.jar:/u/kafka_BI/share/java/kafka/annotations-13.0.jar:/u/kafka_BI/share/java/kafka/aop
alliance-repackaged-2.6.1.jar:/u/kafka_BI/share/java/kafka/api-common-2.1.5.jar:/u/kafka_BI/share/java/kafka/argparse4j-0.7.0.jar:/u/kafka_BI/share/java/kafka/hk2-api-2.6.1.jar:/u/kafka_BI/share/java/kafka/asm-9.3.jar:/u/kafka_BI/share/java/kafka/audience-annotations-0.13.0.jar:/u/kafka_BI/share/java/kafka/auth-metadata-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/authorizer-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/connect-mirror-client-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/auth-providers-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/auto-common-0.10.jar:/u/kafka_BI/share/java/kafka/auto-service-1.0-rc7.jar:/u/kafka_BI/share/java/kafka/auto-service-annotations-1.0-rc7.jar:/u/kafka_BI/share/java/kafka/auto-value-annotations-1.9.jar:/u/kafka_BI/share/java/kafka/cloudevents-kafka-2.3.0.jar:/u/kafka_BI/share/java/kafka/aws-java-sdk-core-1.12.268.jar:/u/kafka_BI/share/java/kafka/cloudevents-protobuf-2.3.0.jar:/u/kafka_BI/share/java/kafka/aws-java-sdk-kms-1.12.268.jar:/u/kafka_BI/share/java/kafka/commons-cli-1.4.jar:/u/kafka_BI/share/java/kafka/kafka.jar:/u/kafka_BI/share/java/kafka/aws-java-sdk-s3-1.12.268.jar:/u/kafka_BI/share/java/kafka/confluent-resource-names-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/aws-java-sdk-sts-1.12.268.jar:/u/kafka_BI/share/java/kafka/connect-transforms-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/azure-core-1.35.0.jar:/u/kafka_BI/share/java/kafka/azure-core-http-netty-1.12.8.jar:/u/kafka_BI/share/java/kafka/content-type-2.2.jar:/u/kafka_BI/share/java/kafka/azure-identity-1.7.3.jar:/u/kafka_BI/share/java/kafka/commons-codec-1.15.jar:/u/kafka_BI/share/java/kafka/azure-storage-blob-12.12.0.jar:/u/kafka_BI/share/java/kafka/commons-pool2-2.11.1.jar:/u/kafka_BI/share/java/kafka/azure-storage-common-12.12.0.jar:/u/kafka_BI/share/java/kafka/azure-storage-internal-avro-12.0.5.jar:/u/kafka_BI/share/java/kafka/guava-32.0.1-jre.jar:/u/kafka_BI/share/java/kafka/bc-fips-1.0.2.3.jar:/u/kafka_BI/share/java/kafka/bson-4.11.1.jar:/u/kafka_BI/share/java/kafka/bcpkix-fips-1.0.6.jar:/u/kafka_BI/share/java/kafka/bctls-fips-1.0.13.jar:/u/kafka_BI/share/java/kafka/debezium-scripting-1.3.1.jar:/u/kafka_BI/share/java/kafka/broker-plugins-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/gson-2.9.0.jar:/u/kafka_BI/share/java/kafka/cel-core-0.3.5.jar:/u/kafka_BI/share/java/kafka/confluent-audit-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/cel-generated-antlr-0.3.5.jar:/u/kafka_BI/share/java/kafka/error_prone_annotations-2.19.1.jar:/u/kafka_BI/share/java/kafka/cel-generated-pb-0.3.5.jar:/u/kafka_BI/share/java/kafka/flatbuffers-java-2.0.3.jar:/u/kafka_BI/share/java/kafka/ce-sbk_2.13-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/events-schema-0.117.0.jar:/u/kafka_BI/share/java/kafka/checker-qual-3.33.0.jar:/u/kafka_BI/share/java/kafka/failureaccess-1.0.1.jar:/u/kafka_BI/share/java/kafka/classgraph-4.8.138.jar:/u/kafka_BI/share/java/kafka/gax-httpjson-0.101.0.jar:/u/kafka_BI/share/java/kafka/client-java-14.0.0.jar:/u/kafka_BI/share/java/kafka/gax-2.16.0.jar:/u/kafka_BI/share/java/kafka/client-java-api-14.0.0.jar:/u/kafka_BI/share/java/kafka/confluent-licensing-new-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/client-java-proto-14.0.0.jar:/u/kafka_BI/share/java/kafka/cloudevents-api-2.3.0.jar:/u/kafka_BI/share/java/kafka/cloudevents-core-2.3.0.jar:/u/kafka_BI/share/java/kafka/cloudevents-json-jackson-2.3.0.jar:/u/kafka_BI/share/java/kafka/commons-collections4-4.4.jar:/u/kafka_BI/share/java/kafka/google-cloud-storage-2.6.1.jar:/u/kafka_BI/share/java/kafka/commons-compress-1.21.jar:/u/kafka_BI/share/java/kafka/google-api-cli
ent-1.34.0.jar:/u/kafka_BI/share/java/kafka/commons-io-2.11.0.jar:/u/kafka_BI/share/java/kafka/google-cloud-core-2.6.0.jar:/u/kafka_BI/share/java/kafka/commons-lang3-3.11.jar:/u/kafka_BI/share/java/kafka/commons-logging-1.2.jar:/u/kafka_BI/share/java/kafka/google-cloud-core-http-2.6.0.jar:/u/kafka_BI/share/java/kafka/commons-math3-3.6.1.jar:/u/kafka_BI/share/java/kafka/gson-fire-1.8.5.jar:/u/kafka_BI/share/java/kafka/jetty-io-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/confluent-serializers-new-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/connect-basic-auth-extension-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/connector-datapreview-extension-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/lang-tag-1.6.jar:/u/kafka_BI/share/java/kafka/jetty-http-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/google-api-services-cloudkms-v1-rev108-1.25.0.jar:/u/kafka_BI/share/java/kafka/jetty-client-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/google-api-services-storage-v1-rev20220401-1.32.1.jar:/u/kafka_BI/share/java/kafka/google-auth-library-credentials-1.6.0.jar:/u/kafka_BI/share/java/kafka/jetty-security-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/google-auth-library-oauth2-http-1.6.0.jar:/u/kafka_BI/share/java/kafka/jetty-servlet-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/google-http-client-1.41.7.jar:/u/kafka_BI/share/java/kafka/google-http-client-apache-v2-1.41.7.jar:/u/kafka_BI/share/java/kafka/google-http-client-appengine-1.41.7.jar:/u/kafka_BI/share/java/kafka/google-http-client-gson-1.41.7.jar:/u/kafka_BI/share/java/kafka/google-http-client-jackson2-1.41.7.jar:/u/kafka_BI/share/java/kafka/google-oauth-client-1.33.3.jar:/u/kafka_BI/share/java/kafka/jna-platform-5.6.0.jar:/u/kafka_BI/share/java/kafka/hk2-locator-2.6.1.jar:/u/kafka_BI/share/java/kafka/kafka-clients-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/hk2-utils-2.6.1.jar:/u/kafka_BI/share/java/kafka/jna-5.6.0.jar:/u/kafka_BI/share/java/kafka/httpclient-4.5.14.jar:/u/kafka_BI/share/java/kafka/kafka-metadata-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/httpcore-4.4.16.jar:/u/kafka_BI/share/java/kafka/internal-rest-server-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/kafka-raft-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/ion-java-1.0.2.jar:/u/kafka_BI/share/java/kafka/j2objc-annotations-2.8.jar:/u/kafka_BI/share/java/kafka/jackson-annotations-2.14.2.jar:/u/kafka_BI/share/java/kafka/joda-time-2.9.9.jar:/u/kafka_BI/share/java/kafka/jackson-core-2.14.2.jar:/u/kafka_BI/share/java/kafka/json-smart-2.4.10.jar:/u/kafka_BI/share/java/kafka/jackson-databind-2.14.2.jar:/u/kafka_BI/share/java/kafka/jackson-dataformat-cbor-2.14.2.jar:/u/kafka_BI/share/java/kafka/jetty-servlets-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/jackson-dataformat-csv-2.14.2.jar:/u/kafka_BI/share/java/kafka/jackson-dataformat-properties-2.14.2.jar:/u/kafka_BI/share/java/kafka/jetty-util-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/jackson-dataformat-xml-2.14.2.jar:/u/kafka_BI/share/java/kafka/jackson-dataformat-yaml-2.14.2.jar:/u/kafka_BI/share/java/kafka/jackson-datatype-jdk8-2.14.2.jar:/u/kafka_BI/share/java/kafka/jmespath-java-1.12.268.jar:/u/kafka_BI/share/java/kafka/jackson-datatype-jsr310-2.14.2.jar:/u/kafka_BI/share/java/kafka/jackson-datatype-protobuf-0.9.11-jackson2.9.jar:/u/kafka_BI/share/java/kafka/jackson-jaxrs-base-2.14.2.jar:/u/kafka_BI/share/java/kafka/jackson-jaxrs-json-provider-2.14.2.jar:/u/kafka_BI/share/java/kafka/jackson-module-jaxb-annotations-2.14.2.jar:/u/kafka_BI/share/java/kafka/jetty-server-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/j
ackson-module-scala_2.13-2.14.2.jar:/u/kafka_BI/share/java/kafka/jakarta.activation-api-1.2.2.jar:/u/kafka_BI/share/java/kafka/jakarta.annotation-api-1.3.5.jar:/u/kafka_BI/share/java/kafka/jakarta.inject-2.6.1.jar:/u/kafka_BI/share/java/kafka/jakarta.validation-api-2.0.2.jar:/u/kafka_BI/share/java/kafka/jopt-simple-5.0.4.jar:/u/kafka_BI/share/java/kafka/jakarta.ws.rs-api-2.1.6.jar:/u/kafka_BI/share/java/kafka/jline-3.22.0.jar:/u/kafka_BI/share/java/kafka/jakarta.xml.bind-api-2.3.3.jar:/u/kafka_BI/share/java/kafka/jose4j-0.9.3.jar:/u/kafka_BI/share/java/kafka/javassist-3.29.2-GA.jar:/u/kafka_BI/share/java/kafka/javax.activation-api-1.2.0.jar:/u/kafka_BI/share/java/kafka/javax.annotation-api-1.3.2.jar:/u/kafka_BI/share/java/kafka/javax.servlet-api-3.1.0.jar:/u/kafka_BI/share/java/kafka/jsr305-3.0.2.jar:/u/kafka_BI/share/java/kafka/javax.ws.rs-api-2.1.1.jar:/u/kafka_BI/share/java/kafka/kafka-shell-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/jaxb-api-2.3.1.jar:/u/kafka_BI/share/java/kafka/jbcrypt-0.4.jar:/u/kafka_BI/share/java/kafka/jcip-annotations-1.0.jar:/u/kafka_BI/share/java/kafka/jcip-annotations-1.0-1.jar:/u/kafka_BI/share/java/kafka/jsr305-3.0.1.jar:/u/kafka_BI/share/java/kafka/jersey-client-2.39.1.jar:/u/kafka_BI/share/java/kafka/kafka-client-plugins-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/jersey-common-2.39.1.jar:/u/kafka_BI/share/java/kafka/jersey-container-servlet-2.39.1.jar:/u/kafka_BI/share/java/kafka/jersey-container-servlet-core-2.39.1.jar:/u/kafka_BI/share/java/kafka/jersey-hk2-2.39.1.jar:/u/kafka_BI/share/java/kafka/kafka_2.13-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/jersey-server-2.39.1.jar:/u/kafka_BI/share/java/kafka/jetty-continuation-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/jetty-util-ajax-9.4.51.v20230217.jar:/u/kafka_BI/share/java/kafka/lz4-java-1.8.0.jar:/u/kafka_BI/share/java/kafka/kafka-group-coordinator-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/kafka-log4j-appender-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/netty-transport-rxtx-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/kafka-server-common-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/opencensus-proto-0.2.0.jar:/u/kafka_BI/share/java/kafka/kafka-storage-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/kafka-storage-api-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/osgi-resource-locator-1.0.3.jar:/u/kafka_BI/share/java/kafka/kafka-streams-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/kafka-streams-examples-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/paranamer-2.8.jar:/u/kafka_BI/share/java/kafka/netty-transport-native-unix-common-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/kafka-streams-scala_2.13-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/kafka-streams-test-utils-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/kafka-tools-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/protobuf-java-util-3.19.6.jar:/u/kafka_BI/share/java/kafka/kotlin-stdlib-1.6.0.jar:/u/kafka_BI/share/java/kafka/netty-transport-sctp-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/kotlin-stdlib-common-1.6.0.jar:/u/kafka_BI/share/java/kafka/kotlin-stdlib-jdk7-1.6.0.jar:/u/kafka_BI/share/java/kafka/kotlin-stdlib-jdk8-1.6.0.jar:/u/kafka_BI/share/java/kafka/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/u/kafka_BI/share/java/kafka/logging-interceptor-4.9.1.jar:/u/kafka_BI/share/java/kafka/maven-artifact-3.8.4.jar:/u/kafka_BI/share/java/kafka/metrics-core-2.2.0.jar:/u/kafka_BI/share/java/kafka/plexus-utils-3.3.0.jar:/u/kafka_BI/share/java/kafka/metrics-core-4.1.12.1.jar:/u/kafka_BI/share/java/kafka/protobuf-java-3.19.6.jar:/u/kafka_BI/share/java/kafka/msal4j-1
.13.3.jar:/u/kafka_BI/share/java/kafka/msal4j-persistence-extension-1.1.0.jar:/u/kafka_BI/share/java/kafka/netty-all-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-transport-udt-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-buffer-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/oauth2-oidc-sdk-9.35.jar:/u/kafka_BI/share/java/kafka/netty-codec-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-codec-dns-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-codec-haproxy-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/nimbus-jose-jwt-9.24.jar:/u/kafka_BI/share/java/kafka/netty-codec-http2-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/okhttp-4.9.3.jar:/u/kafka_BI/share/java/kafka/netty-codec-http-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-codec-memcache-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-codec-mqtt-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-codec-redis-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-codec-smtp-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/okio-jvm-3.0.0.jar:/u/kafka_BI/share/java/kafka/netty-codec-socks-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-codec-stomp-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-codec-xml-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/opencensus-contrib-http-util-0.31.0.jar:/u/kafka_BI/share/java/kafka/netty-common-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/opencensus-api-0.31.0.jar:/u/kafka_BI/share/java/kafka/netty-handler-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-handler-proxy-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-handler-ssl-ocsp-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-resolver-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/opentelemetry-proto-0.19.0-alpha.jar:/u/kafka_BI/share/java/kafka/netty-resolver-dns-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-resolver-dns-classes-macos-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-resolver-dns-native-macos-4.1.92.Final-osx-aarch_64.jar:/u/kafka_BI/share/java/kafka/netty-resolver-dns-native-macos-4.1.92.Final-osx-x86_64.jar:/u/kafka_BI/share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final.jar:/u/kafka_BI/share/java/kafka/netty-tcnative-classes-2.0.60.Final.jar:/u/kafka_BI/share/java/kafka/reload4j-1.2.25.jar:/u/kafka_BI/share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-linux-aarch_64.jar:/u/kafka_BI/share/java/kafka/netty-transport-native-epoll-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-linux-x86_64.jar:/u/kafka_BI/share/java/kafka/netty-transport-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/rbac-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-osx-aarch_64.jar:/u/kafka_BI/share/java/kafka/netty-transport-native-epoll-4.1.92.Final-linux-aarch_64.jar:/u/kafka_BI/share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-osx-x86_64.jar:/u/kafka_BI/share/java/kafka/netty-transport-classes-epoll-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-windows-x86_64.jar:/u/kafka_BI/share/java/kafka/netty-transport-classes-kqueue-4.1.92.Final.jar:/u/kafka_BI/share/java/kafka/netty-transport-native-epoll-4.1.92.Final-linux-x86_64.jar:/u/kafka_BI/share/java/kafka/netty-transport-native-kqueue-4.1.92.Final-osx-aarch_64.jar:/u/kafka_BI/share/java/kafka/netty-transport-native-kqueue-4.1.92.Final-osx-x86_64.jar:/u/kafka_BI/share/java/kafka/proto-google-common-protos-2.8.3.jar:/u/kafka_BI/share/java/kafka/proto-google-iam-v1-1.3.1.jar:/u/kafka_BI/share/java/kafka/reactiv
e-streams-1.0.4.jar:/u/kafka_BI/share/java/kafka/reactor-core-3.4.26.jar:/u/kafka_BI/share/java/kafka/reactor-netty-core-1.0.26.jar:/u/kafka_BI/share/java/kafka/reactor-netty-http-1.0.26.jar:/u/kafka_BI/share/java/kafka/reflections-0.9.12.jar:/u/kafka_BI/share/java/kafka/rest-authorizer-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/rocksdbjni-7.1.2.jar:/u/kafka_BI/share/java/kafka/scala-collection-compat_2.13-2.10.0.jar:/u/kafka_BI/share/java/kafka/scala-java8-compat_2.13-1.0.2.jar:/u/kafka_BI/share/java/kafka/scala-library-2.13.10.jar:/u/kafka_BI/share/java/kafka/scala-logging_2.13-3.9.4.jar:/u/kafka_BI/share/java/kafka/scala-reflect-2.13.10.jar:/u/kafka_BI/share/java/kafka/security-extensions-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/simpleclient_common-0.12.0.jar:/u/kafka_BI/share/java/kafka/simpleclient_httpserver-0.12.0.jar:/u/kafka_BI/share/java/kafka/simpleclient_tracer_common-0.12.0.jar:/u/kafka_BI/share/java/kafka/simpleclient_tracer_otel_agent-0.12.0.jar:/u/kafka_BI/share/java/kafka/simpleclient_tracer_otel-0.12.0.jar:/u/kafka_BI/share/java/kafka/simpleclient-0.12.0.jar:/u/kafka_BI/share/java/kafka/slf4j-api-1.7.21.jar:/u/kafka_BI/share/java/kafka/slf4j-api-1.7.30.jar:/u/kafka_BI/share/java/kafka/slf4j-api-1.7.36.jar:/u/kafka_BI/share/java/kafka/slf4j-reload4j-1.7.36.jar:/u/kafka_BI/share/java/kafka/snakeyaml-2.0.jar:/u/kafka_BI/share/java/kafka/snappy-java-1.1.10.1.jar:/u/kafka_BI/share/java/kafka/stax2-api-4.2.1.jar:/u/kafka_BI/share/java/kafka/swagger-annotations-1.6.3.jar:/u/kafka_BI/share/java/kafka/swagger-annotations-2.2.0.jar:/u/kafka_BI/share/java/kafka/swagger-core-2.2.0.jar:/u/kafka_BI/share/java/kafka/swagger-integration-2.2.0.jar:/u/kafka_BI/share/java/kafka/swagger-jaxrs2-2.2.0.jar:/u/kafka_BI/share/java/kafka/swagger-models-2.2.0.jar:/u/kafka_BI/share/java/kafka/telemetry-api-3.718.0.jar:/u/kafka_BI/share/java/kafka/telemetry-client-3.718.0.jar:/u/kafka_BI/share/java/kafka/telemetry-events-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/telemetry-events-api-7.4.1-ce.jar:/u/kafka_BI/share/java/kafka/ifx-changestream-client-1.1.3.jar:/u/kafka_BI/share/java/kafka/jdbc-4.50.10.jar:/u/kafka_BI/share/java/kafka/debezium-storage-kafka-2.6.0.Alpha2.jar:/u/kafka_BI/share/java/kafka/debezium-core-2.6.0.Alpha2.jar:/u/kafka_BI/share/java/kafka/debezium-api-2.6.0.Alpha2.jar:/u/kafka_BI/share/java/kafka/debezium-connector-informix-2.6.0.Alpha2.jar:/u/kafka_BI/share/java/kafka/debezium-storage-file-2.6.0.Alpha2.jar:/u/kafka_BI/share/java/kafka/TimestampConverter-1.2.3-SNAPSHOT.jar:/u/kafka_BI/share/java/confluent-common/build-tools-7.4.1.jar:/u/kafka_BI/share/java/confluent-common/common-utils-7.4.1.jar:/u/kafka_BI/share/java/confluent-common/common-metrics-7.4.1.jar:/u/kafka_BI/share/java/confluent-common/slf4j-api-1.7.36.jar:/u/kafka_BI/share/java/confluent-common/common-config-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/classgraph-4.8.21.jar:/u/kafka_BI/share/java/kafka-serde-tools/tink-1.9.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/aws-java-sdk-core-1.12.182.jar:/u/kafka_BI/share/java/kafka-serde-tools/failureaccess-1.0.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-dataformat-cbor-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/commons-compress-1.21.jar:/u/kafka_BI/share/java/kafka-serde-tools/scala-library-2.13.10.jar:/u/kafka_BI/share/java/kafka-serde-tools/cel-core-0.3.12.jar:/u/kafka_BI/share/java/kafka-serde-tools/minimal-json-0.9.5.jar:/u/kafka_BI/share/java/kafka-serde-tools/lang-tag-1.7.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-stdlib-common-1.6
.10.jar:/u/kafka_BI/share/java/kafka-serde-tools/google-http-client-gson-1.42.3.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-datatype-jsr310-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-codec-socks-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/content-type-2.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/ion-java-1.0.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-schema-registry-client-encryption-gcp-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/reactor-netty-core-1.0.28.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-json-serializer-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-schema-registry-client-encryption-azure-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/tink-gcpkms-1.8.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/proto-google-common-protos-2.5.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/aws-java-sdk-kms-1.12.182.jar:/u/kafka_BI/share/java/kafka-serde-tools/validation-api-2.0.1.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-protobuf-provider-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/cel-tools-0.3.12.jar:/u/kafka_BI/share/java/kafka-serde-tools/JSONata4Java-2.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/commons-validator-1.7.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-resolver-dns-native-macos-4.1.89.Final-osx-x86_64.jar:/u/kafka_BI/share/java/kafka-serde-tools/logredactor-1.0.12.jar:/u/kafka_BI/share/java/kafka-serde-tools/oauth2-oidc-sdk-10.7.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/okio-jvm-3.0.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-dataformat-xml-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-dataformat-yaml-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/azure-json-1.0.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-codec-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-schema-registry-client-encryption-hcvault-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/antlr4-runtime-4.11.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/snakeyaml-2.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/reactor-core-3.4.27.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-streams-protobuf-serde-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/commons-codec-1.15.jar:/u/kafka_BI/share/java/kafka-serde-tools/commons-logging-1.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/commons-collections-3.2.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-resolver-4.1.86.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-handler-4.1.86.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-protobuf-serializer-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/everit-json-schema-1.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-datatype-jdk8-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-transport-classes-kqueue-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-scripting-compiler-impl-embeddable-1.6.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-schema-rules-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/google-auth-library-credentials-1.5.3.jar:/u/kafka_BI/share/java/kafka-serde-tools/protobuf-java-3.19.6.jar:/u/kafka_BI/share/java/kafka-serde-tools/logredactor-metrics-1.0.12.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-transport-native-kqueue-4.1.89.Final-osx-x86_64.jar:/u/kafka_BI/share/java/kafka-serde-tools/google-http-client-apache-v2-1.42.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-tcnative-boringssl-static-2.0.56.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/commons-dige
ster-2.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/vault-java-driver-5.1.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/azure-core-http-netty-1.13.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-streams-7.4.1-ccs.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-datatype-joda-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/protoparser-4.0.3.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-tcnative-boringssl-static-2.0.56.Final-osx-aarch_64.jar:/u/kafka_BI/share/java/kafka-serde-tools/auto-value-annotations-1.9.jar:/u/kafka_BI/share/java/kafka-serde-tools/mbknor-jackson-jsonschema_2.13-1.0.39.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-protobuf-types-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/tink-awskms-1.8.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/handy-uri-templates-2.1.8.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-schema-registry-client-encryption-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/grpc-context-1.27.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-transport-4.1.86.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/httpcore-4.4.15.jar:/u/kafka_BI/share/java/kafka-serde-tools/annotations-3.0.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-module-parameter-names-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/reactor-netty-http-1.0.28.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-script-runtime-1.6.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/cel-generated-antlr-0.3.12.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-resolver-dns-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/cel-generated-pb-0.3.12.jar:/u/kafka_BI/share/java/kafka-serde-tools/wire-runtime-jvm-4.4.3.jar:/u/kafka_BI/share/java/kafka-serde-tools/json-20230227.jar:/u/kafka_BI/share/java/kafka-serde-tools/j2objc-annotations-1.3.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-tcnative-boringssl-static-2.0.56.Final-windows-x86_64.jar:/u/kafka_BI/share/java/kafka-serde-tools/re2j-1.6.jar:/u/kafka_BI/share/java/kafka-serde-tools/guava-30.1.1-jre.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-handler-proxy-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/httpclient-4.5.13.jar:/u/kafka_BI/share/java/kafka-serde-tools/okio-3.0.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/commons-lang3-3.12.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-connect-json-schema-converter-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-connect-avro-data-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-scripting-common-1.6.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/jmespath-java-1.12.182.jar:/u/kafka_BI/share/java/kafka-serde-tools/google-oauth-client-1.34.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/u/kafka_BI/share/java/kafka-serde-tools/json-smart-2.4.10.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-schema-serializer-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-databind-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-common-4.1.86.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/azure-identity-1.8.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/agrona-1.17.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-codec-dns-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/google-http-client-1.43.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-schema-converter-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/avro-1.11.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/slf4j-api-1.7.36.jar:/u/kafka_BI/share/java/kafka-
serde-tools/netty-transport-classes-epoll-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/rocksdbjni-7.1.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/woodstox-core-6.5.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-schema-registry-client-encryption-aws-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-stdlib-1.6.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/annotations-13.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/checker-qual-3.8.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-scripting-jvm-1.6.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-json-schema-serializer-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-transport-native-unix-common-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/gson-2.9.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/commons-text-1.10.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-transport-native-epoll-4.1.89.Final-linux-x86_64.jar:/u/kafka_BI/share/java/kafka-serde-tools/swagger-annotations-2.1.10.jar:/u/kafka_BI/share/java/kafka-serde-tools/jsr305-3.0.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/jna-platform-5.6.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-reflect-1.7.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-buffer-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlinpoet-1.12.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-avro-serializer-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-tcnative-boringssl-static-2.0.56.Final-osx-x86_64.jar:/u/kafka_BI/share/java/kafka-serde-tools/google-api-services-cloudkms-v1-rev20221107-2.0.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/joda-time-2.12.5.jar:/u/kafka_BI/share/java/kafka-serde-tools/google-api-client-1.35.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-schema-registry-client-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/wire-schema-jvm-4.4.3.jar:/u/kafka_BI/share/java/kafka-serde-tools/nimbus-jose-jwt-9.30.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-tcnative-boringssl-static-2.0.56.Final-linux-x86_64.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-streams-avro-serde-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-tcnative-classes-2.0.56.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-stdlib-jdk8-1.6.10.jar:/u/kafka_BI/share/java/kafka-serde-tools/cel-jackson-0.3.12.jar:/u/kafka_BI/share/java/kafka-serde-tools/accessors-smart-2.4.9.jar:/u/kafka_BI/share/java/kafka-serde-tools/javapoet-1.13.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-resolver-dns-classes-macos-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-tcnative-boringssl-static-2.0.56.Final-linux-aarch_64.jar:/u/kafka_BI/share/java/kafka-serde-tools/reactive-streams-1.0.4.jar:/u/kafka_BI/share/java/kafka-serde-tools/asm-9.3.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-annotations-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-connect-avro-converter-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-stdlib-jdk7-1.6.10.jar:/u/kafka_BI/share/java/kafka-serde-tools/opencensus-contrib-http-util-0.31.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/stax2-api-4.2.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-datatype-protobuf-0.9.13.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-dataformat-protobuf-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/auto-service-annotations-1.0.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-streams-json-schema-serde-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-json-schema-provider-7.4.1.jar:/u/kafk
a_BI/share/java/kafka-serde-tools/google-auth-library-oauth2-http-1.5.3.jar:/u/kafka_BI/share/java/kafka-serde-tools/azure-core-1.38.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/kotlin-scripting-compiler-embeddable-1.6.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-codec-http-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/error_prone_annotations-2.5.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/netty-codec-http2-4.1.89.Final.jar:/u/kafka_BI/share/java/kafka-serde-tools/jcip-annotations-1.0-1.jar:/u/kafka_BI/share/java/kafka-serde-tools/azure-security-keyvault-keys-4.6.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/kafka-connect-protobuf-converter-7.4.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/protobuf-java-util-3.19.6.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-datatype-guava-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/opencensus-api-0.31.1.jar:/u/kafka_BI/share/java/kafka-serde-tools/jackson-core-2.14.2.jar:/u/kafka_BI/share/java/kafka-serde-tools/msal4j-persistence-extension-1.1.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/jna-5.5.0.jar:/u/kafka_BI/share/java/kafka-serde-tools/msal4j-1.13.8.jar:/u/kafka_BI/share/java/monitoring-interceptors/monitoring-interceptors-7.4.1.jar:/u/kafka_BI/bin/../ce-broker-plugins/build/libs/*:/u/kafka_BI/bin/../ce-broker-plugins/build/dependant-libs/*:/u/kafka_BI/bin/../ce-auth-providers/build/libs/*:/u/kafka_BI/bin/../ce-auth-providers/build/dependant-libs/*:/u/kafka_BI/bin/../ce-rest-server/build/libs/*:/u/kafka_BI/bin/../ce-rest-server/build/dependant-libs/*:/u/kafka_BI/bin/../ce-audit/build/libs/*:/u/kafka_BI/bin/../ce-audit/build/dependant-libs/*:/u/kafka_BI/bin/../ce-authorizer/build/libs/*:/u/kafka_BI/bin/../ce-authorizer/build/dependant-libs/*:/u/kafka_BI/bin/../ce-licensing/build/libs/*:/u/kafka_BI/bin/../ce-licensing/build/dependant-libs/*:/u/kafka_BI/bin/../share/java/kafka/connect-api-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/threetenbp-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/grpc-context-1.45.1.jar:/u/kafka_BI/bin/../share/java/kafka/tink-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/tink-gcpkms-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/connect-ce-logs-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/trogdor-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/connect-json-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/woodstox-core-6.5.0.jar:/u/kafka_BI/bin/../share/java/kafka/groovy-3.0.19.jar:/u/kafka_BI/bin/../share/java/kafka/zookeeper-3.6.4.jar:/u/kafka_BI/bin/../share/java/kafka/connect-runtime-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/zookeeper-jute-3.6.4.jar:/u/kafka_BI/bin/../share/java/kafka/connect-mirror-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/zstd-jni-1.5.2-1.jar:/u/kafka_BI/bin/../share/java/kafka/accessors-smart-2.4.9.jar:/u/kafka_BI/bin/../share/java/kafka/activation-1.1.1.jar:/u/kafka_BI/bin/../share/java/kafka/groovy-jsr223-3.0.19.jar:/u/kafka_BI/bin/../share/java/kafka/agrona-1.15.2.jar:/u/kafka_BI/bin/../share/java/kafka/annotations-3.0.1.jar:/u/kafka_BI/bin/../share/java/kafka/annotations-13.0.jar:/u/kafka_BI/bin/../share/java/kafka/aopalliance-repackaged-2.6.1.jar:/u/kafka_BI/bin/../share/java/kafka/api-common-2.1.5.jar:/u/kafka_BI/bin/../share/java/kafka/argparse4j-0.7.0.jar:/u/kafka_BI/bin/../share/java/kafka/hk2-api-2.6.1.jar:/u/kafka_BI/bin/../share/java/kafka/asm-9.3.jar:/u/kafka_BI/bin/../share/java/kafka/audience-annotations-0.13.0.jar:/u/kafka_BI/bin/../share/java/kafka/auth-metadata-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/authorizer
-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/connect-mirror-client-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/auth-providers-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/auto-common-0.10.jar:/u/kafka_BI/bin/../share/java/kafka/auto-service-1.0-rc7.jar:/u/kafka_BI/bin/../share/java/kafka/auto-service-annotations-1.0-rc7.jar:/u/kafka_BI/bin/../share/java/kafka/auto-value-annotations-1.9.jar:/u/kafka_BI/bin/../share/java/kafka/cloudevents-kafka-2.3.0.jar:/u/kafka_BI/bin/../share/java/kafka/aws-java-sdk-core-1.12.268.jar:/u/kafka_BI/bin/../share/java/kafka/cloudevents-protobuf-2.3.0.jar:/u/kafka_BI/bin/../share/java/kafka/aws-java-sdk-kms-1.12.268.jar:/u/kafka_BI/bin/../share/java/kafka/commons-cli-1.4.jar:/u/kafka_BI/bin/../share/java/kafka/kafka.jar:/u/kafka_BI/bin/../share/java/kafka/aws-java-sdk-s3-1.12.268.jar:/u/kafka_BI/bin/../share/java/kafka/confluent-resource-names-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/aws-java-sdk-sts-1.12.268.jar:/u/kafka_BI/bin/../share/java/kafka/connect-transforms-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/azure-core-1.35.0.jar:/u/kafka_BI/bin/../share/java/kafka/azure-core-http-netty-1.12.8.jar:/u/kafka_BI/bin/../share/java/kafka/content-type-2.2.jar:/u/kafka_BI/bin/../share/java/kafka/azure-identity-1.7.3.jar:/u/kafka_BI/bin/../share/java/kafka/commons-codec-1.15.jar:/u/kafka_BI/bin/../share/java/kafka/azure-storage-blob-12.12.0.jar:/u/kafka_BI/bin/../share/java/kafka/commons-pool2-2.11.1.jar:/u/kafka_BI/bin/../share/java/kafka/azure-storage-common-12.12.0.jar:/u/kafka_BI/bin/../share/java/kafka/azure-storage-internal-avro-12.0.5.jar:/u/kafka_BI/bin/../share/java/kafka/guava-32.0.1-jre.jar:/u/kafka_BI/bin/../share/java/kafka/bc-fips-1.0.2.3.jar:/u/kafka_BI/bin/../share/java/kafka/bson-4.11.1.jar:/u/kafka_BI/bin/../share/java/kafka/bcpkix-fips-1.0.6.jar:/u/kafka_BI/bin/../share/java/kafka/bctls-fips-1.0.13.jar:/u/kafka_BI/bin/../share/java/kafka/debezium-scripting-1.3.1.jar:/u/kafka_BI/bin/../share/java/kafka/broker-plugins-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/gson-2.9.0.jar:/u/kafka_BI/bin/../share/java/kafka/cel-core-0.3.5.jar:/u/kafka_BI/bin/../share/java/kafka/confluent-audit-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/cel-generated-antlr-0.3.5.jar:/u/kafka_BI/bin/../share/java/kafka/error_prone_annotations-2.19.1.jar:/u/kafka_BI/bin/../share/java/kafka/cel-generated-pb-0.3.5.jar:/u/kafka_BI/bin/../share/java/kafka/flatbuffers-java-2.0.3.jar:/u/kafka_BI/bin/../share/java/kafka/ce-sbk_2.13-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/events-schema-0.117.0.jar:/u/kafka_BI/bin/../share/java/kafka/checker-qual-3.33.0.jar:/u/kafka_BI/bin/../share/java/kafka/failureaccess-1.0.1.jar:/u/kafka_BI/bin/../share/java/kafka/classgraph-4.8.138.jar:/u/kafka_BI/bin/../share/java/kafka/gax-httpjson-0.101.0.jar:/u/kafka_BI/bin/../share/java/kafka/client-java-14.0.0.jar:/u/kafka_BI/bin/../share/java/kafka/gax-2.16.0.jar:/u/kafka_BI/bin/../share/java/kafka/client-java-api-14.0.0.jar:/u/kafka_BI/bin/../share/java/kafka/confluent-licensing-new-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/client-java-proto-14.0.0.jar:/u/kafka_BI/bin/../share/java/kafka/cloudevents-api-2.3.0.jar:/u/kafka_BI/bin/../share/java/kafka/cloudevents-core-2.3.0.jar:/u/kafka_BI/bin/../share/java/kafka/cloudevents-json-jackson-2.3.0.jar:/u/kafka_BI/bin/../share/java/kafka/commons-collections4-4.4.jar:/u/kafka_BI/bin/../share/java/kafka/google-cloud-storage-2.6.1.jar:/u/kafka_BI/bin/../share/java/kafka/commons-compress-1.21.jar:/u/kafka_BI/bin/../share/
java/kafka/google-api-client-1.34.0.jar:/u/kafka_BI/bin/../share/java/kafka/commons-io-2.11.0.jar:/u/kafka_BI/bin/../share/java/kafka/google-cloud-core-2.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/commons-lang3-3.11.jar:/u/kafka_BI/bin/../share/java/kafka/commons-logging-1.2.jar:/u/kafka_BI/bin/../share/java/kafka/google-cloud-core-http-2.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/commons-math3-3.6.1.jar:/u/kafka_BI/bin/../share/java/kafka/gson-fire-1.8.5.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-io-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/confluent-serializers-new-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/connect-basic-auth-extension-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/connector-datapreview-extension-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/lang-tag-1.6.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-http-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/google-api-services-cloudkms-v1-rev108-1.25.0.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-client-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/google-api-services-storage-v1-rev20220401-1.32.1.jar:/u/kafka_BI/bin/../share/java/kafka/google-auth-library-credentials-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-security-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/google-auth-library-oauth2-http-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-servlet-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/google-http-client-1.41.7.jar:/u/kafka_BI/bin/../share/java/kafka/google-http-client-apache-v2-1.41.7.jar:/u/kafka_BI/bin/../share/java/kafka/google-http-client-appengine-1.41.7.jar:/u/kafka_BI/bin/../share/java/kafka/google-http-client-gson-1.41.7.jar:/u/kafka_BI/bin/../share/java/kafka/google-http-client-jackson2-1.41.7.jar:/u/kafka_BI/bin/../share/java/kafka/google-oauth-client-1.33.3.jar:/u/kafka_BI/bin/../share/java/kafka/jna-platform-5.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/hk2-locator-2.6.1.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-clients-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/hk2-utils-2.6.1.jar:/u/kafka_BI/bin/../share/java/kafka/jna-5.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/httpclient-4.5.14.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-metadata-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/httpcore-4.4.16.jar:/u/kafka_BI/bin/../share/java/kafka/internal-rest-server-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-raft-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/ion-java-1.0.2.jar:/u/kafka_BI/bin/../share/java/kafka/j2objc-annotations-2.8.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-annotations-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/joda-time-2.9.9.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-core-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/json-smart-2.4.10.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-databind-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-dataformat-cbor-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-servlets-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-dataformat-csv-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-dataformat-properties-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-util-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-dataformat-xml-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-dataformat-yaml-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-datatype-jdk8-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jmespath-java-1.12.268.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-
datatype-jsr310-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-datatype-protobuf-0.9.11-jackson2.9.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-jaxrs-base-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-jaxrs-json-provider-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-module-jaxb-annotations-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/jackson-module-scala_2.13-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka/jakarta.activation-api-1.2.2.jar:/u/kafka_BI/bin/../share/java/kafka/jakarta.annotation-api-1.3.5.jar:/u/kafka_BI/bin/../share/java/kafka/jakarta.inject-2.6.1.jar:/u/kafka_BI/bin/../share/java/kafka/jakarta.validation-api-2.0.2.jar:/u/kafka_BI/bin/../share/java/kafka/jopt-simple-5.0.4.jar:/u/kafka_BI/bin/../share/java/kafka/jakarta.ws.rs-api-2.1.6.jar:/u/kafka_BI/bin/../share/java/kafka/jline-3.22.0.jar:/u/kafka_BI/bin/../share/java/kafka/jakarta.xml.bind-api-2.3.3.jar:/u/kafka_BI/bin/../share/java/kafka/jose4j-0.9.3.jar:/u/kafka_BI/bin/../share/java/kafka/javassist-3.29.2-GA.jar:/u/kafka_BI/bin/../share/java/kafka/javax.activation-api-1.2.0.jar:/u/kafka_BI/bin/../share/java/kafka/javax.annotation-api-1.3.2.jar:/u/kafka_BI/bin/../share/java/kafka/javax.servlet-api-3.1.0.jar:/u/kafka_BI/bin/../share/java/kafka/jsr305-3.0.2.jar:/u/kafka_BI/bin/../share/java/kafka/javax.ws.rs-api-2.1.1.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-shell-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/jaxb-api-2.3.1.jar:/u/kafka_BI/bin/../share/java/kafka/jbcrypt-0.4.jar:/u/kafka_BI/bin/../share/java/kafka/jcip-annotations-1.0.jar:/u/kafka_BI/bin/../share/java/kafka/jcip-annotations-1.0-1.jar:/u/kafka_BI/bin/../share/java/kafka/jsr305-3.0.1.jar:/u/kafka_BI/bin/../share/java/kafka/jersey-client-2.39.1.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-client-plugins-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/jersey-common-2.39.1.jar:/u/kafka_BI/bin/../share/java/kafka/jersey-container-servlet-2.39.1.jar:/u/kafka_BI/bin/../share/java/kafka/jersey-container-servlet-core-2.39.1.jar:/u/kafka_BI/bin/../share/java/kafka/jersey-hk2-2.39.1.jar:/u/kafka_BI/bin/../share/java/kafka/kafka_2.13-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/jersey-server-2.39.1.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-continuation-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/jetty-util-ajax-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/kafka/lz4-java-1.8.0.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-group-coordinator-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-log4j-appender-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-rxtx-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-server-common-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/opencensus-proto-0.2.0.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-storage-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-storage-api-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/osgi-resource-locator-1.0.3.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-streams-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-streams-examples-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/paranamer-2.8.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-native-unix-common-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-streams-scala_2.13-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-streams-test-utils-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/kafka-tools-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/protobu
f-java-util-3.19.6.jar:/u/kafka_BI/bin/../share/java/kafka/kotlin-stdlib-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-sctp-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/kotlin-stdlib-common-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/kotlin-stdlib-jdk7-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/kotlin-stdlib-jdk8-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/u/kafka_BI/bin/../share/java/kafka/logging-interceptor-4.9.1.jar:/u/kafka_BI/bin/../share/java/kafka/maven-artifact-3.8.4.jar:/u/kafka_BI/bin/../share/java/kafka/metrics-core-2.2.0.jar:/u/kafka_BI/bin/../share/java/kafka/plexus-utils-3.3.0.jar:/u/kafka_BI/bin/../share/java/kafka/metrics-core-4.1.12.1.jar:/u/kafka_BI/bin/../share/java/kafka/protobuf-java-3.19.6.jar:/u/kafka_BI/bin/../share/java/kafka/msal4j-1.13.3.jar:/u/kafka_BI/bin/../share/java/kafka/msal4j-persistence-extension-1.1.0.jar:/u/kafka_BI/bin/../share/java/kafka/netty-all-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-udt-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-buffer-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/oauth2-oidc-sdk-9.35.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-dns-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-haproxy-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/nimbus-jose-jwt-9.24.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-http2-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/okhttp-4.9.3.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-http-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-memcache-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-mqtt-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-redis-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-smtp-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/okio-jvm-3.0.0.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-socks-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-stomp-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-codec-xml-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/opencensus-contrib-http-util-0.31.0.jar:/u/kafka_BI/bin/../share/java/kafka/netty-common-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/opencensus-api-0.31.0.jar:/u/kafka_BI/bin/../share/java/kafka/netty-handler-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-handler-proxy-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-handler-ssl-ocsp-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-resolver-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/opentelemetry-proto-0.19.0-alpha.jar:/u/kafka_BI/bin/../share/java/kafka/netty-resolver-dns-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-resolver-dns-classes-macos-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-resolver-dns-native-macos-4.1.92.Final-osx-aarch_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-resolver-dns-native-macos-4.1.92.Final-osx-x86_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-tcnative-classes-2.0.60.Final.jar:/u/kafka_BI/bin/../share/java/kafka/reload4j-1.2.25.jar:/u/kafka_BI/bin/../share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-linux-aarch_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-native-epoll-4.1.92.Final.jar:/u/kafka_BI/bin/.
./share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-linux-x86_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/rbac-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-osx-aarch_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-native-epoll-4.1.92.Final-linux-aarch_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-osx-x86_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-classes-epoll-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-tcnative-boringssl-static-2.0.60.Final-windows-x86_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-classes-kqueue-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-native-epoll-4.1.92.Final-linux-x86_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-native-kqueue-4.1.92.Final-osx-aarch_64.jar:/u/kafka_BI/bin/../share/java/kafka/netty-transport-native-kqueue-4.1.92.Final-osx-x86_64.jar:/u/kafka_BI/bin/../share/java/kafka/proto-google-common-protos-2.8.3.jar:/u/kafka_BI/bin/../share/java/kafka/proto-google-iam-v1-1.3.1.jar:/u/kafka_BI/bin/../share/java/kafka/reactive-streams-1.0.4.jar:/u/kafka_BI/bin/../share/java/kafka/reactor-core-3.4.26.jar:/u/kafka_BI/bin/../share/java/kafka/reactor-netty-core-1.0.26.jar:/u/kafka_BI/bin/../share/java/kafka/reactor-netty-http-1.0.26.jar:/u/kafka_BI/bin/../share/java/kafka/reflections-0.9.12.jar:/u/kafka_BI/bin/../share/java/kafka/rest-authorizer-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/rocksdbjni-7.1.2.jar:/u/kafka_BI/bin/../share/java/kafka/scala-collection-compat_2.13-2.10.0.jar:/u/kafka_BI/bin/../share/java/kafka/scala-java8-compat_2.13-1.0.2.jar:/u/kafka_BI/bin/../share/java/kafka/scala-library-2.13.10.jar:/u/kafka_BI/bin/../share/java/kafka/scala-logging_2.13-3.9.4.jar:/u/kafka_BI/bin/../share/java/kafka/scala-reflect-2.13.10.jar:/u/kafka_BI/bin/../share/java/kafka/security-extensions-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/simpleclient_common-0.12.0.jar:/u/kafka_BI/bin/../share/java/kafka/simpleclient_httpserver-0.12.0.jar:/u/kafka_BI/bin/../share/java/kafka/simpleclient_tracer_common-0.12.0.jar:/u/kafka_BI/bin/../share/java/kafka/simpleclient_tracer_otel_agent-0.12.0.jar:/u/kafka_BI/bin/../share/java/kafka/simpleclient_tracer_otel-0.12.0.jar:/u/kafka_BI/bin/../share/java/kafka/simpleclient-0.12.0.jar:/u/kafka_BI/bin/../share/java/kafka/slf4j-api-1.7.21.jar:/u/kafka_BI/bin/../share/java/kafka/slf4j-api-1.7.30.jar:/u/kafka_BI/bin/../share/java/kafka/slf4j-api-1.7.36.jar:/u/kafka_BI/bin/../share/java/kafka/slf4j-reload4j-1.7.36.jar:/u/kafka_BI/bin/../share/java/kafka/snakeyaml-2.0.jar:/u/kafka_BI/bin/../share/java/kafka/snappy-java-1.1.10.1.jar:/u/kafka_BI/bin/../share/java/kafka/stax2-api-4.2.1.jar:/u/kafka_BI/bin/../share/java/kafka/swagger-annotations-1.6.3.jar:/u/kafka_BI/bin/../share/java/kafka/swagger-annotations-2.2.0.jar:/u/kafka_BI/bin/../share/java/kafka/swagger-core-2.2.0.jar:/u/kafka_BI/bin/../share/java/kafka/swagger-integration-2.2.0.jar:/u/kafka_BI/bin/../share/java/kafka/swagger-jaxrs2-2.2.0.jar:/u/kafka_BI/bin/../share/java/kafka/swagger-models-2.2.0.jar:/u/kafka_BI/bin/../share/java/kafka/telemetry-api-3.718.0.jar:/u/kafka_BI/bin/../share/java/kafka/telemetry-client-3.718.0.jar:/u/kafka_BI/bin/../share/java/kafka/telemetry-events-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/telemetry-events-api-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka/ifx-changestream-client-1
.1.3.jar:/u/kafka_BI/bin/../share/java/kafka/jdbc-4.50.10.jar:/u/kafka_BI/bin/../share/java/kafka/debezium-storage-kafka-2.6.0.Alpha2.jar:/u/kafka_BI/bin/../share/java/kafka/debezium-core-2.6.0.Alpha2.jar:/u/kafka_BI/bin/../share/java/kafka/debezium-api-2.6.0.Alpha2.jar:/u/kafka_BI/bin/../share/java/kafka/debezium-connector-informix-2.6.0.Alpha2.jar:/u/kafka_BI/bin/../share/java/kafka/debezium-storage-file-2.6.0.Alpha2.jar:/u/kafka_BI/bin/../share/java/kafka/TimestampConverter-1.2.3-SNAPSHOT.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/snappy-java-1.1.10.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jersey-common-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-transport-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-codec-socks-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/minimal-json-0.9.5.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/javax.servlet-api-4.0.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jackson-datatype-jsr310-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jakarta.inject-2.6.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/kafka-tools-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/api-asn1-ber-1.0.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jbcrypt-0.4.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/lz4-java-1.8.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jetty-proxy-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/commons-codec-1.10.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/api-util-1.0.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-transport-native-unix-common-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/concurrent-trees-2.6.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-common-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/cloudevents-core-2.4.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/cloudevents-api-2.4.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/rest-authorizer-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/bc-fips-1.0.2.3.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jackson-dataformat-yaml-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/api-asn1-api-1.0.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/snakeyaml-2.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/connect-api-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/mina-core-2.0.22.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/commons-collections-3.2.2.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/events-schema-0.110.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/cloudevents-protobuf-2.3.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jackson-datatype-jdk8-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/authorizer-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/api-i18n-1.0.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jakarta.ws.rs-api-2.1.6.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/common-utils-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-transport-native-epoll-4.1.92.Final-linux-x86_64.jar:/u/kafka_B
I/bin/../share/java/confluent-metadata-service/protobuf-java-3.19.6.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/kafka-log4j-appender-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/bctls-fips-1.0.13.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-transport-classes-kqueue-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/org.apache.servicemix.bundles.antlr-2.7.7_5.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-transport-native-kqueue-4.1.92.Final-osx-x86_64.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/rbac-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/logredactor-1.0.11.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/auto-value-annotations-1.8.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-buffer-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jetty-client-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/commons-lang-2.6.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/auth-providers-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/cloudevents-kafka-2.4.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jersey-client-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/zstd-jni-1.5.2-1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jakarta.validation-api-2.0.2.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/agrona-1.15.2.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/auditlog-emitter-common-1.4.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jakarta.annotation-api-1.3.5.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/annotations-3.0.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/cel-generated-pb-0.3.5.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/confluent-security-plugins-common-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jsonassert-1.5.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-handler-proxy-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/kafka-server-common-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/swagger-annotations-2.2.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/metrics-core-2.2.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/android-json-0.0.20131108.vaadin1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/ce-kafka-http-server-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-transport-classes-epoll-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/commons-lang3-3.12.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/connect-json-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/telemetry-api-3.282.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/logredactor-metrics-1.0.11.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/reflections-0.9.12.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/confluent-audit-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/connect-runtime-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/maven-artifact-3.8.4.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/api-ldap-model-1.0.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/re2j-1.6.jar:/u/kafka_B
I/bin/../share/java/confluent-metadata-service/jakarta.el-3.0.4.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/auditlog-emitter-1.4.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/telemetry-events-7.3.0-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/kafka-clients-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/gson-2.9.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/bcpkix-fips-1.0.6.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/connect-transforms-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/plexus-utils-3.3.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/opentelemetry-context-1.15.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/connect-ce-logs-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/javax.annotation-api-1.3.2.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/javax.ws.rs-api-2.1.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/confluent-resource-names-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jose4j-0.9.3.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jetty-util-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/rbac-common-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/cloudevents-json-jackson-2.3.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/telemetry-events-api-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jersey-server-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/kafka-client-plugins-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/osgi-resource-locator-1.0.3.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jetty-security-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jackson-datatype-protobuf-0.9.11-jackson2.9.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/auth-metadata-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/security-extensions-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jul-to-slf4j-1.7.36.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/cel-generated-antlr-0.3.5.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jakarta.el-api-4.0.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/opentelemetry-api-1.15.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/protobuf-java-util-3.19.6.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/rbac-api-server-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jersey-bean-validation-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/cel-core-0.3.5.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/telemetry-client-3.282.0.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/netty-codec-http-4.1.92.Final.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/jackson-core-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/argparse4j-0.8.1.jar:/u/kafka_BI/bin/../share/java/confluent-metadata-service/authorizer-client-7.4.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/snappy-java-1.1.10.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-http-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jersey-common-2.36.jar:/u/kafka_BI/bin/../share/java/rest-utils/asm-tree-9.4.jar:/u/kafka_BI/bin/../share/java/rest-utils/http2-common-9.4.51
.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/failureaccess-1.0.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/jakarta.inject-2.6.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/kafka-clients-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/rest-utils/commons-collections4-4.4.jar:/u/kafka_BI/bin/../share/java/rest-utils/lz4-java-1.8.0.jar:/u/kafka_BI/bin/../share/java/rest-utils/http2-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jersey-hk2-2.36.jar:/u/kafka_BI/bin/../share/java/rest-utils/websocket-client-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/javassist-3.25.0-GA.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-annotations-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/asm-commons-9.4.jar:/u/kafka_BI/bin/../share/java/rest-utils/websocket-api-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/commons-codec-1.15.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-servlet-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-xml-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/api-util-2.1.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/jakarta.ws.rs-api-2.1.6.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-util-ajax-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/javax-websocket-server-impl-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/hibernate-validator-6.1.7.Final.jar:/u/kafka_BI/bin/../share/java/rest-utils/org.apache.servicemix.bundles.antlr-2.7.7_5.jar:/u/kafka_BI/bin/../share/java/rest-utils/jersey-container-servlet-2.36.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-alpn-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/websocket-common-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-client-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/classmate-1.3.4.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-jndi-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jersey-client-2.36.jar:/u/kafka_BI/bin/../share/java/rest-utils/zstd-jni-1.5.2-1.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-alpn-java-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jakarta.validation-api-2.0.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/jakarta.xml.bind-api-2.3.3.jar:/u/kafka_BI/bin/../share/java/rest-utils/jakarta.annotation-api-1.3.5.jar:/u/kafka_BI/bin/../share/java/rest-utils/hk2-utils-2.6.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/javax.websocket-client-api-1.0.jar:/u/kafka_BI/bin/../share/java/rest-utils/websocket-servlet-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/j2objc-annotations-1.3.jar:/u/kafka_BI/bin/../share/java/rest-utils/guava-30.1.1-jre.jar:/u/kafka_BI/bin/../share/java/rest-utils/jackson-module-jaxb-annotations-2.14.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/commons-lang3-3.12.0.jar:/u/kafka_BI/bin/../share/java/rest-utils/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-io-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/activation-1.1.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/jackson-databind-2.14.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/api-ldap-model-2.1.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/http2-hpack-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/rest-utils-7.4.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/jakarta.activation-api-1.2.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/jersey-container-servlet-core-2.36.jar:/u/ka
fka_BI/bin/../share/java/rest-utils/api-asn1-ber-2.1.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/jakarta.el-3.0.4.jar:/u/kafka_BI/bin/../share/java/rest-utils/checker-qual-3.8.0.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-jmx-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jaxb-api-2.3.0.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/javax.servlet-api-3.1.0.jar:/u/kafka_BI/bin/../share/java/rest-utils/jsr305-3.0.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-continuation-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/javax.annotation-api-1.3.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/javax.websocket-api-1.0.jar:/u/kafka_BI/bin/../share/java/rest-utils/websocket-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/mina-core-2.2.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-util-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jersey-server-2.36.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-servlets-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-jaas-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/osgi-resource-locator-1.0.3.jar:/u/kafka_BI/bin/../share/java/rest-utils/jackson-jaxrs-json-provider-2.14.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-security-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/aopalliance-repackaged-2.6.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/jackson-annotations-2.14.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/javax-websocket-client-impl-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/hk2-locator-2.6.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-webapp-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/api-i18n-2.1.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/api-asn1-api-2.1.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/hk2-api-2.6.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/error_prone_annotations-2.5.1.jar:/u/kafka_BI/bin/../share/java/rest-utils/jackson-jaxrs-base-2.14.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/jakarta.el-api-4.0.0.jar:/u/kafka_BI/bin/../share/java/rest-utils/asm-9.4.jar:/u/kafka_BI/bin/../share/java/rest-utils/jersey-bean-validation-2.36.jar:/u/kafka_BI/bin/../share/java/rest-utils/jetty-plus-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/rest-utils/jackson-core-2.14.2.jar:/u/kafka_BI/bin/../share/java/rest-utils/jboss-logging-3.3.2.Final.jar:/u/kafka_BI/bin/../share/java/confluent-common/build-tools-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-common/common-utils-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-common/common-metrics-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-common/slf4j-api-1.7.36.jar:/u/kafka_BI/bin/../share/java/confluent-common/common-config-7.4.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/snappy-java-1.1.10.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-http-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jersey-common-2.36.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/asm-tree-9.4.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/http2-common-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/failureaccess-1.0.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jakarta.inject-2.6.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/commons-collections4-4.4.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/lz4-java-1.8.0.jar:/u/kafka_BI/bin/
../share/java/ce-kafka-http-server/http2-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jersey-hk2-2.36.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/websocket-client-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/javassist-3.25.0-GA.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-annotations-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/asm-commons-9.4.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/websocket-api-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/commons-codec-1.15.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-servlet-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-xml-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/api-util-2.1.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jakarta.ws.rs-api-2.1.6.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-util-ajax-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/common-utils-7.4.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/javax-websocket-server-impl-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/hibernate-validator-6.1.7.Final.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/org.apache.servicemix.bundles.antlr-2.7.7_5.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jersey-container-servlet-2.36.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-alpn-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/websocket-common-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-client-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/classmate-1.3.4.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-jndi-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jersey-client-2.36.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/zstd-jni-1.5.2-1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-alpn-java-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jakarta.validation-api-2.0.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jakarta.xml.bind-api-2.3.3.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jakarta.annotation-api-1.3.5.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/hk2-utils-2.6.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/javax.websocket-client-api-1.0.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/websocket-servlet-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/j2objc-annotations-1.3.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/ce-kafka-http-server-7.4.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/guava-30.1.1-jre.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jackson-module-jaxb-annotations-2.14.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/commons-lang3-3.12.0.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-io-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/activation-1.1.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jackson-databind-2.14.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/api-ldap-model-2.1.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/http2-hpack-9.4.51.v20230217.jar:/u/kafka_BI/bin/
../share/java/ce-kafka-http-server/slf4j-api-1.7.36.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/rest-utils-7.4.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jakarta.activation-api-1.2.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jersey-container-servlet-core-2.36.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/api-asn1-ber-2.1.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jakarta.el-3.0.4.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/checker-qual-3.8.0.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/kafka-clients-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-jmx-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jaxb-api-2.3.0.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/javax.servlet-api-3.1.0.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jsr305-3.0.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-continuation-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/javax.annotation-api-1.3.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/javax.websocket-api-1.0.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/websocket-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/mina-core-2.2.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-util-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/telemetry-events-api-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jersey-server-2.36.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-servlets-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-jaas-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/osgi-resource-locator-1.0.3.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jackson-jaxrs-json-provider-2.14.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-security-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/aopalliance-repackaged-2.6.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jackson-annotations-2.14.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/javax-websocket-client-impl-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/hk2-locator-2.6.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-webapp-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/api-i18n-2.1.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/api-asn1-api-2.1.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/hk2-api-2.6.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/error_prone_annotations-2.5.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jackson-jaxrs-base-2.14.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jakarta.el-api-4.0.0.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/asm-9.4.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jersey-bean-validation-2.36.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jetty-plus-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jackson-core-2.14.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-http-server/jboss-logging-3.3.2.Final.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-servlet/ce-kafka-rest-servlet-7.4.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-extensions/snappy-java-1.1.10.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-extensions/lz4-
java-1.8.0.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-extensions/common-utils-7.4.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-extensions/zstd-jni-1.5.2-1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-extensions/slf4j-api-1.7.36.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-extensions/ce-kafka-rest-extensions-7.4.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-extensions/kafka-clients-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-extensions/javax.ws.rs-api-2.1.1.jar:/u/kafka_BI/bin/../share/java/ce-kafka-rest-extensions/telemetry-events-api-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/snappy-java-1.1.10.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/classgraph-4.8.21.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/failureaccess-1.0.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/commons-compress-1.21.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/scala-library-2.13.10.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/minimal-json-0.9.5.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jackson-module-scala_2.13-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/vavr-0.10.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/vavr-match-0.10.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jackson-datatype-jsr310-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-clients-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/audience-annotations-0.5.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-json-serializer-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/lz4-java-1.8.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/proto-google-common-protos-2.5.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/validation-api-2.0.1.Final.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-protobuf-provider-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/commons-validator-1.7.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/logredactor-1.0.12.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlin-stdlib-common-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/okio-jvm-3.0.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/snakeyaml-2.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-server-common-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/commons-logging-1.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/commons-collections-3.2.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-protobuf-serializer-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/everit-json-schema-1.14.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/zookeeper-jute-3.6.3.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jackson-datatype-jdk8-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlin-scripting-compiler-impl-embeddable-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/common-utils-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-group-coordinator-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/protobuf-java-3.19.6.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/logredactor-metrics-1.0.12.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/commons-cli-1.4.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/scala-logging_2.13-3.9.4.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/commons-digester-2.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jackson-datatype-joda-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/scala-reflect-2.13.10.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-metadata-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/kafka-rest
-lib/auto-value-annotations-1.7.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/mbknor-jackson-jsonschema_2.13-1.0.39.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/zstd-jni-1.5.2-1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-protobuf-types-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/handy-uri-templates-2.1.8.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jackson-module-parameter-names-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-rest-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jackson-dataformat-csv-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlin-script-runtime-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/scala-java8-compat_2.13-1.0.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/metrics-core-4.1.12.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/metrics-core-2.2.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/wire-runtime-jvm-4.4.3.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/json-20230227.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/j2objc-annotations-1.3.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/guava-30.1.1-jre.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/resilience4j-core-1.7.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/okio-3.0.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/commons-lang3-3.12.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlin-scripting-common-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/argparse4j-0.7.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-schema-serializer-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/avro-1.11.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/re2j-1.6.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/scala-collection-compat_2.13-2.10.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/spotbugs-annotations-4.7.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlin-stdlib-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/annotations-13.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/checker-qual-3.8.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlin-scripting-jvm-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-json-schema-serializer-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/gson-2.9.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/resilience4j-ratelimiter-1.7.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-storage-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/swagger-annotations-2.1.10.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jsr305-3.0.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka_2.13-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jose4j-0.9.3.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlin-reflect-1.7.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlinpoet-1.12.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-avro-serializer-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-storage-api-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/joda-time-2.12.5.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-schema-registry-client-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/wire-schema-jvm-4.4.3.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/zookeeper-3.6.3.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlin-stdlib-jdk8-1.6.10.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/javapoet-1.13.0.jar:/u/kafka_BI/bin/../share/java/kafka
-rest-lib/kotlin-stdlib-jdk7-1.6.10.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-json-schema-provider-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kotlin-scripting-compiler-embeddable-1.6.0.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jopt-simple-5.0.4.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/kafka-raft-7.4.1-ccs.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/error_prone_annotations-2.5.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/paranamer-2.8.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/protobuf-java-util-3.19.6.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jackson-datatype-guava-2.14.2.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/common-config-7.4.1.jar:/u/kafka_BI/bin/../share/java/kafka-rest-lib/jackson-core-2.14.2.jar:/u/kafka_BI/bin/../share/java/ce-kafka-queues/*:/u/kafka_BI/bin/../share/java/kafka-queues-lib/*:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/snappy-java-1.1.10.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jetty-http-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jersey-common-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/failureaccess-1.0.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/commons-compress-1.21.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/minimal-json-0.9.5.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/confluent-licensing-new-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/confluent-serializers-new-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/javax.servlet-api-4.0.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jackson-dataformat-properties-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jakarta.inject-2.6.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/connect-json-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/kafka-tools-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jbcrypt-0.4.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/lz4-java-1.8.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/telemetry-api-3.718.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jersey-hk2-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/javassist-3.25.0-GA.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-transport-classes-kqueue-4.1.84.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/logredactor-1.0.12.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-transport-native-epoll-4.1.84.Final-linux-x86_64.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/rest-authorizer-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/bc-fips-1.0.2.3.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jackson-dataformat-yaml-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/snakeyaml-2.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jetty-servlet-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-resolver-4.1.86.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-handler-4.1.86.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/cloudevents-api-2.3.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/cloudevents-protobuf-2.3.0.jar:/u/kafk
a_BI/bin/../share/java/confluent-security/kafka-rest/jackson-datatype-jdk8-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/authorizer-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jakarta.ws.rs-api-2.1.6.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jetty-util-ajax-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/protobuf-java-3.19.6.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/logredactor-metrics-1.0.12.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/kafka-log4j-appender-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/bctls-fips-1.0.13.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jersey-container-servlet-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/rbac-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jetty-client-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jersey-client-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/zstd-jni-1.5.2-1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jakarta.validation-api-2.0.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/agrona-1.15.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jakarta.xml.bind-api-2.3.3.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-transport-4.1.86.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jakarta.annotation-api-1.3.5.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/annotations-3.0.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/hk2-utils-2.6.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/cel-generated-pb-0.3.5.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/confluent-security-plugins-common-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/confluent-kafka-rest-security-plugin-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/kafka-server-common-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/opentelemetry-proto-0.19.0-alpha.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/metrics-core-4.1.12.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/telemetry-events-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/swagger-annotations-2.2.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/metrics-core-2.2.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-handler-proxy-4.1.84.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/j2objc-annotations-1.3.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/guava-30.1.1-jre.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jackson-module-jaxb-annotations-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/commons-lang3-3.12.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/reflections-0.9.12.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/connect-runtime-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/argparse4j-0.7.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/maven-artifact-3.8.4.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-re
st/jetty-io-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-codec-4.1.86.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/activation-1.1.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-transport-native-unix-common-4.1.86.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jackson-databind-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-common-4.1.86.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/avro-1.11.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/connect-api-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/re2j-1.6.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jakarta.activation-api-1.2.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jersey-container-servlet-core-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/checker-qual-3.8.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/kafka-clients-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-transport-native-kqueue-4.1.84.Final-osx-x86_64.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jaxb-api-2.3.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/gson-2.9.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jetty-server-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/bcpkix-fips-1.0.6.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/connect-transforms-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jsr305-3.0.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/plexus-utils-3.3.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jetty-continuation-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/connect-ce-logs-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-codec-socks-4.1.84.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/javax.annotation-api-1.3.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/javax.ws.rs-api-2.1.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/cloudevents-core-2.3.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/confluent-resource-names-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jose4j-0.9.3.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/telemetry-client-3.718.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jetty-util-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/cloudevents-json-jackson-2.3.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/telemetry-events-api-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jersey-server-2.36.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/kafka-schema-registry-client-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jetty-servlets-9.4.51.v20230217.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/kafka-client-plugins-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/osgi-resource-locator-1.0.3.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jackson-jaxrs-json-provider-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jetty-security-9.4.51.v20230217.jar:/u/kafka_BI/bi
n/../share/java/confluent-security/kafka-rest/jackson-datatype-protobuf-0.9.11-jackson2.9.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/aopalliance-repackaged-2.6.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-transport-classes-epoll-4.1.84.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jackson-annotations-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/auth-metadata-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/hk2-locator-2.6.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/security-extensions-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/cloudevents-kafka-2.3.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-buffer-4.1.86.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/cel-generated-antlr-0.3.5.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/hk2-api-2.6.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/netty-codec-http-4.1.86.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/error_prone_annotations-2.5.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/events-schema-0.117.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jackson-jaxrs-base-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/protobuf-java-util-3.19.6.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/cel-core-0.3.5.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/jackson-core-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/kafka-rest/broker-plugins-7.4.1-ce-test.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/snappy-java-1.1.10.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/classgraph-4.8.21.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/failureaccess-1.0.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/commons-compress-1.21.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/scala-library-2.13.10.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/minimal-json-0.9.5.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/jackson-datatype-jsr310-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/joda-time-2.10.8.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/lz4-java-1.8.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/proto-google-common-protos-2.5.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/validation-api-2.0.1.Final.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kafka-protobuf-provider-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/commons-validator-1.7.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/logredactor-1.0.12.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-stdlib-common-1.6.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/okio-jvm-3.0.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/bc-fips-1.0.2.3.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/snakeyaml-2.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/confluent-schema-registry-validator-plugin-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/commons-logging-1.2
.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/commons-collections-3.2.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/everit-json-schema-1.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/jackson-datatype-jdk8-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-scripting-compiler-impl-embeddable-1.6.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/common-utils-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/protobuf-java-3.19.6.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/caffeine-2.8.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/logredactor-metrics-1.0.12.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/bctls-fips-1.0.13.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/commons-digester-2.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/jackson-datatype-joda-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/mbknor-jackson-jsonschema_2.13-1.0.39.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/zstd-jni-1.5.2-1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kafka-protobuf-types-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/handy-uri-templates-2.1.8.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/jackson-module-parameter-names-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-script-runtime-1.6.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/wire-runtime-jvm-4.4.3.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/json-20230227.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/j2objc-annotations-1.3.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/guava-30.1.1-jre.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/okio-3.0.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/commons-lang3-3.12.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-scripting-common-1.6.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kafka-schema-serializer-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/jackson-databind-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/avro-1.11.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/re2j-1.6.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-stdlib-1.6.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/annotations-13.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/checker-qual-3.8.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-scripting-jvm-1.6.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kafka-clients-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/gson-2.9.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/swagger-annotations-2.1.10.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/bcpkix-fips-1.0.6.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/jsr305
-3.0.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-reflect-1.7.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlinpoet-1.12.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kafka-avro-serializer-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/telemetry-events-api-7.4.1-ce.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kafka-schema-registry-client-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/wire-schema-jvm-4.4.3.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-stdlib-jdk8-1.6.10.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/javapoet-1.13.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/jackson-annotations-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-stdlib-jdk7-1.6.10.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kafka-json-schema-provider-7.4.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/kotlin-scripting-compiler-embeddable-1.6.0.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/error_prone_annotations-2.5.1.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/protobuf-java-util-3.19.6.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/jackson-datatype-guava-2.14.2.jar:/u/kafka_BI/bin/../share/java/confluent-security/schema-validator/jackson-core-2.14.2.jar:/u/kafka_BI/bin/../support-metrics-client/build/dependant-libs-2.13.10/*:/u/kafka_BI/bin/../support-metrics-client/build/libs/*:/u/kafka_BI/bin/../share/java/confluent-telemetry/confluent-metrics-7.4.1-ce.jar:/usr/share/java/support-metrics-client/* os.spec = Linux, amd64, 3.10.0-1160.95.1.el7.x86_64 os.vcpus = 8 (org.apache.kafka.connect.runtime.WorkerInfo:72) [2024-02-28 13:36:17,675] INFO Scanning for plugin classes. This might take a moment ... 
(org.apache.kafka.connect.cli.ConnectDistributed:94) [2024-02-28 13:36:17,709] INFO Loading plugin from: /u/kafka_BI/share/confluentinc-kafka-connect-jdbc/doc (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:17,794] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/confluentinc-kafka-connect-jdbc/doc/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:17,795] INFO Added plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:17,795] INFO Added plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:17,796] INFO Added plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:17,814] INFO Loading plugin from: /u/kafka_BI/share/confluentinc-kafka-connect-jdbc/etc (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:17,823] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/confluentinc-kafka-connect-jdbc/etc/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:17,829] INFO Loading plugin from: /u/kafka_BI/share/confluentinc-kafka-connect-jdbc/lib (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:20,763] INFO Kafka Connect JDBC version: 10.7.3-5 (io.confluent.connect.jdbc.util.Version:35) [2024-02-28 13:36:20,775] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/confluentinc-kafka-connect-jdbc/lib/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:20,775] INFO Added plugin 'io.confluent.connect.jdbc.JdbcSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:20,776] INFO Added plugin 'io.confluent.connect.jdbc.JdbcSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:20,776] INFO Added plugin 'org.apache.kafka.connect.storage.StringConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:20,776] INFO Added plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:20,776] INFO Added plugin 'org.apache.kafka.common.config.provider.FileConfigProvider' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:20,910] INFO Loading plugin from: /u/kafka_BI/share/official_informix_connector/bson-4.11.1.jar (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:20,968] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/official_informix_connector/bson-4.11.1.jar} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:20,971] INFO Loading plugin from: /u/kafka_BI/share/official_informix_connector/ifx-changestream-client-1.1.3.jar (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:20,982] INFO Registered loader: 
PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/official_informix_connector/ifx-changestream-client-1.1.3.jar} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:20,984] INFO Loading plugin from: /u/kafka_BI/share/official_informix_connector/jdbc-4.50.10.jar (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,077] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/official_informix_connector/jdbc-4.50.10.jar} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:21,083] INFO Loading plugin from: /u/kafka_BI/share/official_informix_connector/debezium-connector-informix-2.6.0.Alpha2.jar (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,190] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/official_informix_connector/debezium-connector-informix-2.6.0.Alpha2.jar} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:21,191] INFO Added plugin 'io.debezium.connector.informix.InformixConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,193] INFO Loading plugin from: /u/kafka_BI/share/official_informix_connector/debezium-storage-file-2.6.0.Alpha2.jar (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,206] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/official_informix_connector/debezium-storage-file-2.6.0.Alpha2.jar} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:21,208] INFO Loading plugin from: /u/kafka_BI/share/official_informix_connector/debezium-storage-kafka-2.6.0.Alpha2.jar (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,216] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/official_informix_connector/debezium-storage-kafka-2.6.0.Alpha2.jar} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:21,218] INFO Loading plugin from: /u/kafka_BI/share/official_informix_connector/debezium-core-2.6.0.Alpha2.jar (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,379] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/official_informix_connector/debezium-core-2.6.0.Alpha2.jar} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:21,380] INFO Added plugin 'io.debezium.converters.ByteArrayConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,380] INFO Added plugin 'io.debezium.converters.BinaryDataConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,380] INFO Added plugin 'io.debezium.converters.CloudEventsConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,380] INFO Added plugin 'io.debezium.transforms.TimezoneConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,381] INFO Added plugin 'io.debezium.transforms.outbox.EventRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,381] INFO Added plugin 'io.debezium.transforms.ExtractNewRecordState' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 
13:36:21,381] INFO Added plugin 'io.debezium.transforms.ExtractChangedRecordState' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,381] INFO Added plugin 'io.debezium.transforms.ExtractSchemaToNewRecord' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,381] INFO Added plugin 'io.debezium.transforms.HeaderToValue' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,382] INFO Added plugin 'io.debezium.transforms.partitions.PartitionRouting' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,382] INFO Added plugin 'io.debezium.transforms.ByLogicalTableRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,382] INFO Added plugin 'io.debezium.transforms.SchemaChangeEventFilter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,382] INFO Added plugin 'io.debezium.transforms.tracing.ActivateTracingSpan' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,385] INFO Loading plugin from: /u/kafka_BI/share/official_informix_connector/debezium-api-2.6.0.Alpha2.jar (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,397] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/official_informix_connector/debezium-api-2.6.0.Alpha2.jar} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:21,399] INFO Loading plugin from: /u/kafka_BI/share/confluentinc-connect-transforms-1.4.3/lib (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,485] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/confluentinc-connect-transforms-1.4.3/lib/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:21,485] INFO Added plugin 'io.confluent.connect.transforms.TombstoneHandler' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,486] INFO Added plugin 'io.confluent.connect.transforms.ExtractTopic$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,486] INFO Added plugin 'io.confluent.connect.transforms.Filter$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,486] INFO Added plugin 'io.confluent.connect.transforms.ExtractTopic$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,486] INFO Added plugin 'io.confluent.connect.transforms.ExtractTopic$Header' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,486] INFO Added plugin 'io.confluent.connect.transforms.GzipDecompress$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,487] INFO Added plugin 'io.confluent.connect.transforms.Drop$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,487] INFO Added plugin 'io.confluent.connect.transforms.Filter$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,487] INFO Added plugin 'io.confluent.connect.transforms.MessageTimestampRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,487] INFO Added plugin 
'io.confluent.connect.transforms.GzipDecompress$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,487] INFO Added plugin 'io.confluent.connect.transforms.Drop$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:21,490] INFO Loading plugin from: /u/kafka_BI/share/confluentinc-connect-transforms-1.4.3/etc (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,494] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/confluentinc-connect-transforms-1.4.3/etc/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:21,496] INFO Loading plugin from: /u/kafka_BI/share/confluentinc-connect-transforms-1.4.3/assets (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,500] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/confluentinc-connect-transforms-1.4.3/assets/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:21,502] INFO Loading plugin from: /u/kafka_BI/share/confluentinc-connect-transforms-1.4.3/doc (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:275) [2024-02-28 13:36:21,506] INFO Registered loader: PluginClassLoader{pluginLocation=file:/u/kafka_BI/share/confluentinc-connect-transforms-1.4.3/doc/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:30,963] INFO Registered loader: jdk.internal.loader.ClassLoaders$AppClassLoader@75b84c92 (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:297) [2024-02-28 13:36:30,963] INFO Added plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,963] INFO Added plugin 'org.apache.kafka.connect.tools.MockSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,964] INFO Added plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,964] INFO Added plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,964] INFO Added plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,964] INFO Added plugin 'org.apache.kafka.connect.tools.MockSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,964] INFO Added plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,965] INFO Added plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,965] INFO Added plugin 'org.apache.kafka.connect.converters.FloatConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,965] INFO Added plugin 'io.confluent.connect.avro.AvroConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,965] INFO Added plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) 
[2024-02-28 13:36:30,965] INFO Added plugin 'org.apache.kafka.connect.converters.LongConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,965] INFO Added plugin 'org.apache.kafka.connect.converters.IntegerConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,966] INFO Added plugin 'io.confluent.connect.json.JsonSchemaConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,966] INFO Added plugin 'io.confluent.connect.protobuf.ProtobufConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,966] INFO Added plugin 'org.apache.kafka.connect.converters.DoubleConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,966] INFO Added plugin 'org.apache.kafka.connect.json.JsonConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,966] INFO Added plugin 'org.apache.kafka.connect.converters.ShortConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,967] INFO Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,967] INFO Added plugin 'org.apache.kafka.connect.transforms.Filter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,967] INFO Added plugin 'org.apache.kafka.connect.transforms.InsertField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,967] INFO Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,967] INFO Added plugin 'org.apache.kafka.connect.transforms.TimestampRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,967] INFO Added plugin 'org.apache.kafka.connect.transforms.RegexRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,968] INFO Added plugin 'org.apache.kafka.connect.transforms.HoistField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,968] INFO Added plugin 'org.apache.kafka.connect.transforms.ValueToKey' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,968] INFO Added plugin 'org.apache.kafka.connect.transforms.DropHeaders' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,968] INFO Added plugin 'org.apache.kafka.connect.transforms.Cast$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,968] INFO Added plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,968] INFO Added plugin 'org.apache.kafka.connect.transforms.InsertHeader' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,969] INFO Added plugin 'io.confluent.connect.rest.datapreview.extension.util.PreviewRecordTransformer' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,969] INFO Added plugin 'org.apache.kafka.connect.transforms.Flatten$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 
13:36:30,969] INFO Added plugin 'org.apache.kafka.connect.transforms.HeaderFrom$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,969] INFO Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,970] INFO Added plugin 'io.debezium.transforms.Filter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,970] INFO Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,970] INFO Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,970] INFO Added plugin 'org.apache.kafka.connect.transforms.HeaderFrom$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,970] INFO Added plugin 'org.apache.kafka.connect.transforms.MaskField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,971] INFO Added plugin 'org.apache.kafka.connect.transforms.MaskField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,971] INFO Added plugin 'org.apache.kafka.connect.transforms.Cast$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,971] INFO Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,971] INFO Added plugin 'org.apache.kafka.connect.transforms.Flatten$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,971] INFO Added plugin 'org.apache.kafka.connect.transforms.InsertField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,972] INFO Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,972] INFO Added plugin 'io.debezium.transforms.ContentBasedRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,972] INFO Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,972] INFO Added plugin 'org.apache.kafka.connect.transforms.HoistField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,972] INFO Added plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,973] INFO Added plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,973] INFO Added plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,973] INFO Added plugin 'io.confluent.kafka.secretregistry.client.config.provider.SecretConfigProvider' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,973] INFO Added plugin 'io.confluent.kafka.schemaregistry.client.config.provider.SchemaRegistryConfigProvider' 
(org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,973] INFO Added plugin 'org.apache.kafka.common.config.provider.DirectoryConfigProvider' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,973] INFO Added plugin 'io.confluent.connect.security.ConnectSecurityExtension' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,974] INFO Added plugin 'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,974] INFO Added plugin 'io.confluent.connect.rest.datapreview.extension.ConnectorDataPreviewRestExtension' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:230) [2024-02-28 13:36:30,977] INFO Added aliases 'JdbcSinkConnector' and 'JdbcSink' to plugin 'io.confluent.connect.jdbc.JdbcSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,978] INFO Added aliases 'JdbcSourceConnector' and 'JdbcSource' to plugin 'io.confluent.connect.jdbc.JdbcSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,978] INFO Added aliases 'InformixConnector' and 'Informix' to plugin 'io.debezium.connector.informix.InformixConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,978] INFO Added aliases 'MirrorCheckpointConnector' and 'MirrorCheckpoint' to plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,979] INFO Added aliases 'MirrorHeartbeatConnector' and 'MirrorHeartbeat' to plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,979] INFO Added aliases 'MirrorSourceConnector' and 'MirrorSource' to plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,979] INFO Added aliases 'MockSinkConnector' and 'MockSink' to plugin 'org.apache.kafka.connect.tools.MockSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,979] INFO Added aliases 'MockSourceConnector' and 'MockSource' to plugin 'org.apache.kafka.connect.tools.MockSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,980] INFO Added aliases 'SchemaSourceConnector' and 'SchemaSource' to plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,980] INFO Added aliases 'VerifiableSinkConnector' and 'VerifiableSink' to plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,980] INFO Added aliases 'VerifiableSourceConnector' and 'VerifiableSource' to plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,981] INFO Added aliases 'AvroConverter' and 'Avro' to plugin 'io.confluent.connect.avro.AvroConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,981] INFO Added aliases 'JsonSchemaConverter' and 'JsonSchema' to 
plugin 'io.confluent.connect.json.JsonSchemaConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,981] INFO Added aliases 'ProtobufConverter' and 'Protobuf' to plugin 'io.confluent.connect.protobuf.ProtobufConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,981] INFO Added aliases 'BinaryDataConverter' and 'BinaryData' to plugin 'io.debezium.converters.BinaryDataConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,982] INFO Added aliases 'CloudEventsConverter' and 'CloudEvents' to plugin 'io.debezium.converters.CloudEventsConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,982] INFO Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,982] INFO Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,982] INFO Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,983] INFO Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,983] INFO Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,983] INFO Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,984] INFO Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,984] INFO Added aliases 'BinaryDataConverter' and 'BinaryData' to plugin 'io.debezium.converters.BinaryDataConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,984] INFO Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,984] INFO Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,985] INFO Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,985] INFO Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,985] INFO Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,985] INFO Added aliases 'JsonConverter' and 'Json' to plugin 
'org.apache.kafka.connect.json.JsonConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,985] INFO Added aliases 'SimpleHeaderConverter' and 'Simple' to plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,986] INFO Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,987] INFO Added alias 'PreviewRecordTransformer' to plugin 'io.confluent.connect.rest.datapreview.extension.util.PreviewRecordTransformer' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,988] INFO Added alias 'Header' to plugin 'io.confluent.connect.transforms.ExtractTopic$Header' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,988] INFO Added alias 'MessageTimestampRouter' to plugin 'io.confluent.connect.transforms.MessageTimestampRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,988] INFO Added alias 'TombstoneHandler' to plugin 'io.confluent.connect.transforms.TombstoneHandler' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,989] INFO Added alias 'ByLogicalTableRouter' to plugin 'io.debezium.transforms.ByLogicalTableRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,989] INFO Added alias 'ContentBasedRouter' to plugin 'io.debezium.transforms.ContentBasedRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,989] INFO Added alias 'ExtractChangedRecordState' to plugin 'io.debezium.transforms.ExtractChangedRecordState' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,989] INFO Added alias 'ExtractNewRecordState' to plugin 'io.debezium.transforms.ExtractNewRecordState' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,990] INFO Added alias 'ExtractSchemaToNewRecord' to plugin 'io.debezium.transforms.ExtractSchemaToNewRecord' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,990] INFO Added alias 'HeaderToValue' to plugin 'io.debezium.transforms.HeaderToValue' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,990] INFO Added alias 'SchemaChangeEventFilter' to plugin 'io.debezium.transforms.SchemaChangeEventFilter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,990] INFO Added alias 'TimezoneConverter' to plugin 'io.debezium.transforms.TimezoneConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,991] INFO Added alias 'EventRouter' to plugin 'io.debezium.transforms.outbox.EventRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,991] INFO Added alias 'PartitionRouting' to plugin 'io.debezium.transforms.partitions.PartitionRouting' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,991] INFO Added alias 'ActivateTracingSpan' to plugin 'io.debezium.transforms.tracing.ActivateTracingSpan' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,992] INFO Added aliases 
'PredicatedTransformation' and 'Predicated' to plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,992] INFO Added alias 'DropHeaders' to plugin 'org.apache.kafka.connect.transforms.DropHeaders' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,992] INFO Added alias 'InsertHeader' to plugin 'org.apache.kafka.connect.transforms.InsertHeader' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,992] INFO Added alias 'RegexRouter' to plugin 'org.apache.kafka.connect.transforms.RegexRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,993] INFO Added alias 'TimestampRouter' to plugin 'org.apache.kafka.connect.transforms.TimestampRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,993] INFO Added alias 'ValueToKey' to plugin 'org.apache.kafka.connect.transforms.ValueToKey' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,993] INFO Added alias 'HasHeaderKey' to plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,993] INFO Added alias 'RecordIsTombstone' to plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,994] INFO Added alias 'TopicNameMatches' to plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,994] INFO Added alias 'ConnectorDataPreviewRestExtension' to plugin 'io.confluent.connect.rest.datapreview.extension.ConnectorDataPreviewRestExtension' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,994] INFO Added alias 'ConnectSecurityExtension' to plugin 'io.confluent.connect.security.ConnectSecurityExtension' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,994] INFO Added alias 'BasicAuthSecurityRestExtension' to plugin 'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:505) [2024-02-28 13:36:30,994] INFO Added aliases 'AllConnectorClientConfigOverridePolicy' and 'All' to plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,995] INFO Added aliases 'NoneConnectorClientConfigOverridePolicy' and 'None' to plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:30,995] INFO Added aliases 'PrincipalConnectorClientConfigOverridePolicy' and 'Principal' to plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:508) [2024-02-28 13:36:31,165] INFO DistributedConfig values: access.control.allow.methods = access.control.allow.origin = admin.listeners = null auto.include.jmx.reporter = true bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] client.dns.lookup = use_all_dns_ips client.id 
= config.providers = [] config.storage.replication.factor = 1 config.storage.topic = connect-configs confluent.connector.task.status.metrics = false confluent.license = [hidden] confluent.license.inject.into.connectors = true confluent.topic = _confluent-command confluent.topic.auto.include.jmx.reporter = true confluent.topic.bootstrap.servers = [] confluent.topic.client.dns.lookup = use_all_dns_ips confluent.topic.client.id = confluent.topic.confluent.proxy.protocol.client.address = null confluent.topic.confluent.proxy.protocol.client.port = null confluent.topic.confluent.proxy.protocol.client.version = NONE confluent.topic.connections.max.idle.ms = 540000 confluent.topic.consumer.allow.auto.create.topics = true confluent.topic.consumer.auto.commit.interval.ms = 5000 confluent.topic.consumer.auto.include.jmx.reporter = true confluent.topic.consumer.auto.offset.reset = latest confluent.topic.consumer.check.crcs = true confluent.topic.consumer.client.dns.lookup = use_all_dns_ips confluent.topic.consumer.client.id = confluent.topic.consumer.client.rack = confluent.topic.consumer.confluent.proxy.protocol.client.address = null confluent.topic.consumer.confluent.proxy.protocol.client.port = null confluent.topic.consumer.confluent.proxy.protocol.client.version = NONE confluent.topic.consumer.connections.max.idle.ms = 540000 confluent.topic.consumer.default.api.timeout.ms = 60000 confluent.topic.consumer.enable.auto.commit = true confluent.topic.consumer.exclude.internal.topics = true confluent.topic.consumer.fetch.max.bytes = 52428800 confluent.topic.consumer.fetch.max.wait.ms = 500 confluent.topic.consumer.fetch.min.bytes = 1 confluent.topic.consumer.group.id = null confluent.topic.consumer.group.instance.id = null confluent.topic.consumer.heartbeat.interval.ms = 3000 confluent.topic.consumer.interceptor.classes = [] confluent.topic.consumer.internal.leave.group.on.close = true confluent.topic.consumer.internal.throw.on.fetch.stable.offset.unsupported = false confluent.topic.consumer.isolation.level = read_uncommitted confluent.topic.consumer.max.partition.fetch.bytes = 1048576 confluent.topic.consumer.max.poll.interval.ms = 300000 confluent.topic.consumer.max.poll.records = 500 confluent.topic.consumer.metadata.max.age.ms = 300000 confluent.topic.consumer.metric.reporters = [] confluent.topic.consumer.metrics.num.samples = 2 confluent.topic.consumer.metrics.recording.level = INFO confluent.topic.consumer.metrics.sample.window.ms = 30000 confluent.topic.consumer.partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] confluent.topic.consumer.receive.buffer.bytes = 65536 confluent.topic.consumer.reconnect.backoff.max.ms = 1000 confluent.topic.consumer.reconnect.backoff.ms = 50 confluent.topic.consumer.request.timeout.ms = 30000 confluent.topic.consumer.retry.backoff.ms = 100 confluent.topic.consumer.sasl.client.callback.handler.class = null confluent.topic.consumer.sasl.jaas.config = null confluent.topic.consumer.sasl.kerberos.kinit.cmd = /usr/bin/kinit confluent.topic.consumer.sasl.kerberos.min.time.before.relogin = 60000 confluent.topic.consumer.sasl.kerberos.service.name = null confluent.topic.consumer.sasl.kerberos.ticket.renew.jitter = 0.05 confluent.topic.consumer.sasl.kerberos.ticket.renew.window.factor = 0.8 confluent.topic.consumer.sasl.login.callback.handler.class = null confluent.topic.consumer.sasl.login.class = null confluent.topic.consumer.sasl.login.connect.timeout.ms = null 
confluent.topic.consumer.sasl.login.read.timeout.ms = null confluent.topic.consumer.sasl.login.refresh.buffer.seconds = 300 confluent.topic.consumer.sasl.login.refresh.min.period.seconds = 60 confluent.topic.consumer.sasl.login.refresh.window.factor = 0.8 confluent.topic.consumer.sasl.login.refresh.window.jitter = 0.05 confluent.topic.consumer.sasl.login.retry.backoff.max.ms = 10000 confluent.topic.consumer.sasl.login.retry.backoff.ms = 100 confluent.topic.consumer.sasl.mechanism = GSSAPI confluent.topic.consumer.sasl.oauthbearer.clock.skew.seconds = 30 confluent.topic.consumer.sasl.oauthbearer.expected.audience = null confluent.topic.consumer.sasl.oauthbearer.expected.issuer = null confluent.topic.consumer.sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 confluent.topic.consumer.sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 confluent.topic.consumer.sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 confluent.topic.consumer.sasl.oauthbearer.jwks.endpoint.url = null confluent.topic.consumer.sasl.oauthbearer.scope.claim.name = scope confluent.topic.consumer.sasl.oauthbearer.sub.claim.name = sub confluent.topic.consumer.sasl.oauthbearer.token.endpoint.url = null confluent.topic.consumer.security.protocol = PLAINTEXT confluent.topic.consumer.security.providers = null confluent.topic.consumer.send.buffer.bytes = 131072 confluent.topic.consumer.session.timeout.ms = 45000 confluent.topic.consumer.socket.connection.setup.timeout.max.ms = 30000 confluent.topic.consumer.socket.connection.setup.timeout.ms = 10000 confluent.topic.consumer.ssl.cipher.suites = null confluent.topic.consumer.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] confluent.topic.consumer.ssl.endpoint.identification.algorithm = https confluent.topic.consumer.ssl.engine.factory.class = null confluent.topic.consumer.ssl.key.password = null confluent.topic.consumer.ssl.keymanager.algorithm = SunX509 confluent.topic.consumer.ssl.keystore.certificate.chain = null confluent.topic.consumer.ssl.keystore.key = null confluent.topic.consumer.ssl.keystore.location = null confluent.topic.consumer.ssl.keystore.password = null confluent.topic.consumer.ssl.keystore.type = JKS confluent.topic.consumer.ssl.protocol = TLSv1.3 confluent.topic.consumer.ssl.provider = null confluent.topic.consumer.ssl.secure.random.implementation = null confluent.topic.consumer.ssl.trustmanager.algorithm = PKIX confluent.topic.consumer.ssl.truststore.certificates = null confluent.topic.consumer.ssl.truststore.location = null confluent.topic.consumer.ssl.truststore.password = null confluent.topic.consumer.ssl.truststore.type = JKS confluent.topic.interceptor.classes = [] confluent.topic.metadata.max.age.ms = 300000 confluent.topic.metric.reporters = [] confluent.topic.metrics.num.samples = 2 confluent.topic.metrics.recording.level = INFO confluent.topic.metrics.sample.window.ms = 30000 confluent.topic.producer.acks = all confluent.topic.producer.auto.include.jmx.reporter = true confluent.topic.producer.batch.size = 16384 confluent.topic.producer.buffer.memory = 33554432 confluent.topic.producer.client.dns.lookup = use_all_dns_ips confluent.topic.producer.client.id = confluent.topic.producer.compression.type = none confluent.topic.producer.confluent.proxy.protocol.client.address = null confluent.topic.producer.confluent.proxy.protocol.client.port = null confluent.topic.producer.confluent.proxy.protocol.client.version = NONE confluent.topic.producer.connections.max.idle.ms = 540000 confluent.topic.producer.delivery.timeout.ms = 120000 
confluent.topic.producer.enable.idempotence = true confluent.topic.producer.interceptor.classes = [] confluent.topic.producer.linger.ms = 0 confluent.topic.producer.max.block.ms = 60000 confluent.topic.producer.max.in.flight.requests.per.connection = 5 confluent.topic.producer.max.request.size = 1048576 confluent.topic.producer.metadata.max.age.ms = 300000 confluent.topic.producer.metadata.max.idle.ms = 300000 confluent.topic.producer.metric.reporters = [] confluent.topic.producer.metrics.num.samples = 2 confluent.topic.producer.metrics.recording.level = INFO confluent.topic.producer.metrics.sample.window.ms = 30000 confluent.topic.producer.partitioner.adaptive.partitioning.enable = true confluent.topic.producer.partitioner.availability.timeout.ms = 0 confluent.topic.producer.partitioner.class = null confluent.topic.producer.partitioner.ignore.keys = false confluent.topic.producer.receive.buffer.bytes = 32768 confluent.topic.producer.reconnect.backoff.max.ms = 1000 confluent.topic.producer.reconnect.backoff.ms = 50 confluent.topic.producer.request.timeout.ms = 30000 confluent.topic.producer.retry.backoff.ms = 100 confluent.topic.producer.sasl.client.callback.handler.class = null confluent.topic.producer.sasl.jaas.config = null confluent.topic.producer.sasl.kerberos.kinit.cmd = /usr/bin/kinit confluent.topic.producer.sasl.kerberos.min.time.before.relogin = 60000 confluent.topic.producer.sasl.kerberos.service.name = null confluent.topic.producer.sasl.kerberos.ticket.renew.jitter = 0.05 confluent.topic.producer.sasl.kerberos.ticket.renew.window.factor = 0.8 confluent.topic.producer.sasl.login.callback.handler.class = null confluent.topic.producer.sasl.login.class = null confluent.topic.producer.sasl.login.connect.timeout.ms = null confluent.topic.producer.sasl.login.read.timeout.ms = null confluent.topic.producer.sasl.login.refresh.buffer.seconds = 300 confluent.topic.producer.sasl.login.refresh.min.period.seconds = 60 confluent.topic.producer.sasl.login.refresh.window.factor = 0.8 confluent.topic.producer.sasl.login.refresh.window.jitter = 0.05 confluent.topic.producer.sasl.login.retry.backoff.max.ms = 10000 confluent.topic.producer.sasl.login.retry.backoff.ms = 100 confluent.topic.producer.sasl.mechanism = GSSAPI confluent.topic.producer.sasl.oauthbearer.clock.skew.seconds = 30 confluent.topic.producer.sasl.oauthbearer.expected.audience = null confluent.topic.producer.sasl.oauthbearer.expected.issuer = null confluent.topic.producer.sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 confluent.topic.producer.sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 confluent.topic.producer.sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 confluent.topic.producer.sasl.oauthbearer.jwks.endpoint.url = null confluent.topic.producer.sasl.oauthbearer.scope.claim.name = scope confluent.topic.producer.sasl.oauthbearer.sub.claim.name = sub confluent.topic.producer.sasl.oauthbearer.token.endpoint.url = null confluent.topic.producer.security.protocol = PLAINTEXT confluent.topic.producer.security.providers = null confluent.topic.producer.send.buffer.bytes = 131072 confluent.topic.producer.socket.connection.setup.timeout.max.ms = 30000 confluent.topic.producer.socket.connection.setup.timeout.ms = 10000 confluent.topic.producer.ssl.cipher.suites = null confluent.topic.producer.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] confluent.topic.producer.ssl.endpoint.identification.algorithm = https confluent.topic.producer.ssl.engine.factory.class = null confluent.topic.producer.ssl.key.password = null 
confluent.topic.producer.ssl.keymanager.algorithm = SunX509 confluent.topic.producer.ssl.keystore.certificate.chain = null confluent.topic.producer.ssl.keystore.key = null confluent.topic.producer.ssl.keystore.location = null confluent.topic.producer.ssl.keystore.password = null confluent.topic.producer.ssl.keystore.type = JKS confluent.topic.producer.ssl.protocol = TLSv1.3 confluent.topic.producer.ssl.provider = null confluent.topic.producer.ssl.secure.random.implementation = null confluent.topic.producer.ssl.trustmanager.algorithm = PKIX confluent.topic.producer.ssl.truststore.certificates = null confluent.topic.producer.ssl.truststore.location = null confluent.topic.producer.ssl.truststore.password = null confluent.topic.producer.ssl.truststore.type = JKS confluent.topic.producer.transaction.timeout.ms = 60000 confluent.topic.producer.transactional.id = null confluent.topic.receive.buffer.bytes = 32768 confluent.topic.reconnect.backoff.max.ms = 1000 confluent.topic.reconnect.backoff.ms = 50 confluent.topic.replication.factor = 3 confluent.topic.request.timeout.ms = 30000 confluent.topic.retry.backoff.ms = 100 confluent.topic.sasl.client.callback.handler.class = null confluent.topic.sasl.jaas.config = null confluent.topic.sasl.kerberos.kinit.cmd = /usr/bin/kinit confluent.topic.sasl.kerberos.min.time.before.relogin = 60000 confluent.topic.sasl.kerberos.service.name = null confluent.topic.sasl.kerberos.ticket.renew.jitter = 0.05 confluent.topic.sasl.kerberos.ticket.renew.window.factor = 0.8 confluent.topic.sasl.login.callback.handler.class = null confluent.topic.sasl.login.class = null confluent.topic.sasl.login.connect.timeout.ms = null confluent.topic.sasl.login.read.timeout.ms = null confluent.topic.sasl.login.refresh.buffer.seconds = 300 confluent.topic.sasl.login.refresh.min.period.seconds = 60 confluent.topic.sasl.login.refresh.window.factor = 0.8 confluent.topic.sasl.login.refresh.window.jitter = 0.05 confluent.topic.sasl.login.retry.backoff.max.ms = 10000 confluent.topic.sasl.login.retry.backoff.ms = 100 confluent.topic.sasl.mechanism = GSSAPI confluent.topic.sasl.oauthbearer.clock.skew.seconds = 30 confluent.topic.sasl.oauthbearer.expected.audience = null confluent.topic.sasl.oauthbearer.expected.issuer = null confluent.topic.sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 confluent.topic.sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 confluent.topic.sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 confluent.topic.sasl.oauthbearer.jwks.endpoint.url = null confluent.topic.sasl.oauthbearer.scope.claim.name = scope confluent.topic.sasl.oauthbearer.sub.claim.name = sub confluent.topic.sasl.oauthbearer.token.endpoint.url = null confluent.topic.security.protocol = PLAINTEXT confluent.topic.security.providers = null confluent.topic.send.buffer.bytes = 131072 confluent.topic.socket.connection.setup.timeout.max.ms = 30000 confluent.topic.socket.connection.setup.timeout.ms = 10000 confluent.topic.ssl.cipher.suites = null confluent.topic.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] confluent.topic.ssl.endpoint.identification.algorithm = https confluent.topic.ssl.engine.factory.class = null confluent.topic.ssl.key.password = null confluent.topic.ssl.keymanager.algorithm = SunX509 confluent.topic.ssl.keystore.certificate.chain = null confluent.topic.ssl.keystore.key = null confluent.topic.ssl.keystore.location = null confluent.topic.ssl.keystore.password = null confluent.topic.ssl.keystore.type = JKS confluent.topic.ssl.protocol = TLSv1.3 confluent.topic.ssl.provider = 
null confluent.topic.ssl.secure.random.implementation = null confluent.topic.ssl.trustmanager.algorithm = PKIX confluent.topic.ssl.truststore.certificates = null confluent.topic.ssl.truststore.location = null confluent.topic.ssl.truststore.password = null confluent.topic.ssl.truststore.type = JKS connect.protocol = sessioned connections.max.idle.ms = 540000 connector.client.config.override.policy = All exactly.once.source.support = disabled group.id = connect-cluster header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter heartbeat.interval.ms = 3000 inter.worker.key.generation.algorithm = HmacSHA256 inter.worker.key.size = null inter.worker.key.ttl.ms = 3600000 inter.worker.signature.algorithm = HmacSHA256 inter.worker.verification.algorithms = [HmacSHA256] key.converter = class io.confluent.connect.json.JsonSchemaConverter listeners = [HTTP://0.0.0.0:8083] metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 offset.flush.interval.ms = 10000 offset.flush.timeout.ms = 5000 offset.storage.partitions = 25 offset.storage.replication.factor = 1 offset.storage.topic = connect-offsets plugin.path = [/u/kafka_BI/share/confluentinc-kafka-connect-jdbc, /u/kafka_BI/share/official_informix_connector, /u/kafka_BI/share/confluentinc-connect-transforms-1.4.3] rebalance.timeout.ms = 60000 receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 40000 response.http.headers.config = rest.advertised.host.name = null rest.advertised.listener = null rest.advertised.port = null rest.extension.classes = [] rest.servlet.initializor.classes = [] retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null scheduled.rebalance.max.delay.ms = 300000 security.protocol = PLAINTEXT send.buffer.bytes = 131072 session.timeout.ms = 10000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.client.auth = none ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm 
= PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS status.storage.partitions = 5 status.storage.replication.factor = 1 status.storage.topic = connect-status task.shutdown.graceful.timeout.ms = 5000 topic.creation.enable = true topic.tracking.allow.reset = true topic.tracking.enable = true value.converter = class io.confluent.connect.json.JsonSchemaConverter worker.sync.timeout.ms = 3000 worker.unsync.backoff.ms = 300000 (org.apache.kafka.connect.runtime.distributed.DistributedConfig:376) [2024-02-28 13:36:31,168] INFO Creating Kafka admin client (org.apache.kafka.connect.runtime.WorkerConfig:353) [2024-02-28 13:36:31,175] INFO AdminClientConfig values: auto.include.jmx.reporter = true bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] client.dns.lookup = use_all_dns_ips client.id = confluent.metrics.reporter.bootstrap.servers = kafka-0:9071 confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE confluent.use.controller.listener = false connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 host.resolver.class = class org.apache.kafka.clients.DefaultHostResolver metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS (org.apache.kafka.clients.admin.AdminClientConfig:376) [2024-02-28 13:36:31,454] WARN 
These configurations '[expose.internal.connect.endpoints, config.storage.topic, listeners, status.storage.topic, group.id, plugin.path, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, status.storage.replication.factor, value.converter.schemas.enable, offset.storage.replication.factor, offset.storage.topic, value.converter, key.converter]' were supplied but are not used yet. (org.apache.kafka.clients.admin.AdminClientConfig:385)
[2024-02-28 13:36:31,456] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119)
[2024-02-28 13:36:31,456] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120)
[2024-02-28 13:36:31,456] INFO Kafka startTimeMs: 1709109391455 (org.apache.kafka.common.utils.AppInfoParser:121)
[2024-02-28 13:36:32,623] INFO Kafka cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.connect.runtime.WorkerConfig:370)
[2024-02-28 13:36:32,624] INFO App info kafka.admin.client for adminclient-1 unregistered (org.apache.kafka.common.utils.AppInfoParser:83)
[2024-02-28 13:36:32,641] INFO Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:710)
[2024-02-28 13:36:32,641] INFO Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:714)
[2024-02-28 13:36:32,642] INFO Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:720)
[2024-02-28 13:36:33,077] INFO Logging initialized @17424ms to org.eclipse.jetty.util.log.Slf4jLog (org.eclipse.jetty.util.log:170)
[2024-02-28 13:36:33,173] INFO Added connector for HTTP://0.0.0.0:8083 (org.apache.kafka.connect.runtime.rest.RestServer:132)
[2024-02-28 13:36:33,176] INFO Initializing REST server (org.apache.kafka.connect.runtime.rest.RestServer:203)
[2024-02-28 13:36:33,228] INFO jetty-9.4.51.v20230217; built: 2023-02-17T08:19:37.309Z; git: b45c405e4544384de066f814ed42ae3dceacdd49; jvm 11.0.21+9-LTS (org.eclipse.jetty.server.Server:375)
[2024-02-28 13:36:33,278] INFO Started http_0.0.0.08083@6ae32ff0{HTTP/1.1, (http/1.1)}{0.0.0.0:8083} (org.eclipse.jetty.server.AbstractConnector:333)
[2024-02-28 13:36:33,279] INFO Started @17625ms (org.eclipse.jetty.server.Server:415)
[2024-02-28 13:36:33,332] INFO Advertised URI: http://0.0.0.0:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:386)
[2024-02-28 13:36:33,332] INFO REST server listening at http://0.0.0.0:8083/, advertising URL http://0.0.0.0:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:223)
[2024-02-28 13:36:33,333] INFO Advertised URI: http://0.0.0.0:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:386)
[2024-02-28 13:36:33,333] INFO REST admin endpoints at http://0.0.0.0:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:224)
[2024-02-28 13:36:33,333] INFO Advertised URI: http://0.0.0.0:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:386)
[2024-02-28 13:36:33,359] INFO Setting up All Policy for ConnectorClientConfigOverride.
This will allow all client configurations to be overridden (org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy:44) [2024-02-28 13:36:33,387] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:33,387] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:33,388] INFO Kafka startTimeMs: 1709109393387 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:33,426] INFO JsonConverterConfig values: converter.type = key decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false (org.apache.kafka.connect.json.JsonConverterConfig:376) [2024-02-28 13:36:33,430] INFO JsonConverterConfig values: converter.type = value decimal.format = BASE64 schemas.cache.size = 1000 schemas.enable = false (org.apache.kafka.connect.json.JsonConverterConfig:376) [2024-02-28 13:36:33,549] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:33,549] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:33,549] INFO Kafka startTimeMs: 1709109393549 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:33,554] INFO Kafka Connect distributed worker initialization took 15879ms (org.apache.kafka.connect.cli.ConnectDistributed:146) [2024-02-28 13:36:33,555] INFO Kafka Connect starting (org.apache.kafka.connect.runtime.Connect:50) [2024-02-28 13:36:33,557] INFO Initializing REST resources (org.apache.kafka.connect.runtime.rest.RestServer:228) [2024-02-28 13:36:33,558] INFO [Worker clientId=connect-1, groupId=connect-cluster] Herder starting (org.apache.kafka.connect.runtime.distributed.DistributedHerder:344) [2024-02-28 13:36:33,558] INFO Worker starting (org.apache.kafka.connect.runtime.Worker:260) [2024-02-28 13:36:33,558] INFO Starting KafkaOffsetBackingStore (org.apache.kafka.connect.storage.KafkaOffsetBackingStore:273) [2024-02-28 13:36:33,559] INFO Starting KafkaBasedLog with topic connect-offsets (org.apache.kafka.connect.util.KafkaBasedLog:280) [2024-02-28 13:36:33,561] INFO AdminClientConfig values: auto.include.jmx.reporter = true bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] client.dns.lookup = use_all_dns_ips client.id = connect-cluster--shared-admin confluent.metrics.reporter.bootstrap.servers = kafka-0:9071 confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE confluent.use.controller.listener = false connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 host.resolver.class = class org.apache.kafka.clients.DefaultHostResolver metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 
sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS (org.apache.kafka.clients.admin.AdminClientConfig:376) [2024-02-28 13:36:33,578] WARN These configurations '[expose.internal.connect.endpoints, config.storage.topic, listeners, metrics.context.connect.group.id, status.storage.topic, group.id, plugin.path, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, metrics.context.connect.kafka.cluster.id, status.storage.replication.factor, value.converter.schemas.enable, offset.storage.replication.factor, offset.storage.topic, value.converter, key.converter]' were supplied but are not used yet. (org.apache.kafka.clients.admin.AdminClientConfig:385) [2024-02-28 13:36:33,579] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:33,579] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:33,579] INFO Kafka startTimeMs: 1709109393579 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:33,631] INFO Adding admin resources to main listener (org.apache.kafka.connect.runtime.rest.RestServer:248) [2024-02-28 13:36:33,731] INFO DefaultSessionIdManager workerName=node0 (org.eclipse.jetty.server.session:334) [2024-02-28 13:36:33,731] INFO No SessionScavenger set, using defaults (org.eclipse.jetty.server.session:339) [2024-02-28 13:36:33,733] INFO node0 Scavenging every 660000ms (org.eclipse.jetty.server.session:132) Feb 28, 2024 1:36:34 PM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.RootResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.RootResource will be ignored. Feb 28, 2024 1:36:34 PM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. 
Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource will be ignored.
Feb 28, 2024 1:36:34 PM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource will be ignored.
Feb 28, 2024 1:36:34 PM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConfluentV1MetadataResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConfluentV1MetadataResource will be ignored.
Feb 28, 2024 1:36:34 PM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource will be ignored.
[2024-02-28 13:36:34,419] INFO HV000001: Hibernate Validator 6.1.7.Final (org.hibernate.validator.internal.util.Version:21)
[2024-02-28 13:36:34,572] INFO 172.20.0.2 - - [28/Feb/2024:08:36:34 +0000] "GET /connectors HTTP/1.1" 404 62 "-" "ReactorNetty/1.1.6" 49 (org.apache.kafka.connect.runtime.rest.RestServer:62)
Feb 28, 2024 1:36:34 PM org.glassfish.jersey.internal.Errors logErrors WARNING: The following warnings have been detected:
WARNING: The (sub)resource method listLoggers in org.apache.kafka.connect.runtime.rest.resources.LoggingResource contains empty path annotation.
WARNING: The (sub)resource method listConnectors in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation.
WARNING: The (sub)resource method createConnector in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation.
WARNING: The (sub)resource method listConnectorPlugins in org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource contains empty path annotation.
WARNING: The (sub)resource method serverInfo in org.apache.kafka.connect.runtime.rest.resources.RootResource contains empty path annotation.
[2024-02-28 13:36:34,850] INFO Started o.e.j.s.ServletContextHandler@7253c53{/,null,AVAILABLE} (org.eclipse.jetty.server.handler.ContextHandler:921)
[2024-02-28 13:36:34,851] INFO REST resources initialized; server is started and ready to handle requests (org.apache.kafka.connect.runtime.rest.RestServer:332)
[2024-02-28 13:36:34,851] INFO Kafka Connect started (org.apache.kafka.connect.runtime.Connect:56)
[2024-02-28 13:36:36,592] INFO [AdminClient clientId=connect-cluster--shared-admin] Node -3 disconnected. (org.apache.kafka.clients.NetworkClient:1049)
[2024-02-28 13:36:36,594] WARN [AdminClient clientId=connect-cluster--shared-admin] Connection to node -3 (/192.168.151.252:9092) could not be established. Broker may not be available.
(org.apache.kafka.clients.NetworkClient:870) [2024-02-28 13:36:36,667] INFO ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connect-cluster--offsets compression.type = none confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer (org.apache.kafka.clients.producer.ProducerConfig:376) [2024-02-28 13:36:36,720] WARN These configurations '[metrics.context.resource.version, group.id, metrics.context.resource.type, metrics.context.resource.commit.id, plugin.path, 
metrics.context.connect.kafka.cluster.id, status.storage.replication.factor, offset.storage.topic, value.converter, key.converter, expose.internal.connect.endpoints, config.storage.topic, listeners, metrics.context.connect.group.id, status.storage.topic, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, value.converter.schemas.enable, offset.storage.replication.factor]' were supplied but are not used yet. (org.apache.kafka.clients.producer.ProducerConfig:385) [2024-02-28 13:36:36,720] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:36,721] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:36,721] INFO Kafka startTimeMs: 1709109396720 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:36,731] INFO ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = connect-cluster--offsets client.rack = confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = connect-cluster group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers 
= null send.buffer.bytes = 131072 session.timeout.ms = 45000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:376) [2024-02-28 13:36:36,813] WARN These configurations '[expose.internal.connect.endpoints, config.storage.topic, listeners, metrics.context.connect.group.id, status.storage.topic, plugin.path, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, metrics.context.connect.kafka.cluster.id, status.storage.replication.factor, value.converter.schemas.enable, offset.storage.replication.factor, offset.storage.topic, value.converter, key.converter]' were supplied but are not used yet. (org.apache.kafka.clients.consumer.ConsumerConfig:385) [2024-02-28 13:36:36,814] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:36,814] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:36,814] INFO Kafka startTimeMs: 1709109396813 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:36,839] INFO [Consumer clientId=connect-cluster--offsets, groupId=connect-cluster] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:36,846] INFO [Consumer clientId=connect-cluster--offsets, groupId=connect-cluster] Assigned to partition(s): connect-offsets-0 (org.apache.kafka.clients.consumer.KafkaConsumer:1124) [2024-02-28 13:36:36,855] INFO [Consumer clientId=connect-cluster--offsets, groupId=connect-cluster] Seeking to earliest offset of partition connect-offsets-0 (org.apache.kafka.clients.consumer.internals.SubscriptionState:647) [2024-02-28 13:36:36,906] INFO 172.20.0.2 - - [28/Feb/2024:08:36:36 +0000] "GET /connectors HTTP/1.1" 200 2 "-" "ReactorNetty/1.1.6" 319 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:36,909] INFO [Consumer clientId=connect-cluster--offsets, groupId=connect-cluster] Resetting the last seen epoch of partition connect-offsets-0 to 0 since the associated topicId changed from null to YgXYjFuFQZ64FfG00I-mCQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:37,035] INFO Finished reading KafkaBasedLog for topic connect-offsets (org.apache.kafka.connect.util.KafkaBasedLog:337) [2024-02-28 13:36:37,036] INFO Started KafkaBasedLog for topic connect-offsets (org.apache.kafka.connect.util.KafkaBasedLog:339) [2024-02-28 13:36:37,037] INFO Finished reading offsets topic and starting KafkaOffsetBackingStore (org.apache.kafka.connect.storage.KafkaOffsetBackingStore:290) [2024-02-28 13:36:37,068] INFO LogEventsConfig values: confluent.event.logger.cloudevent.codec = binary confluent.event.logger.deduplicate.errors = false confluent.event.logger.deduplicate.errors.reset.time.ms = 43200000 confluent.event.logger.enable = false 
confluent.event.logger.exporter.class = class io.confluent.telemetry.events.exporter.kafka.EventKafkaExporter confluent.event.logger.exporter.kafka.producer.bootstrap.servers = confluent.event.logger.exporter.kafka.producer.client.id = confluent-connect-log-events-emitter-connect-cluster confluent.event.logger.exporter.kafka.topic.create = true confluent.event.logger.exporter.kafka.topic.name = confluent-connect-log-events confluent.event.logger.exporter.kafka.type = kafka (io.confluent.logevents.connect.LogEventsConfig:376) [2024-02-28 13:36:37,069] INFO Connect Log Events aren't enabled. (io.confluent.logevents.connect.LogEventsKafkaEmitter:65) [2024-02-28 13:36:37,069] INFO Worker started (org.apache.kafka.connect.runtime.Worker:278) [2024-02-28 13:36:37,069] INFO Starting KafkaBasedLog with topic connect-status (org.apache.kafka.connect.util.KafkaBasedLog:280) [2024-02-28 13:36:37,079] INFO ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connect-cluster--statuses compression.type = none confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 0 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null 
ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer (org.apache.kafka.clients.producer.ProducerConfig:376) [2024-02-28 13:36:37,088] WARN These configurations '[metrics.context.resource.version, group.id, metrics.context.resource.type, metrics.context.resource.commit.id, plugin.path, metrics.context.connect.kafka.cluster.id, status.storage.replication.factor, offset.storage.topic, value.converter, key.converter, expose.internal.connect.endpoints, config.storage.topic, listeners, metrics.context.connect.group.id, status.storage.topic, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, value.converter.schemas.enable, offset.storage.replication.factor]' were supplied but are not used yet. (org.apache.kafka.clients.producer.ProducerConfig:385) [2024-02-28 13:36:37,088] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:37,088] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:37,089] INFO Kafka startTimeMs: 1709109397088 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:37,091] INFO ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = connect-cluster--statuses client.rack = confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = connect-cluster group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null 
sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 45000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:376) [2024-02-28 13:36:37,094] INFO [Producer clientId=connect-cluster--statuses] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:37,100] WARN These configurations '[metrics.context.resource.version, metrics.context.resource.type, metrics.context.resource.commit.id, plugin.path, metrics.context.connect.kafka.cluster.id, status.storage.replication.factor, offset.storage.topic, value.converter, key.converter, expose.internal.connect.endpoints, config.storage.topic, listeners, metrics.context.connect.group.id, status.storage.topic, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, value.converter.schemas.enable, offset.storage.replication.factor]' were supplied but are not used yet. 
(org.apache.kafka.clients.consumer.ConsumerConfig:385) [2024-02-28 13:36:37,100] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:37,101] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:37,101] INFO Kafka startTimeMs: 1709109397100 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:37,107] INFO [Consumer clientId=connect-cluster--statuses, groupId=connect-cluster] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:37,108] INFO [Consumer clientId=connect-cluster--statuses, groupId=connect-cluster] Assigned to partition(s): connect-status-0 (org.apache.kafka.clients.consumer.KafkaConsumer:1124) [2024-02-28 13:36:37,109] INFO [Consumer clientId=connect-cluster--statuses, groupId=connect-cluster] Seeking to earliest offset of partition connect-status-0 (org.apache.kafka.clients.consumer.internals.SubscriptionState:647) [2024-02-28 13:36:37,118] INFO [Consumer clientId=connect-cluster--statuses, groupId=connect-cluster] Resetting the last seen epoch of partition connect-status-0 to 0 since the associated topicId changed from null to p6lXs05RQBCrJeXDaF3xUQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:37,224] INFO Finished reading KafkaBasedLog for topic connect-status (org.apache.kafka.connect.util.KafkaBasedLog:337) [2024-02-28 13:36:37,224] INFO Started KafkaBasedLog for topic connect-status (org.apache.kafka.connect.util.KafkaBasedLog:339) [2024-02-28 13:36:37,233] INFO Starting KafkaConfigBackingStore (org.apache.kafka.connect.storage.KafkaConfigBackingStore:363) [2024-02-28 13:36:37,233] INFO Starting KafkaBasedLog with topic connect-configs (org.apache.kafka.connect.util.KafkaBasedLog:280) [2024-02-28 13:36:37,244] INFO ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connect-cluster--configs compression.type = none confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 60000 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 
sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer (org.apache.kafka.clients.producer.ProducerConfig:376) [2024-02-28 13:36:37,252] WARN These configurations '[metrics.context.resource.version, group.id, metrics.context.resource.type, metrics.context.resource.commit.id, plugin.path, metrics.context.connect.kafka.cluster.id, status.storage.replication.factor, offset.storage.topic, value.converter, key.converter, expose.internal.connect.endpoints, config.storage.topic, listeners, metrics.context.connect.group.id, status.storage.topic, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, value.converter.schemas.enable, offset.storage.replication.factor]' were supplied but are not used yet. 
(org.apache.kafka.clients.producer.ProducerConfig:385) [2024-02-28 13:36:37,252] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:37,252] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:37,253] INFO Kafka startTimeMs: 1709109397252 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:37,254] INFO ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = connect-cluster--configs client.rack = confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = connect-cluster group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 45000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null 
ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:376) [2024-02-28 13:36:37,258] INFO [Producer clientId=connect-cluster--configs] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:37,262] WARN These configurations '[metrics.context.resource.version, metrics.context.resource.type, metrics.context.resource.commit.id, plugin.path, metrics.context.connect.kafka.cluster.id, status.storage.replication.factor, offset.storage.topic, value.converter, key.converter, expose.internal.connect.endpoints, config.storage.topic, listeners, metrics.context.connect.group.id, status.storage.topic, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, value.converter.schemas.enable, offset.storage.replication.factor]' were supplied but are not used yet. (org.apache.kafka.clients.consumer.ConsumerConfig:385) [2024-02-28 13:36:37,263] INFO Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:37,263] INFO Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:37,263] INFO Kafka startTimeMs: 1709109397263 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:39,430] INFO 172.20.0.2 - - [28/Feb/2024:08:36:39 +0000] "GET /connectors HTTP/1.1" 200 2 "-" "ReactorNetty/1.1.6" 8 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:39,597] INFO [Producer clientId=connect-cluster--offsets] Node -3 disconnected. (org.apache.kafka.clients.NetworkClient:1049) [2024-02-28 13:36:39,597] INFO [Consumer clientId=connect-cluster--configs, groupId=connect-cluster] Node -3 disconnected. (org.apache.kafka.clients.NetworkClient:1049) [2024-02-28 13:36:39,598] WARN [Producer clientId=connect-cluster--offsets] Connection to node -3 (/192.168.151.252:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient:870) [2024-02-28 13:36:39,598] WARN [Consumer clientId=connect-cluster--configs, groupId=connect-cluster] Connection to node -3 (/192.168.151.252:9092) could not be established. Broker may not be available. 
(org.apache.kafka.clients.NetworkClient:870)
[2024-02-28 13:36:39,598] WARN [Producer clientId=connect-cluster--offsets] Bootstrap broker 192.168.151.252:9092 (id: -3 rack: null) disconnected (org.apache.kafka.clients.NetworkClient:1191)
[2024-02-28 13:36:39,598] WARN [Consumer clientId=connect-cluster--configs, groupId=connect-cluster] Bootstrap broker 192.168.151.252:9092 (id: -3 rack: null) disconnected (org.apache.kafka.clients.NetworkClient:1191)
[2024-02-28 13:36:39,703] INFO [Producer clientId=connect-cluster--offsets] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287)
[2024-02-28 13:36:39,706] INFO [Consumer clientId=connect-cluster--configs, groupId=connect-cluster] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287)
[2024-02-28 13:36:39,707] INFO [Consumer clientId=connect-cluster--configs, groupId=connect-cluster] Assigned to partition(s): connect-configs-0 (org.apache.kafka.clients.consumer.KafkaConsumer:1124)
[2024-02-28 13:36:39,707] INFO [Consumer clientId=connect-cluster--configs, groupId=connect-cluster] Seeking to earliest offset of partition connect-configs-0 (org.apache.kafka.clients.consumer.internals.SubscriptionState:647)
[2024-02-28 13:36:39,718] INFO [Consumer clientId=connect-cluster--configs, groupId=connect-cluster] Resetting the last seen epoch of partition connect-configs-0 to 2 since the associated topicId changed from null to Al-pWSFaSz2bRsI3hN82uQ (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:39,726] INFO Successfully processed removal of connector 'ssource' (org.apache.kafka.connect.storage.KafkaConfigBackingStore:977)
[2024-02-28 13:36:39,748] INFO Successfully processed removal of connector 'ssource' (org.apache.kafka.connect.storage.KafkaConfigBackingStore:977)
[2024-02-28 13:36:39,751] INFO Successfully processed removal of connector 'new_sink' (org.apache.kafka.connect.storage.KafkaConfigBackingStore:977)
[2024-02-28 13:36:39,765] INFO Successfully processed removal of connector 'new_sourcess' (org.apache.kafka.connect.storage.KafkaConfigBackingStore:977)
[2024-02-28 13:36:39,770] INFO Successfully processed removal of connector 'sum_source' (org.apache.kafka.connect.storage.KafkaConfigBackingStore:977)
[2024-02-28 13:36:39,771] INFO Successfully processed removal of connector 'new_ssourcess' (org.apache.kafka.connect.storage.KafkaConfigBackingStore:977)
[2024-02-28 13:36:39,772] INFO Successfully processed removal of connector 'new_trans_source' (org.apache.kafka.connect.storage.KafkaConfigBackingStore:977)
[2024-02-28 13:36:39,773] INFO Successfully processed removal of connector 'neww_source' (org.apache.kafka.connect.storage.KafkaConfigBackingStore:977)
[2024-02-28 13:36:39,776] INFO Successfully processed removal of connector 'n_source' (org.apache.kafka.connect.storage.KafkaConfigBackingStore:977)
[2024-02-28 13:36:39,777] INFO Finished reading KafkaBasedLog for topic connect-configs (org.apache.kafka.connect.util.KafkaBasedLog:337)
[2024-02-28 13:36:39,778] INFO Started KafkaBasedLog for topic connect-configs (org.apache.kafka.connect.util.KafkaBasedLog:339)
[2024-02-28 13:36:39,778] INFO Started KafkaConfigBackingStore (org.apache.kafka.connect.storage.KafkaConfigBackingStore:387)
[2024-02-28 13:36:39,779] INFO [Worker clientId=connect-1, groupId=connect-cluster] Herder started (org.apache.kafka.connect.runtime.distributed.DistributedHerder:349)
[2024-02-28 13:36:42,602] INFO [Worker clientId=connect-1, groupId=connect-cluster] Node -3 disconnected.
(org.apache.kafka.clients.NetworkClient:1049) [2024-02-28 13:36:42,603] WARN [Worker clientId=connect-1, groupId=connect-cluster] Connection to node -3 (/192.168.151.252:9092) could not be established. Broker may not be available. (org.apache.kafka.clients.NetworkClient:870) [2024-02-28 13:36:42,603] WARN [Worker clientId=connect-1, groupId=connect-cluster] Bootstrap broker 192.168.151.252:9092 (id: -3 rack: null) disconnected (org.apache.kafka.clients.NetworkClient:1191) [2024-02-28 13:36:42,709] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition mcp_kafka_net_21-0 to 2 since the associated topicId changed from null to PdZ5a9iHTZerWcZvE84bZw (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,709] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition mcp_kafka_net_21.mcp.trans_requests_reps-0 to 2 since the associated topicId changed from null to 7frYXFYVRvqqhwO3t6BbKQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,710] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition connect-configs-0 to 2 since the associated topicId changed from null to Al-pWSFaSz2bRsI3hN82uQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,710] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition connect-offsets-0 to 0 since the associated topicId changed from null to YgXYjFuFQZ64FfG00I-mCQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,710] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition mcp_kafka_net_21.kafka.cards-0 to 0 since the associated topicId changed from null to TiWC7ioKT2i1Nmd-Y7munQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,710] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition _schema-0 to 27 since the associated topicId changed from null to FPd-0jKtSo2qrbC0ZxbIgg (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,710] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition inst0016_net_02.mcp.trans_requests_rep-0 to 2 since the associated topicId changed from null to 3VOWNEZySXuJQ75INno7-Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,711] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition _schema_encoders-0 to 27830 since the associated topicId changed from null to GIPLf5uITHGk7b6dIfa0VA (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,711] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition informix-gpdb-sink-errors-0 to 2 since the associated topicId changed from null to uU3J730MRdKGiwUZff_bug (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,711] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition inst0016_net_02.mcp.trans_requests_cdc-0 to 2 since the associated topicId changed from null to Db611dBBQVyLsM-ZrV4UrQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,711] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition connect-status-0 to 0 since the associated topicId changed from null to p6lXs05RQBCrJeXDaF3xUQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,711] INFO [Worker clientId=connect-1, groupId=connect-cluster] 
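
The long run of "Resetting the last seen epoch ... since the associated topicId changed from null to ..." messages here and below is routine: on the first metadata response after startup the client learns each topic's ID and leader epoch, and it logs that once per partition it tracks. To cross-check the IDs the worker reports, a short sketch with the AdminClient, assuming a 3.1+ client for allTopicNames() and the broker address seen above:

    import java.util.List;
    import java.util.Map;
    import java.util.Properties;
    import org.apache.kafka.clients.admin.Admin;
    import org.apache.kafka.clients.admin.AdminClientConfig;
    import org.apache.kafka.clients.admin.TopicDescription;

    public class TopicIdCheck {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.151.203:9092"); // assumed reachable broker
            try (Admin admin = Admin.create(props)) {
                Map<String, TopicDescription> topics = admin
                        .describeTopics(List.of("connect-configs", "connect-offsets", "connect-status"))
                        .allTopicNames()   // available on 3.1+ clients
                        .get();
                topics.forEach((name, description) ->
                        System.out.println(name + " topicId=" + description.topicId()));
            }
        }
    }
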
Resetting the last seen epoch of partition inst0016_net_02.mcpdev.cards-0 to 2 since the associated topicId changed from null to Ms7mpQSmTPuxI6mVN6VtOQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,712] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition _schemas-0 to 27830 since the associated topicId changed from null to OiHRd-6RSnqmMoTCIomOig (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,712] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition mcp_kafka_net_21.mcp.dbz_signal-0 to 2 since the associated topicId changed from null to 0tSBNPGxSmCN99VedR__vQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,712] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition _confluent-command-0 to 27819 since the associated topicId changed from null to 4McAHPEJSSGs5eZYUFGhjw (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,712] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition inst0016_net_02.mcp.trans_requests_reps-0 to 0 since the associated topicId changed from null to xmO7pruTS8GMYHgtZwxalA (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,713] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition inst0016_net_02.mcpdev.sumd_card_funds-0 to 2 since the associated topicId changed from null to ogCra3reRfagDd4PEt0qSg (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,713] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition inst0016_net_02-0 to 2 since the associated topicId changed from null to sVmOMJBQTFmOCDAKwgX8Ag (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,713] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition inst0016_net_02.mcpdev.trans_requests-0 to 0 since the associated topicId changed from null to QxxJTb_BQc6oSGL6G9iNFA (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,713] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition mcp_kafka_net_21.mcp.trans_requests_rep-0 to 2 since the associated topicId changed from null to vIvfTwAMSF6V2BjZ4w8H5A (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,713] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition informixschemahistory-0 to 0 since the associated topicId changed from null to I0lhKqlDRS2y6SwdunOftw (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,714] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition _confluent-telemetry-metrics-0 to 24 since the associated topicId changed from null to gg1xeBnKSaqlX4t4VFQxUA (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,714] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-30 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,714] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-12 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,714] INFO [Worker clientId=connect-1, 
groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-19 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,714] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-47 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,715] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-5 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,715] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-13 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,715] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-44 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,715] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-40 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,716] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-22 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,716] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-46 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,716] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-15 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,716] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-9 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,716] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-6 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,717] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-36 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,717] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-32 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,717] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch 
of partition __consumer_offsets-0 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,717] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-17 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,717] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-43 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,717] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-26 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,718] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-49 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,718] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-33 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,718] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-16 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,718] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-3 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,718] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-20 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,719] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-42 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,719] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-23 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,719] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-29 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,719] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-31 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,719] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-11 to 27822 since 
the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,720] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-18 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,720] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-4 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,720] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-25 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,720] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-35 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,720] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-45 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,720] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-39 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,721] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-10 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,721] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-14 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,721] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-38 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,721] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-7 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,722] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-37 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,722] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-1 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:42,722] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-8 to 10 since the associated topicId changed from null to 
gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,722] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-27 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,722] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-48 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,723] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-34 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,723] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-2 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,723] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-21 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,723] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-41 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,723] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-24 to 10 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,724] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition __consumer_offsets-28 to 27822 since the associated topicId changed from null to gd_QKR7lQoSHL2itGBTk0Q (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,724] INFO [Worker clientId=connect-1, groupId=connect-cluster] Resetting the last seen epoch of partition mcp_kafka_net_22-0 to 2 since the associated topicId changed from null to l3_nyR_5QT6KzGPzkxXYeg (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:42,727] INFO [Worker clientId=connect-1, groupId=connect-cluster] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287)
[2024-02-28 13:36:42,733] INFO [Worker clientId=connect-1, groupId=connect-cluster] Discovered group coordinator 192.168.151.203:9092 (id: 2147483644 rack: null) (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:896)
[2024-02-28 13:36:42,737] INFO [Worker clientId=connect-1, groupId=connect-cluster] Rebalance started (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:231)
[2024-02-28 13:36:42,737] INFO [Worker clientId=connect-1, groupId=connect-cluster] (Re-)joining group (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:566)
[2024-02-28 13:36:42,756] INFO [Worker clientId=connect-1, groupId=connect-cluster] Request joining group due to: rebalance failed due to 'The group member needs to have a valid member id before actually entering a consumer group.' (MemberIdRequiredException) (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:1062)
[2024-02-28 13:36:42,756] INFO [Worker clientId=connect-1, groupId=connect-cluster] (Re-)joining group (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:566)
[2024-02-28 13:36:42,760] INFO [Worker clientId=connect-1, groupId=connect-cluster] Successfully joined group with generation Generation{generationId=1, memberId='connect-1-828f1868-1fac-4332-ba62-b9c37f8e9d19', protocol='sessioned'} (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:627)
[2024-02-28 13:36:42,829] INFO [Worker clientId=connect-1, groupId=connect-cluster] Successfully synced group in generation Generation{generationId=1, memberId='connect-1-828f1868-1fac-4332-ba62-b9c37f8e9d19', protocol='sessioned'} (org.apache.kafka.connect.runtime.distributed.WorkerCoordinator:802)
[2024-02-28 13:36:42,830] INFO [Worker clientId=connect-1, groupId=connect-cluster] Joined group at generation 1 with protocol version 2 and got assignment: Assignment{error=0, leader='connect-1-828f1868-1fac-4332-ba62-b9c37f8e9d19', leaderUrl='http://0.0.0.0:8083/', offset=1141, connectorIds=[transss, sink_new], taskIds=[transss-0, sink_new-0], revokedConnectorIds=[], revokedTaskIds=[], delay=0} with rebalance delay: 0 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:2304)
[2024-02-28 13:36:42,833] WARN [Worker clientId=connect-1, groupId=connect-cluster] Catching up to assignment's config offset. (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1558)
[2024-02-28 13:36:42,834] INFO [Worker clientId=connect-1, groupId=connect-cluster] Current config state offset -1 is behind group assignment 1141, reading to end of config log (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1623)
[2024-02-28 13:36:42,838] INFO [Worker clientId=connect-1, groupId=connect-cluster] Finished reading to end of log and updated config snapshot, new config log offset: 1141 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1648)
[2024-02-28 13:36:42,838] INFO [Worker clientId=connect-1, groupId=connect-cluster] Starting connectors and tasks using config offset 1141 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1698)
[2024-02-28 13:36:42,841] INFO [Worker clientId=connect-1, groupId=connect-cluster] Starting task transss-0 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1740)
[2024-02-28 13:36:42,842] INFO [Worker clientId=connect-1, groupId=connect-cluster] Starting connector sink_new (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1815)
[2024-02-28 13:36:42,841] INFO [Worker clientId=connect-1, groupId=connect-cluster] Starting connector transss (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1815)
[2024-02-28 13:36:42,842] INFO [Worker clientId=connect-1, groupId=connect-cluster] Starting task sink_new-0 (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1740)
[2024-02-28 13:36:42,858] INFO [sink_new|worker] Creating connector sink_new of type io.confluent.connect.jdbc.JdbcSinkConnector (org.apache.kafka.connect.runtime.Worker:366)
[2024-02-28 13:36:42,858] INFO [transss|worker] Creating connector transss of type io.debezium.connector.informix.InformixConnector (org.apache.kafka.connect.runtime.Worker:366)
[2024-02-28 13:36:42,866] INFO [transss|task-0] Creating task transss-0 (org.apache.kafka.connect.runtime.Worker:761)
[2024-02-28 13:36:42,866] INFO [sink_new|task-0] Creating task sink_new-0
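
The assignment above hands this worker both connectors (connectorIds=[transss, sink_new]) and their single tasks, and the leaderUrl shows the REST listener bound on port 8083; the MemberIdRequiredException on the first join attempt is the normal first round of the group protocol. Once startup settles, connector and task state can be confirmed through the Connect REST API. A small sketch with JDK 11's HttpClient, assuming the worker is reachable as localhost:8083:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class ConnectStatusCheck {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            // Connector names taken from the assignment above; the host is an assumption.
            for (String name : new String[] {"transss", "sink_new"}) {
                HttpRequest request = HttpRequest.newBuilder()
                        .uri(URI.create("http://localhost:8083/connectors/" + name + "/status"))
                        .GET()
                        .build();
                HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
                System.out.println(name + " -> " + response.statusCode() + " " + response.body());
            }
        }
    }
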
(org.apache.kafka.connect.runtime.Worker:761) [2024-02-28 13:36:42,885] INFO [transss|task-0] ConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss predicates = [] tasks.max = 1 transforms = [unwrap, TSF1] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig:376) [2024-02-28 13:36:42,903] INFO [sink_new|task-0] ConnectorConfig values: config.action.reload = restart connector.class = io.confluent.connect.jdbc.JdbcSinkConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = sink_new predicates = [] tasks.max = 1 transforms = [unwrap, route, TSF8] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig:376) [2024-02-28 13:36:42,945] INFO [transss|task-0] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss predicates = [] tasks.max = 1 transforms = [unwrap, TSF1] transforms.TSF1.negate = false transforms.TSF1.offset.field = null transforms.TSF1.partition.field = null transforms.TSF1.predicate = transforms.TSF1.static.field = instance_id transforms.TSF1.static.value = 150 transforms.TSF1.timestamp.field = null transforms.TSF1.topic.field = null transforms.TSF1.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:42,946] INFO [sink_new|task-0] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.confluent.connect.jdbc.JdbcSinkConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = sink_new predicates = [] tasks.max = 1 transforms = [unwrap, route, TSF8] transforms.TSF8.negate = false transforms.TSF8.offset.field = null transforms.TSF8.partition.field = null transforms.TSF8.predicate = transforms.TSF8.static.field = null transforms.TSF8.static.value = null transforms.TSF8.timestamp.field = rec_sync_created_at 
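
In the transss configuration above, TSF1 is an InsertField$Value transform that stamps a literal instance_id = 150 onto every record value after Debezium's ExtractNewRecordState unwrap. A standalone sketch of that behaviour using the same transform class and settings; the record schema, field and topic below are invented for illustration:

    import java.util.Map;
    import org.apache.kafka.connect.data.Schema;
    import org.apache.kafka.connect.data.SchemaBuilder;
    import org.apache.kafka.connect.data.Struct;
    import org.apache.kafka.connect.source.SourceRecord;
    import org.apache.kafka.connect.transforms.InsertField;

    public class InsertFieldSketch {
        public static void main(String[] args) {
            // Hypothetical unwrapped row, standing in for a change event after ExtractNewRecordState.
            Schema schema = SchemaBuilder.struct().name("row").field("trans_id", Schema.INT64_SCHEMA).build();
            Struct value = new Struct(schema).put("trans_id", 42L);
            SourceRecord record = new SourceRecord(null, null,
                    "mcp_kafka_net_21.mcp.trans_requests_reps", schema, value);

            InsertField.Value<SourceRecord> tsf1 = new InsertField.Value<>();
            tsf1.configure(Map.of("static.field", "instance_id", "static.value", "150")); // same as transforms.TSF1.*
            SourceRecord out = tsf1.apply(record);
            System.out.println(((Struct) out.value()).get("instance_id")); // prints 150 (inserted as an optional string)
            tsf1.close();
        }
    }
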
transforms.TSF8.topic.field = null transforms.TSF8.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.route.negate = false transforms.route.predicate = transforms.route.regex = ([^.]+)\.([^.]+)\.([^.]+) transforms.route.replacement = $3 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:42,955] INFO [transss|task-0] TaskConfig values: task.class = class io.debezium.connector.informix.InformixConnectorTask (org.apache.kafka.connect.runtime.TaskConfig:376) [2024-02-28 13:36:42,961] INFO [sink_new|task-0] TaskConfig values: task.class = class io.confluent.connect.jdbc.sink.JdbcSinkTask (org.apache.kafka.connect.runtime.TaskConfig:376) [2024-02-28 13:36:42,964] INFO [sink_new|task-0] Instantiated task sink_new-0 with version 10.7.3-5 of type io.confluent.connect.jdbc.sink.JdbcSinkTask (org.apache.kafka.connect.runtime.Worker:775) [2024-02-28 13:36:42,969] INFO [transss|task-0] Instantiated task transss-0 with version 2.6.0.Alpha2 of type io.debezium.connector.informix.InformixConnectorTask (org.apache.kafka.connect.runtime.Worker:775) [2024-02-28 13:36:42,971] INFO [sink_new|worker] SinkConnectorConfig values: config.action.reload = restart connector.class = io.confluent.connect.jdbc.JdbcSinkConnector errors.deadletterqueue.context.headers.enable = true errors.deadletterqueue.topic.name = informix-gpdb-sink-errors errors.deadletterqueue.topic.replication.factor = 1 errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = sink_new predicates = [] tasks.max = 1 topics = [] topics.regex = mcp_kafka_net_21.mcp.trans_requests_reps transforms = [unwrap, route, TSF8] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.SinkConnectorConfig:376) [2024-02-28 13:36:42,972] INFO [sink_new|task-0] AvroConverterConfig values: auto.register.schemas = true basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy 
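
The route transform in the sink_new configuration is a RegexRouter that rewrites each incoming topic name to its last dot-separated segment, so records read from mcp_kafka_net_21.mcp.trans_requests_reps arrive at the JDBC sink under the short name trans_requests_reps, which is presumably what it uses to resolve the target table. The same pattern and replacement in plain java.util.regex:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class RouteSketch {
        public static void main(String[] args) {
            // Same pattern and replacement as transforms.route.* above.
            Pattern route = Pattern.compile("([^.]+)\\.([^.]+)\\.([^.]+)");
            Matcher matcher = route.matcher("mcp_kafka_net_21.mcp.trans_requests_reps");
            if (matcher.matches()) {
                System.out.println(matcher.replaceFirst("$3")); // prints trans_requests_reps
            }
        }
    }
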
latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.connect.avro.AvroConverterConfig:376) [2024-02-28 13:36:42,973] INFO [sink_new|worker] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.confluent.connect.jdbc.JdbcSinkConnector errors.deadletterqueue.context.headers.enable = true errors.deadletterqueue.topic.name = informix-gpdb-sink-errors errors.deadletterqueue.topic.replication.factor = 1 errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = sink_new predicates = [] tasks.max = 1 topics = [] topics.regex = mcp_kafka_net_21.mcp.trans_requests_reps transforms = [unwrap, route, TSF8] transforms.TSF8.negate = false transforms.TSF8.offset.field = null transforms.TSF8.partition.field = null transforms.TSF8.predicate = transforms.TSF8.static.field = null transforms.TSF8.static.value = null transforms.TSF8.timestamp.field = rec_sync_created_at transforms.TSF8.topic.field = null transforms.TSF8.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.route.negate = false transforms.route.predicate = transforms.route.regex = ([^.]+)\.([^.]+)\.([^.]+) transforms.route.replacement = $3 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 
13:36:42,972] INFO [transss|task-0] AvroConverterConfig values: auto.register.schemas = true basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.connect.avro.AvroConverterConfig:376) [2024-02-28 13:36:42,981] INFO [sink_new|worker] Instantiated connector sink_new with version 10.7.3-5 of type class io.confluent.connect.jdbc.JdbcSinkConnector (org.apache.kafka.connect.runtime.Worker:399) [2024-02-28 13:36:42,983] INFO [sink_new|worker] Finished creating connector sink_new (org.apache.kafka.connect.runtime.Worker:420) [2024-02-28 13:36:43,005] INFO [Producer clientId=connect-cluster--statuses] Resetting the last seen epoch of partition connect-status-0 to 0 since the associated topicId changed from null to p6lXs05RQBCrJeXDaF3xUQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:43,053] INFO [transss|worker] SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.groups = [] transaction.boundary = poll 
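
Because sink_new runs with errors.tolerance = all and a dead letter queue (errors.deadletterqueue.topic.name = informix-gpdb-sink-errors, with error context headers enabled), records it cannot convert or deliver are parked on that topic instead of failing the task. A quick consumer sketch for inspecting them; the bootstrap address and group id are assumptions, and the error details arrive in headers prefixed __connect.errors.:

    import java.time.Duration;
    import java.util.List;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.ByteArrayDeserializer;

    public class DlqPeek {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.151.203:9092"); // assumed reachable broker
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "dlq-peek");                      // hypothetical group id
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
            try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(List.of("informix-gpdb-sink-errors"));
                for (ConsumerRecord<byte[], byte[]> rec : consumer.poll(Duration.ofSeconds(5))) {
                    System.out.println("offset " + rec.offset());
                    rec.headers().forEach(h -> System.out.println("  " + h.key() + " = "
                            + (h.value() == null ? "null" : new String(h.value()))));
                }
            }
        }
    }
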
transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.SourceConnectorConfig:376) [2024-02-28 13:36:43,054] INFO [transss|worker] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] transforms.TSF1.negate = false transforms.TSF1.offset.field = null transforms.TSF1.partition.field = null transforms.TSF1.predicate = transforms.TSF1.static.field = instance_id transforms.TSF1.static.value = 150 transforms.TSF1.timestamp.field = null transforms.TSF1.topic.field = null transforms.TSF1.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:43,060] INFO [transss|worker] EnrichedSourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.default.exclude = [] topic.creation.default.include = [.*] topic.creation.default.partitions = 1 topic.creation.default.replication.factor = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.SourceConnectorConfig$EnrichedSourceConnectorConfig:376) [2024-02-28 13:36:43,061] INFO [transss|worker] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.default.exclude = [] topic.creation.default.include = [.*] topic.creation.default.partitions = 1 topic.creation.default.replication.factor = 1 
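
The EnrichedSourceConnectorConfig above shows the default topic.creation group with include = [.*], partitions = 1 and replication.factor = 1, so any missing topic the transss source writes to will be auto-created by Connect with those settings, provided topic auto-creation is enabled on the worker. For reference, the equivalent manual creation with the AdminClient; the topic name is a placeholder:

    import java.util.List;
    import java.util.Properties;
    import org.apache.kafka.clients.admin.Admin;
    import org.apache.kafka.clients.admin.AdminClientConfig;
    import org.apache.kafka.clients.admin.NewTopic;

    public class CreateWithConnectDefaults {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.151.203:9092"); // assumed reachable broker
            try (Admin admin = Admin.create(props)) {
                // One partition, replication factor 1, matching topic.creation.default.* above.
                admin.createTopics(List.of(new NewTopic("example.cdc.topic", 1, (short) 1))).all().get();
            }
        }
    }
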
topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] transforms.TSF1.negate = false transforms.TSF1.offset.field = null transforms.TSF1.partition.field = null transforms.TSF1.predicate = transforms.TSF1.static.field = instance_id transforms.TSF1.static.value = 150 transforms.TSF1.timestamp.field = null transforms.TSF1.topic.field = null transforms.TSF1.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:43,071] INFO [transss|worker] Instantiated connector transss with version 2.6.0.Alpha2 of type class io.debezium.connector.informix.InformixConnector (org.apache.kafka.connect.runtime.Worker:399) [2024-02-28 13:36:43,071] INFO [transss|worker] Finished creating connector transss (org.apache.kafka.connect.runtime.Worker:420) [2024-02-28 13:36:43,208] INFO [transss|task-0] KafkaAvroSerializerConfig values: auto.register.schemas = true avro.reflection.allow.null = false avro.remove.java.properties = false avro.use.logical.type.converters = false basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null 
schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroSerializerConfig:376) [2024-02-28 13:36:43,208] INFO [sink_new|task-0] KafkaAvroSerializerConfig values: auto.register.schemas = true avro.reflection.allow.null = false avro.remove.java.properties = false avro.use.logical.type.converters = false basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroSerializerConfig:376) [2024-02-28 13:36:43,703] INFO [sink_new|task-0] KafkaAvroDeserializerConfig values: auto.register.schemas = true avro.reflection.allow.null = false avro.use.logical.type.converters = false basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null 
bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] specific.avro.key.type = null specific.avro.reader = false specific.avro.value.type = null use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroDeserializerConfig:376) [2024-02-28 13:36:43,703] INFO [transss|task-0] KafkaAvroDeserializerConfig values: auto.register.schemas = true avro.reflection.allow.null = false avro.use.logical.type.converters = false basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https 
schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] specific.avro.key.type = null specific.avro.reader = false specific.avro.value.type = null use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroDeserializerConfig:376) [2024-02-28 13:36:43,784] INFO [transss|task-0] AvroDataConfig values: allow.optional.map.keys = false connect.meta.data = true discard.type.doc.default = false enhanced.avro.schema.support = false generalized.sum.type.support = false ignore.default.for.nullables = false schemas.cache.config = 1000 scrub.invalid.names = false (io.confluent.connect.avro.AvroDataConfig:376) [2024-02-28 13:36:43,784] INFO [sink_new|task-0] AvroDataConfig values: allow.optional.map.keys = false connect.meta.data = true discard.type.doc.default = false enhanced.avro.schema.support = false generalized.sum.type.support = false ignore.default.for.nullables = false schemas.cache.config = 1000 scrub.invalid.names = false (io.confluent.connect.avro.AvroDataConfig:376) [2024-02-28 13:36:43,786] INFO [transss|task-0] AvroConverterConfig values: auto.register.schemas = true basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null 
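
Both converters point at schema.registry.url = http://192.168.151.201:8081 and use TopicNameStrategy for subject names, so each topic's schemas should appear in the registry as <topic>-key and <topic>-value subjects. A minimal reachability check against the registry's REST API (GET /subjects), again with JDK 11's HttpClient:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class SchemaRegistryCheck {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://192.168.151.201:8081/subjects")) // registry URL from the converter config
                    .GET()
                    .build();
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            // With TopicNameStrategy, expect entries like <topic>-key and <topic>-value.
            System.out.println(response.statusCode() + " " + response.body());
        }
    }
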
schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.connect.avro.AvroConverterConfig:376) [2024-02-28 13:36:43,786] INFO [sink_new|task-0] AvroConverterConfig values: auto.register.schemas = true basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.connect.avro.AvroConverterConfig:376) [2024-02-28 13:36:43,788] INFO [transss|task-0] KafkaAvroSerializerConfig values: auto.register.schemas = true avro.reflection.allow.null = false avro.remove.java.properties = false avro.use.logical.type.converters = false basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = 
STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroSerializerConfig:376) [2024-02-28 13:36:43,788] INFO [sink_new|task-0] KafkaAvroSerializerConfig values: auto.register.schemas = true avro.reflection.allow.null = false avro.remove.java.properties = false avro.use.logical.type.converters = false basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] 
schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroSerializerConfig:376) [2024-02-28 13:36:43,803] INFO [transss|task-0] KafkaAvroDeserializerConfig values: auto.register.schemas = true avro.reflection.allow.null = false avro.use.logical.type.converters = false basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] specific.avro.key.type = null specific.avro.reader = false specific.avro.value.type = null use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class 
io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroDeserializerConfig:376) [2024-02-28 13:36:43,804] INFO [sink_new|task-0] KafkaAvroDeserializerConfig values: auto.register.schemas = true avro.reflection.allow.null = false avro.use.logical.type.converters = false basic.auth.credentials.source = URL basic.auth.user.info = [hidden] bearer.auth.cache.expiry.buffer.seconds = 300 bearer.auth.client.id = null bearer.auth.client.secret = null bearer.auth.credentials.source = STATIC_TOKEN bearer.auth.custom.provider.class = null bearer.auth.identity.pool.id = null bearer.auth.issuer.endpoint.url = null bearer.auth.logical.cluster = null bearer.auth.scope = null bearer.auth.scope.claim.name = scope bearer.auth.sub.claim.name = sub bearer.auth.token = [hidden] context.name.strategy = class io.confluent.kafka.serializers.context.NullContextNameStrategy http.connect.timeout.ms = 60000 http.read.timeout.ms = 60000 id.compatibility.strict = true key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy latest.cache.size = 1000 latest.cache.ttl.sec = -1 latest.compatibility.strict = true max.schemas.per.subject = 1000 normalize.schemas = false proxy.host = proxy.port = -1 rule.actions = [] rule.executors = [] rule.service.loader.enable = true schema.format = null schema.reflection = false schema.registry.basic.auth.user.info = [hidden] schema.registry.ssl.cipher.suites = null schema.registry.ssl.enabled.protocols = [TLSv1.2, TLSv1.3] schema.registry.ssl.endpoint.identification.algorithm = https schema.registry.ssl.engine.factory.class = null schema.registry.ssl.key.password = null schema.registry.ssl.keymanager.algorithm = SunX509 schema.registry.ssl.keystore.certificate.chain = null schema.registry.ssl.keystore.key = null schema.registry.ssl.keystore.location = null schema.registry.ssl.keystore.password = null schema.registry.ssl.keystore.type = JKS schema.registry.ssl.protocol = TLSv1.3 schema.registry.ssl.provider = null schema.registry.ssl.secure.random.implementation = null schema.registry.ssl.trustmanager.algorithm = PKIX schema.registry.ssl.truststore.certificates = null schema.registry.ssl.truststore.location = null schema.registry.ssl.truststore.password = null schema.registry.ssl.truststore.type = JKS schema.registry.url = [http://192.168.151.201:8081] specific.avro.key.type = null specific.avro.reader = false specific.avro.value.type = null use.latest.version = false use.latest.with.metadata = null use.schema.id = -1 value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy (io.confluent.kafka.serializers.KafkaAvroDeserializerConfig:376) [2024-02-28 13:36:43,816] INFO [transss|task-0] AvroDataConfig values: allow.optional.map.keys = false connect.meta.data = true discard.type.doc.default = false enhanced.avro.schema.support = false generalized.sum.type.support = false ignore.default.for.nullables = false schemas.cache.config = 1000 scrub.invalid.names = false (io.confluent.connect.avro.AvroDataConfig:376) [2024-02-28 13:36:43,816] INFO [transss|task-0] Set up the key converter class io.confluent.connect.avro.AvroConverter for task transss-0 using the connector config (org.apache.kafka.connect.runtime.Worker:793) [2024-02-28 13:36:43,817] INFO [sink_new|task-0] AvroDataConfig values: allow.optional.map.keys = false connect.meta.data = true discard.type.doc.default = false enhanced.avro.schema.support = false generalized.sum.type.support = false ignore.default.for.nullables = false 
schemas.cache.config = 1000 scrub.invalid.names = false (io.confluent.connect.avro.AvroDataConfig:376) [2024-02-28 13:36:43,817] INFO [transss|task-0] Set up the value converter class io.confluent.connect.avro.AvroConverter for task transss-0 using the connector config (org.apache.kafka.connect.runtime.Worker:799) [2024-02-28 13:36:43,817] INFO [sink_new|task-0] Set up the key converter class io.confluent.connect.avro.AvroConverter for task sink_new-0 using the connector config (org.apache.kafka.connect.runtime.Worker:793) [2024-02-28 13:36:43,817] INFO [sink_new|task-0] Set up the value converter class io.confluent.connect.avro.AvroConverter for task sink_new-0 using the connector config (org.apache.kafka.connect.runtime.Worker:799) [2024-02-28 13:36:43,818] INFO [transss|task-0] Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task transss-0 using the worker config (org.apache.kafka.connect.runtime.Worker:805) [2024-02-28 13:36:43,818] INFO [sink_new|task-0] Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task sink_new-0 using the worker config (org.apache.kafka.connect.runtime.Worker:805) [2024-02-28 13:36:43,828] INFO [transss|task-0] SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.SourceConnectorConfig:376) [2024-02-28 13:36:43,831] INFO [transss|task-0] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] transforms.TSF1.negate = false transforms.TSF1.offset.field = null transforms.TSF1.partition.field = null transforms.TSF1.predicate = transforms.TSF1.static.field = instance_id transforms.TSF1.static.value = 150 transforms.TSF1.timestamp.field = null transforms.TSF1.topic.field = null transforms.TSF1.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = 
class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:43,832] INFO [transss|task-0] EnrichedSourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.default.exclude = [] topic.creation.default.include = [.*] topic.creation.default.partitions = 1 topic.creation.default.replication.factor = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.SourceConnectorConfig$EnrichedSourceConnectorConfig:376) [2024-02-28 13:36:43,833] INFO [transss|task-0] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.default.exclude = [] topic.creation.default.include = [.*] topic.creation.default.partitions = 1 topic.creation.default.replication.factor = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] transforms.TSF1.negate = false transforms.TSF1.offset.field = null transforms.TSF1.partition.field = null transforms.TSF1.predicate = transforms.TSF1.static.field = instance_id transforms.TSF1.static.value = 150 transforms.TSF1.timestamp.field = null transforms.TSF1.topic.field = null transforms.TSF1.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:43,834] INFO [transss|task-0] ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-producer-transss-0 compression.type = none confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = 
[] key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer (org.apache.kafka.clients.producer.ProducerConfig:376) [2024-02-28 13:36:43,840] WARN [transss|task-0] These configurations '[metrics.context.resource.connector, metrics.context.resource.version, metrics.context.connect.group.id, metrics.context.resource.type, metrics.context.resource.commit.id, metrics.context.resource.task, metrics.context.connect.kafka.cluster.id]' were supplied but are not used yet. 
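The transform chain that the transss task logs above (transforms = [unwrap, TSF1]) is hard to read spread through the EnrichedConnectorConfig dump. Reconstructed from the logged values only, the relevant part of the source connector configuration is roughly the following properties sketch; it is not the literal config that was submitted, and properties not shown are simply omitted here, not absent from the connector.

# Sketch reconstructed from the EnrichedConnectorConfig values logged above (transss);
# illustrative only, not the submitted configuration.
transforms=unwrap,TSF1
transforms.unwrap.type=io.debezium.transforms.ExtractNewRecordState
transforms.unwrap.drop.tombstones=false
transforms.unwrap.delete.handling.mode=drop
transforms.TSF1.type=org.apache.kafka.connect.transforms.InsertField$Value
transforms.TSF1.static.field=instance_id
transforms.TSF1.static.value=150

In effect, each change event is unwrapped by ExtractNewRecordState and then gets a literal instance_id = 150 field added by InsertField before the Avro converter serializes it.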
(org.apache.kafka.clients.producer.ProducerConfig:385) [2024-02-28 13:36:43,840] INFO [transss|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:43,840] INFO [transss|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:43,840] INFO [transss|task-0] Kafka startTimeMs: 1709109403840 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:43,842] INFO [transss|task-0] AdminClientConfig values: auto.include.jmx.reporter = true bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] client.dns.lookup = use_all_dns_ips client.id = connector-adminclient-transss-0 confluent.metrics.reporter.bootstrap.servers = kafka-0:9071 confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE confluent.use.controller.listener = false connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 host.resolver.class = class org.apache.kafka.clients.DefaultHostResolver metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS (org.apache.kafka.clients.admin.AdminClientConfig:376) [2024-02-28 13:36:43,845] WARN [transss|task-0] These configurations '[expose.internal.connect.endpoints, metrics.context.resource.version, config.storage.topic, listeners, metrics.context.connect.group.id, 
status.storage.topic, group.id, metrics.context.resource.type, metrics.context.resource.commit.id, plugin.path, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, metrics.context.connect.kafka.cluster.id, status.storage.replication.factor, value.converter.schemas.enable, offset.storage.replication.factor, offset.storage.topic, value.converter, key.converter]' were supplied but are not used yet. (org.apache.kafka.clients.admin.AdminClientConfig:385) [2024-02-28 13:36:43,845] INFO [transss|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:43,845] INFO [transss|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:43,845] INFO [transss|task-0] Kafka startTimeMs: 1709109403845 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:43,850] INFO [transss|task-0] AbstractConfig values: trace.records.enable = false trace.records.header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter trace.records.key.converter = class org.apache.kafka.connect.json.JsonConverter trace.records.predicates = [] trace.records.topic = connect-traces trace.records.topic.partition = 1 trace.records.topic.replication.factor = 3 trace.records.transforms = [] trace.records.value.converter = class org.apache.kafka.connect.json.JsonConverter (org.apache.kafka.common.config.AbstractConfig:376) [2024-02-28 13:36:43,850] INFO [transss|task-0] TracerConfig values: trace.records.enable = false trace.records.header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter trace.records.key.converter = class org.apache.kafka.connect.json.JsonConverter trace.records.predicates = [] trace.records.topic = connect-traces trace.records.topic.partition = 1 trace.records.topic.replication.factor = 3 trace.records.transforms = [] trace.records.value.converter = class org.apache.kafka.connect.json.JsonConverter (org.apache.kafka.connect.runtime.tracing.TracerConfig:376) [2024-02-28 13:36:43,852] WARN [sink_new|task-0] The deleted record handling configs "drop.tombstones" and "delete.handling.mode" have been deprecated, please use "delete.tombstone.handling.mode" instead. (io.debezium.transforms.AbstractExtractNewRecordState:101) [2024-02-28 13:36:43,854] WARN [transss|task-0] The deleted record handling configs "drop.tombstones" and "delete.handling.mode" have been deprecated, please use "delete.tombstone.handling.mode" instead. 
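Both the sink_new and transss tasks log the same Debezium deprecation warning above: drop.tombstones and delete.handling.mode on the unwrap (ExtractNewRecordState) transform are deprecated in favour of the single delete.tombstone.handling.mode option named in the message. A hedged sketch of the replacement follows; the property name comes straight from the warning, but the value shown is an assumption and should be chosen from the Debezium ExtractNewRecordState documentation to match the behaviour currently configured (delete.handling.mode = drop, drop.tombstones = false).

# Assumed replacement for the deprecated pair; verify the value against the
# Debezium ExtractNewRecordState documentation before applying.
transforms.unwrap.type=io.debezium.transforms.ExtractNewRecordState
transforms.unwrap.delete.tombstone.handling.mode=drop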
(io.debezium.transforms.AbstractExtractNewRecordState:101) [2024-02-28 13:36:43,863] INFO [transss|task-0] Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.InsertField$Value} (org.apache.kafka.connect.runtime.Worker:1677) [2024-02-28 13:36:43,863] INFO [sink_new|task-0] Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, org.apache.kafka.connect.transforms.InsertField$Value} (org.apache.kafka.connect.runtime.Worker:1577) [2024-02-28 13:36:43,864] INFO [sink_new|task-0] SinkConnectorConfig values: config.action.reload = restart connector.class = io.confluent.connect.jdbc.JdbcSinkConnector errors.deadletterqueue.context.headers.enable = true errors.deadletterqueue.topic.name = informix-gpdb-sink-errors errors.deadletterqueue.topic.replication.factor = 1 errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = sink_new predicates = [] tasks.max = 1 topics = [] topics.regex = mcp_kafka_net_21.mcp.trans_requests_reps transforms = [unwrap, route, TSF8] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.SinkConnectorConfig:376) [2024-02-28 13:36:43,866] INFO [sink_new|task-0] EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.confluent.connect.jdbc.JdbcSinkConnector errors.deadletterqueue.context.headers.enable = true errors.deadletterqueue.topic.name = informix-gpdb-sink-errors errors.deadletterqueue.topic.replication.factor = 1 errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = sink_new predicates = [] tasks.max = 1 topics = [] topics.regex = mcp_kafka_net_21.mcp.trans_requests_reps transforms = [unwrap, route, TSF8] transforms.TSF8.negate = false transforms.TSF8.offset.field = null transforms.TSF8.partition.field = null transforms.TSF8.predicate = transforms.TSF8.static.field = null transforms.TSF8.static.value = null transforms.TSF8.timestamp.field = rec_sync_created_at transforms.TSF8.topic.field = null transforms.TSF8.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.route.negate = false transforms.route.predicate = transforms.route.regex = ([^.]+)\.([^.]+)\.([^.]+) transforms.route.replacement = $3 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:43,870] INFO [sink_new|task-0] 
AdminClientConfig values: auto.include.jmx.reporter = true bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] client.dns.lookup = use_all_dns_ips client.id = connector-dlq-adminclient- confluent.metrics.reporter.bootstrap.servers = kafka-0:9071 confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE confluent.use.controller.listener = false connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 host.resolver.class = class org.apache.kafka.clients.DefaultHostResolver metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS (org.apache.kafka.clients.admin.AdminClientConfig:376) [2024-02-28 13:36:43,874] WARN [sink_new|task-0] These configurations '[metrics.context.resource.connector, metrics.context.resource.version, group.id, metrics.context.resource.type, metrics.context.resource.commit.id, plugin.path, metrics.context.resource.task, metrics.context.connect.kafka.cluster.id, status.storage.replication.factor, offset.storage.topic, value.converter, key.converter, expose.internal.connect.endpoints, config.storage.topic, listeners, metrics.context.connect.group.id, status.storage.topic, config.storage.replication.factor, offset.flush.interval.ms, key.converter.schemas.enable, value.converter.schemas.enable, offset.storage.replication.factor]' were supplied but are not used yet. 
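The sink_new configuration logged above shows how records reach their destination table: the route transform (RegexRouter) rewrites the three-part Debezium topic name to its last segment, and table.name.format = ${topic} in the JdbcSinkConfig further down turns that renamed topic into the table name. A sketch of the relevant properties, reconstructed from the logged values; the doubled backslashes are only properties-file escaping for the logged regex ([^.]+)\.([^.]+)\.([^.]+).

# Reconstructed from the sink_new EnrichedConnectorConfig above; illustrative only.
topics.regex=mcp_kafka_net_21.mcp.trans_requests_reps
transforms=unwrap,route,TSF8
transforms.route.type=org.apache.kafka.connect.transforms.RegexRouter
transforms.route.regex=([^.]+)\\.([^.]+)\\.([^.]+)
transforms.route.replacement=$3
transforms.TSF8.type=org.apache.kafka.connect.transforms.InsertField$Value
transforms.TSF8.timestamp.field=rec_sync_created_at
# "mcp_kafka_net_21.mcp.trans_requests_reps" is routed to "trans_requests_reps",
# which table.name.format=${topic} then maps to the trans_requests_reps table.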
(org.apache.kafka.clients.admin.AdminClientConfig:385) [2024-02-28 13:36:43,875] INFO [sink_new|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:43,875] INFO [sink_new|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:43,875] INFO [sink_new|task-0] Kafka startTimeMs: 1709109403875 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:43,891] INFO [sink_new|task-0] App info kafka.admin.client for connector-dlq-adminclient- unregistered (org.apache.kafka.common.utils.AppInfoParser:83) [2024-02-28 13:36:43,894] INFO [sink_new|task-0] Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:710) [2024-02-28 13:36:43,895] INFO [sink_new|task-0] Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:714) [2024-02-28 13:36:43,895] INFO [sink_new|task-0] Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:720) [2024-02-28 13:36:43,896] INFO [sink_new|task-0] ProducerConfig values: acks = -1 auto.include.jmx.reporter = true batch.size = 16384 bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] buffer.memory = 33554432 client.dns.lookup = use_all_dns_ips client.id = connector-dlq-producer-sink_new-0 compression.type = none confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 delivery.timeout.ms = 2147483647 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer linger.ms = 0 max.block.ms = 9223372036854775807 max.in.flight.requests.per.connection = 1 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 2147483647 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 
socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer (org.apache.kafka.clients.producer.ProducerConfig:376) [2024-02-28 13:36:43,903] WARN [sink_new|task-0] These configurations '[metrics.context.resource.connector, metrics.context.resource.version, metrics.context.connect.group.id, metrics.context.resource.type, metrics.context.resource.commit.id, metrics.context.resource.task, metrics.context.connect.kafka.cluster.id]' were supplied but are not used yet. (org.apache.kafka.clients.producer.ProducerConfig:385) [2024-02-28 13:36:43,903] INFO [sink_new|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:43,904] INFO [sink_new|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:43,904] INFO [sink_new|task-0] Kafka startTimeMs: 1709109403903 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:43,904] INFO [transss|task-0] Starting InformixConnectorTask with configuration: (io.debezium.connector.common.BaseSourceTask:135) [2024-02-28 13:36:43,906] INFO [sink_new|task-0] TracerConfig values: trace.records.enable = false trace.records.header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter trace.records.key.converter = class org.apache.kafka.connect.json.JsonConverter trace.records.predicates = [] trace.records.topic = connect-traces trace.records.topic.partition = 1 trace.records.topic.replication.factor = 3 trace.records.transforms = [] trace.records.value.converter = class org.apache.kafka.connect.json.JsonConverter (org.apache.kafka.connect.runtime.tracing.TracerConfig:376) [2024-02-28 13:36:43,907] WARN [sink_new|task-0] The deleted record handling configs "drop.tombstones" and "delete.handling.mode" have been deprecated, please use "delete.tombstone.handling.mode" instead. 
(io.debezium.transforms.AbstractExtractNewRecordState:101) [2024-02-28 13:36:43,908] INFO [sink_new|task-0] Initializing: org.apache.kafka.connect.runtime.TransformationChain{io.debezium.transforms.ExtractNewRecordState, org.apache.kafka.connect.transforms.RegexRouter, org.apache.kafka.connect.transforms.InsertField$Value} (org.apache.kafka.connect.runtime.Worker:1593) [2024-02-28 13:36:43,909] INFO [transss|task-0] connector.class = io.debezium.connector.informix.InformixConnector (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,910] INFO [transss|task-0] errors.log.include.messages = true (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,910] INFO [transss|task-0] topic.creation.default.partitions = 1 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,910] INFO [transss|task-0] value.converter.schema.registry.subject.name.strategy = io.confluent.kafka.serializers.subject.TopicNameStrategy (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,910] INFO [transss|task-0] tasks.max = 1 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,910] INFO [transss|task-0] key.converter.schema.registry.subject.name.strategy = io.confluent.kafka.serializers.subject.TopicNameStrategy (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,910] INFO [transss|task-0] transforms = unwrap,TSF1 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,910] INFO [transss|task-0] transforms.TSF1.static.field = instance_id (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,910] INFO [transss|task-0] errors.deadletterqueue.context.headers.enable = true (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,910] INFO [transss|task-0] database.connection.retry.interval.ms = 1000 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] schema.history.internal.store.only.captured.databases.ddl = true (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] schema.history.internal.store.only.captured.tables.ddl = true (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] transforms.TSF1.type = org.apache.kafka.connect.transforms.InsertField$Value (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] tombstones.on.delete = true (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] topic.prefix = mcp_kafka_net_21 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] decimal.handling.mode = double (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] schema.history.internal.kafka.topic = informixschemahistory (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] transforms.unwrap.drop.tombstones = false (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] topic.creation.default.replication.factor = 1 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] errors.deadletterqueue.topic.replication.factor = 1 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] transforms.unwrap.type = io.debezium.transforms.ExtractNewRecordState (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 
13:36:43,911] INFO [transss|task-0] value.converter = io.confluent.connect.avro.AvroConverter (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] errors.log.enable = true (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] key.converter = io.confluent.connect.avro.AvroConverter (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [transss|task-0] database.user = kafka (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] database.dbname = cards_1952 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] topic.creation.default.compression.type = lz4 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] topic.creation.default.cleanup.policy = compact (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] time.precision.mode = connect (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] database.server.name = mcp_kafka_net_21 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] snapshot.isolation.mode = read_committed (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] schema.history.internal.kafka.bootstrap.servers = 192.168.151.201:9092,192.168.151.202:9092,192.168.151.203:9092 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] database.port = 9260 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] value.converter.schema.registry.url = http://192.168.151.201:8081 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] transforms.TSF1.static.value = 150 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] task.class = io.debezium.connector.informix.InformixConnectorTask (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] errors.max.retries = 10 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] database.connection.retries = 5 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] errors.deadletterqueue.topic.name = informix-gpdb-source-errors (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,912] INFO [transss|task-0] database.hostname = 192.168.151.101 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,913] INFO [transss|task-0] database.password = ******** (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,913] INFO [transss|task-0] name = transss (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,913] INFO [transss|task-0] errors.tolerance = all (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,913] INFO [transss|task-0] table.include.list = cards_1952.mcp.trans_requests_reps (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,913] INFO [transss|task-0] pk.mode = primary_key (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,913] INFO [transss|task-0] key.converter.schema.registry.url = http://192.168.151.201:8081 (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,913] INFO [transss|task-0] 
snapshot.mode = schema_only (io.debezium.connector.common.BaseSourceTask:137) [2024-02-28 13:36:43,911] INFO [sink_new|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.252:9092] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = connector-consumer-sink_new-0 client.rack = confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = connect-sink_new group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 10000 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 45000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type 
= JKS value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:376) [2024-02-28 13:36:43,917] INFO [sink_new|task-0] [Producer clientId=connector-dlq-producer-sink_new-0] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:43,923] WARN [sink_new|task-0] These configurations '[metrics.context.resource.version, metrics.context.connect.group.id, metrics.context.resource.type, metrics.context.resource.commit.id, metrics.context.connect.kafka.cluster.id]' were supplied but are not used yet. (org.apache.kafka.clients.consumer.ConsumerConfig:385) [2024-02-28 13:36:43,924] INFO [sink_new|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:43,924] INFO [sink_new|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:43,924] INFO [sink_new|task-0] Kafka startTimeMs: 1709109403924 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:43,933] INFO [transss|task-0] Loading the custom source info struct maker plugin: io.debezium.connector.informix.InformixSourceInfoStructMaker (io.debezium.config.CommonConnectorConfig:1410) [2024-02-28 13:36:43,936] INFO [Worker clientId=connect-1, groupId=connect-cluster] Finished starting connectors and tasks (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1726) [2024-02-28 13:36:43,939] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Subscribed to pattern: 'mcp_kafka_net_21.mcp.trans_requests_reps' (org.apache.kafka.clients.consumer.KafkaConsumer:1033) [2024-02-28 13:36:43,943] INFO [sink_new|task-0] Starting JDBC Sink task (io.confluent.connect.jdbc.sink.JdbcSinkTask:53) [2024-02-28 13:36:43,946] INFO [sink_new|task-0] JdbcSinkConfig values: auto.create = true auto.evolve = true batch.insert.mode = gpload batch.size = 3000 column.alternative = null column.selection.strategy = DEFAULT connection.attempts = 3 connection.backoff.ms = 10000 connection.password = null connection.url = jdbc:postgresql://192.168.150.79:5444/gpdb_dev?schema=smi&user=gpapp&password=gpapp connection.user = null csv.encoding = UTF-8 csv.header = true csv.quote = " data.delimiter = , db.schema = smi db.timezone = UTC debug.logs = false delete.enabled = true dialect.name = fields.whitelist = [] gp.error.limit = 999999999 gp.error.percentage.limit = 0 gp.log.errors = true gp.max.line.length = 65535 gpfdist.host = null gpss.host = localhost gpss.port = 5000 gpss.use.sticky.session = true greenplum.home = /usr/local/greenplum-db-6.20.5 insert.mode = merge keep.gp.files = true max.batch.wait.time = 6000 max.retries = 10 mssql.use.merge.holdlock = true null.string = null pk.fields = [] pk.mode = record_key port.range = [8000, 9000] quote.sql.identifiers = ALWAYS retry.backoff.ms = 3000 table.name.format = ${topic} table.types = [TABLE] trim.sensitive.log = false update.mode = last_row_only (io.confluent.connect.jdbc.sink.JdbcSinkConfig:376)
Name Type Default Importance Documentation
------------------------------------------------------------------------------------------------------------------
connection.url STRING java.lang.Object@763aa507 HIGH JDBC connection URL. For example: ``jdbc:oracle:thin:@localhost:1521:orclpdb1``, ``jdbc:mysql://localhost/db_name``, ``jdbc:sqlserver://localhost;instance=SQLEXPRESS;databaseName=db_name``
connection.user STRING null HIGH JDBC connection user.
connection.password PASSWORD null HIGH JDBC connection password.
dialect.name STRING LOW The name of the database dialect that should be used for this connector. By default this is empty, and the connector automatically determines the dialect based upon the JDBC connection URL. Use this if you want to override that behavior and use a specific dialect. All properly-packaged dialects in the JDBC connector plugin can be used.
connection.attempts INT 3 LOW Maximum number of attempts to retrieve a valid JDBC connection. Must be a positive integer.
connection.backoff.ms LONG 10000 LOW Backoff time in milliseconds between connection attempts.
batch.insert.mode STRING none HIGH The batch insertion mode to use. Supported modes are: ``default`` Use standard SQL ``INSERT`` statements. ``gpload`` Use gpload utility to load data by creating a yml file at runtime (For greenplum only). Make sure that the gpload is in your path ``GPSS`` Use greenplum streaming server (For greenplum only) https://docs.vmware.com/en/VMware-Greenplum-Streaming-Server/1.10/greenplum-streaming-server/ref-gpss.html ``GPFDIST`` Use gpfdist in memory server
insert.mode STRING insert HIGH The insertion mode to use. Supported modes are: ``insert`` Use standard SQL ``INSERT`` statements. ``upsert`` Use the appropriate upsert semantics for the target database if it is supported by the connector, e.g. ``INSERT OR IGNORE``. ``update`` Use the appropriate update semantics for the target database if it is supported by the connector, e.g. ``UPDATE``.
batch.size INT 3000 MEDIUM Specifies how many records to attempt to batch together for insertion into the destination table, when possible.
delete.enabled BOOLEAN false MEDIUM Whether to treat ``null`` record values as deletes. Requires ``pk.mode`` to be ``record_key``.
table.types LIST [TABLE] LOW The comma-separated types of database tables to which the sink connector can write. By default this is ``TABLE``, but any combination of ``TABLE``, ``PARTITIONED TABLE`` and ``VIEW`` is allowed. Not all databases support writing to views, and when they do the sink connector will fail if the view definition does not match the records' schemas (regardless of ``auto.evolve``).
table.name.format STRING ${topic} MEDIUM A format string for the destination table name, which may contain '${topic}' as a placeholder for the originating topic name. For example, ``kafka_${topic}`` for the topic 'orders' will map to the table name 'kafka_orders'.
pk.mode STRING none HIGH The primary key mode, also refer to ``pk.fields`` documentation for interplay. Supported modes are: ``none`` No keys utilized. ``kafka`` Kafka coordinates are used as the PK. ``record_key`` Field(s) from the record key are used, which may be a primitive or a struct. ``record_value`` Field(s) from the record value are used, which must be a struct.
pk.fields LIST [] MEDIUM List of comma-separated primary key field names. The runtime interpretation of this config depends on the ``pk.mode``: ``none`` Ignored as no fields are used as primary key in this mode. ``kafka`` Must be a trio representing the Kafka coordinates, defaults to ``__connect_topic,__connect_partition,__connect_offset`` if empty. ``record_key`` If empty, all fields from the key struct will be used, otherwise used to extract the desired fields - for primitive key only a single field name must be configured. ``record_value`` If empty, all fields from the value struct will be used, otherwise used to extract the desired fields.
fields.whitelist LIST [] MEDIUM List of comma-separated record value field names. If empty, all fields from the record value are utilized, otherwise used to filter to the desired fields. Note that ``pk.fields`` is applied independently in the context of which field(s) form the primary key columns in the destination database, while this configuration is applicable for the other columns.
db.timezone STRING UTC MEDIUM Name of the JDBC timezone that should be used in the connector when inserting time-based values. Defaults to UTC.
auto.create BOOLEAN false MEDIUM Whether to automatically create the destination table based on record schema if it is found to be missing by issuing ``CREATE``.
auto.evolve BOOLEAN false MEDIUM Whether to automatically add columns in the table schema when found to be missing relative to the record schema by issuing ``ALTER``.
quote.sql.identifiers STRING ALWAYS MEDIUM When to quote table names, column names, and other identifiers in SQL statements. For backward compatibility, the default is ``always``.
mssql.use.merge.holdlock BOOLEAN true LOW Whether to use HOLDLOCK when performing a MERGE INTO upsert statement. Note that it is only applicable to SQL Server.
max.retries INT 10 MEDIUM The maximum number of times to retry on errors before failing the task.
max.batch.wait.time LONG 60000 MEDIUM The maximum time to wait for a batch to be completed.
retry.backoff.ms INT 3000 MEDIUM The time in milliseconds to wait following an error before a retry attempt is made.
port.range LIST [8000, 9000] MEDIUM The range of ports to use for gpfdist.
trim.sensitive.log BOOLEAN false LOW
data.delimiter STRING , MEDIUM Data line delimiter character.
gp.error.limit INT 0 MEDIUM Error limit for gpload processing.
csv.header BOOLEAN true MEDIUM Whether CSV file for gpload includes headers.
csv.quote STRING " MEDIUM CSV quote character.
csv.encoding STRING UTF-8 MEDIUM CSV encoding.
gp.log.errors BOOLEAN true MEDIUM Whether to log CSV errors.
greenplum.home STRING null MEDIUM The path to the Greenplum installation directory.
gpfdist.host STRING null MEDIUM The gpfdist host for gpfdist and gpload modes. Gpfdist server will fallback to current machine's ip or hostname if not specified.
keep.gp.files BOOLEAN false MEDIUM Whether to keep Greenplum files for debugging.
db.schema STRING null MEDIUM The schema to use for the connector's tables.
gpss.host STRING localhost MEDIUM The gpss host for gpss mode.
gpss.port STRING 5000 MEDIUM The gpss port for gpss mode.
gp.error.percentage.limit INT 0 MEDIUM The maximum percentage of errors allowed in a batch before the batch is considered failed.
gpss.use.sticky.session BOOLEAN false MEDIUM Whether to use sticky session for gpss mode.
gp.max.line.length LONG 65535 MEDIUM The maximum length of a line for gpfdist.
update.mode STRING DEFAULT MEDIUM The update mode to use for updates:`DEFAULT`: Do nothing, use default behavior.`FIRST_ROW_ONLY`: Choose first row only from a batch of updates.`LAST_ROW_ONLY`: Choose last row only from a batch of updates.
null.string STRING null MEDIUM The string to use for null values in the CSV file/gpss stream.
debug.logs BOOLEAN false MEDIUM Whether to log debug logs.
column.selection.strategy STRING DEFAULT MEDIUM The column selection strategy to use (for gpss only). Supported strategies are: ``DEFAULT`` Use all columns received from source table. ``SINK_PREFERRED`` Prefer columns in the sink table if there is a difference between the source and sink tables.
column.alternative STRING null MEDIUM The column alternative, i.e. use value of other column if it is missed in the source.
[2024-02-28 13:36:43,954] INFO [Producer clientId=connect-cluster--configs] Resetting the last seen epoch of partition connect-configs-0 to 2 since the associated topicId changed from null to Al-pWSFaSz2bRsI3hN82uQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:44,059] INFO [sink_new|task-0] Initializing JDBC writer (io.confluent.connect.jdbc.sink.JdbcSinkTask:67) [2024-02-28 13:36:43,997] INFO [transss|task-0] Loading the custom topic naming strategy plugin: io.debezium.schema.SchemaTopicNamingStrategy (io.debezium.config.CommonConnectorConfig:1154) [2024-02-28 13:36:44,062] INFO [sink_new|task-0] Validating JDBC URL. (io.confluent.connect.jdbc.dialect.DatabaseDialects:171) [2024-02-28 13:36:44,062] INFO [sink_new|task-0] Validated JDBC URL. (io.confluent.connect.jdbc.dialect.DatabaseDialects:174) [2024-02-28 13:36:44,072] INFO [Worker clientId=connect-1, groupId=connect-cluster] Session key updated (org.apache.kafka.connect.runtime.distributed.DistributedHerder:2172) [2024-02-28 13:36:44,073] INFO [sink_new|task-0] Validating JDBC URL. (io.confluent.connect.jdbc.dialect.DatabaseDialects:171) [2024-02-28 13:36:44,073] INFO [sink_new|task-0] Validated JDBC URL. (io.confluent.connect.jdbc.dialect.DatabaseDialects:174) [2024-02-28 13:36:44,078] INFO [sink_new|task-0] Initializing writer using SQL dialect: PostgreSqlDatabaseDialect (io.confluent.connect.jdbc.sink.JdbcSinkTask:74) [2024-02-28 13:36:44,084] INFO SinkConnectorConfig values: config.action.reload = restart connector.class = io.confluent.connect.jdbc.JdbcSinkConnector errors.deadletterqueue.context.headers.enable = true errors.deadletterqueue.topic.name = informix-gpdb-sink-errors errors.deadletterqueue.topic.replication.factor = 1 errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = sink_new predicates = [] tasks.max = 1 topics = [] topics.regex = mcp_kafka_net_21.mcp.trans_requests_reps transforms = [unwrap, route, TSF8] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.SinkConnectorConfig:376) [2024-02-28 13:36:44,085] INFO [sink_new|task-0] JDBC writer initialized (io.confluent.connect.jdbc.sink.JdbcSinkTask:88) [2024-02-28 13:36:44,085] INFO [sink_new|task-0] WorkerSinkTask{id=sink_new-0} Sink task finished initialization and start (org.apache.kafka.connect.runtime.WorkerSinkTask:317) [2024-02-28 13:36:44,088] INFO [sink_new|task-0] WorkerSinkTask{id=sink_new-0} Executing sink task (org.apache.kafka.connect.runtime.WorkerSinkTask:202) [2024-02-28 13:36:44,088] INFO EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.confluent.connect.jdbc.JdbcSinkConnector errors.deadletterqueue.context.headers.enable = true errors.deadletterqueue.topic.name = informix-gpdb-sink-errors errors.deadletterqueue.topic.replication.factor = 1 errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = sink_new predicates = [] tasks.max = 1 topics = [] topics.regex = mcp_kafka_net_21.mcp.trans_requests_reps transforms = [unwrap, route, TSF8] transforms.TSF8.negate = false
transforms.TSF8.offset.field = null transforms.TSF8.partition.field = null transforms.TSF8.predicate = transforms.TSF8.static.field = null transforms.TSF8.static.value = null transforms.TSF8.timestamp.field = rec_sync_created_at transforms.TSF8.topic.field = null transforms.TSF8.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.route.negate = false transforms.route.predicate = transforms.route.regex = ([^.]+)\.([^.]+)\.([^.]+) transforms.route.replacement = $3 transforms.route.type = class org.apache.kafka.connect.transforms.RegexRouter transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:44,089] INFO [sink_new|worker] Setting task configurations for 1 workers. (io.confluent.connect.jdbc.JdbcSinkConnector:51) [2024-02-28 13:36:44,091] INFO SourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.SourceConnectorConfig:376) [2024-02-28 13:36:44,094] INFO EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] transforms.TSF1.negate = false transforms.TSF1.offset.field = null transforms.TSF1.partition.field = null transforms.TSF1.predicate = transforms.TSF1.static.field = instance_id transforms.TSF1.static.value = 150 transforms.TSF1.timestamp.field = null transforms.TSF1.topic.field = null transforms.TSF1.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false 
transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:44,095] INFO EnrichedSourceConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.default.exclude = [] topic.creation.default.include = [.*] topic.creation.default.partitions = 1 topic.creation.default.replication.factor = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.SourceConnectorConfig$EnrichedSourceConnectorConfig:376) [2024-02-28 13:36:44,097] INFO EnrichedConnectorConfig values: config.action.reload = restart connector.class = io.debezium.connector.informix.InformixConnector errors.log.enable = true errors.log.include.messages = true errors.retry.delay.max.ms = 60000 errors.retry.timeout = 0 errors.tolerance = all exactly.once.support = requested header.converter = null key.converter = class io.confluent.connect.avro.AvroConverter name = transss offsets.storage.topic = null predicates = [] tasks.max = 1 topic.creation.default.exclude = [] topic.creation.default.include = [.*] topic.creation.default.partitions = 1 topic.creation.default.replication.factor = 1 topic.creation.groups = [] transaction.boundary = poll transaction.boundary.interval.ms = null transforms = [unwrap, TSF1] transforms.TSF1.negate = false transforms.TSF1.offset.field = null transforms.TSF1.partition.field = null transforms.TSF1.predicate = transforms.TSF1.static.field = instance_id transforms.TSF1.static.value = 150 transforms.TSF1.timestamp.field = null transforms.TSF1.topic.field = null transforms.TSF1.type = class org.apache.kafka.connect.transforms.InsertField$Value transforms.unwrap.add.fields = [] transforms.unwrap.add.fields.prefix = __ transforms.unwrap.add.headers = [] transforms.unwrap.add.headers.prefix = __ transforms.unwrap.delete.handling.mode = drop transforms.unwrap.drop.fields.from.key = false transforms.unwrap.drop.fields.header.name = null transforms.unwrap.drop.fields.keep.schema.compatible = true transforms.unwrap.drop.tombstones = false transforms.unwrap.negate = false transforms.unwrap.predicate = transforms.unwrap.route.by.field = transforms.unwrap.type = class io.debezium.transforms.ExtractNewRecordState value.converter = class io.confluent.connect.avro.AvroConverter (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:376) [2024-02-28 13:36:44,098] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Resetting the last seen epoch of partition mcp_kafka_net_21.mcp.trans_requests_reps-0 to 2 since the associated topicId changed from null to 7frYXFYVRvqqhwO3t6BbKQ (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:44,099] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, 
groupId=connect-sink_new] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:44,099] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Discovered group coordinator 192.168.151.203:9092 (id: 2147483644 rack: null) (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:896) [2024-02-28 13:36:44,100] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] (Re-)joining group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:566) [2024-02-28 13:36:44,123] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Request joining group due to: need to re-join with the given member-id: connector-consumer-sink_new-0-03103c70-0c26-4c51-979b-9f0ce85e5644 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1062) [2024-02-28 13:36:44,124] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Request joining group due to: rebalance failed due to 'The group member needs to have a valid member id before actually entering a consumer group.' (MemberIdRequiredException) (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1062) [2024-02-28 13:36:44,124] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] (Re-)joining group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:566) [2024-02-28 13:36:44,128] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Successfully joined group with generation Generation{generationId=48, memberId='connector-consumer-sink_new-0-03103c70-0c26-4c51-979b-9f0ce85e5644', protocol='range'} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:627) [2024-02-28 13:36:44,135] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Finished assignment for group at generation 48: {connector-consumer-sink_new-0-03103c70-0c26-4c51-979b-9f0ce85e5644=Assignment(partitions=[mcp_kafka_net_21.mcp.trans_requests_reps-0])} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:710) [2024-02-28 13:36:44,150] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Successfully synced group in generation Generation{generationId=48, memberId='connector-consumer-sink_new-0-03103c70-0c26-4c51-979b-9f0ce85e5644', protocol='range'} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:802) [2024-02-28 13:36:44,150] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Notifying assignor about the new Assignment(partitions=[mcp_kafka_net_21.mcp.trans_requests_reps-0]) (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:305) [2024-02-28 13:36:44,151] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Adding newly assigned partitions: mcp_kafka_net_21.mcp.trans_requests_reps-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:317) [2024-02-28 13:36:44,188] INFO [sink_new|task-0] [Consumer clientId=connector-consumer-sink_new-0, groupId=connect-sink_new] Setting offset for partition mcp_kafka_net_21.mcp.trans_requests_reps-0 to the committed offset FetchPosition{offset=95462, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=Optional[192.168.151.203:9092 (id: 3 rack: null)], epoch=2}} 
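
At this point the sink_new task is fully started: the JdbcSinkConfig, SinkConnectorConfig and EnrichedConnectorConfig dumps above describe its configuration, and its consumer has just joined group connect-sink_new and resumed at offset 95462. The sketch below reassembles that configuration from the logged values and submits it through the Kafka Connect REST API (the same API the later GET /connectors/... requests in this log use). It is a minimal sketch: CONNECT_URL, the schema.registry.url entries for the Avro converters, and the idea of pulling the credentials out of connection.url and into environment variables are assumptions of the sketch, not values that appear in this log.

#!/usr/bin/env python3
"""Recreate the 'sink_new' JDBC sink connector from the values logged above."""
import os
import requests

CONNECT_URL = "http://localhost:8083"            # assumption: Connect REST listener
SCHEMA_REGISTRY = "http://schema-registry:8081"  # assumption: not shown in this log

sink_config = {
    "connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector",
    "tasks.max": "1",
    "topics.regex": "mcp_kafka_net_21.mcp.trans_requests_reps",
    "key.converter": "io.confluent.connect.avro.AvroConverter",
    "key.converter.schema.registry.url": SCHEMA_REGISTRY,
    "value.converter": "io.confluent.connect.avro.AvroConverter",
    "value.converter.schema.registry.url": SCHEMA_REGISTRY,
    # Connection values as logged, with the credentials externalized (the log
    # shows them embedded in the JDBC URL in plain text).
    "connection.url": "jdbc:postgresql://192.168.150.79:5444/gpdb_dev?schema=smi",
    "connection.user": os.environ.get("GP_USER", "gpapp"),
    "connection.password": os.environ.get("GP_PASSWORD", ""),
    "db.schema": "smi",
    "insert.mode": "merge",
    "batch.insert.mode": "gpload",
    "batch.size": "3000",
    "pk.mode": "record_key",
    "delete.enabled": "true",
    "auto.create": "true",
    "auto.evolve": "true",
    "table.name.format": "${topic}",
    "greenplum.home": "/usr/local/greenplum-db-6.20.5",
    # Transform chain from the EnrichedConnectorConfig dump: the 'route' step turns
    # mcp_kafka_net_21.mcp.trans_requests_reps into trans_requests_reps, so the
    # ${topic} placeholder resolves to the bare table name inside schema smi.
    "transforms": "unwrap,route,TSF8",
    "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
    "transforms.unwrap.drop.tombstones": "false",
    "transforms.unwrap.delete.handling.mode": "drop",
    "transforms.route.type": "org.apache.kafka.connect.transforms.RegexRouter",
    "transforms.route.regex": r"([^.]+)\.([^.]+)\.([^.]+)",
    "transforms.route.replacement": "$3",
    "transforms.TSF8.type": "org.apache.kafka.connect.transforms.InsertField$Value",
    "transforms.TSF8.timestamp.field": "rec_sync_created_at",
    # Dead-letter-queue / error handling as logged:
    "errors.tolerance": "all",
    "errors.log.enable": "true",
    "errors.log.include.messages": "true",
    "errors.deadletterqueue.topic.name": "informix-gpdb-sink-errors",
    "errors.deadletterqueue.topic.replication.factor": "1",
    "errors.deadletterqueue.context.headers.enable": "true",
}

# PUT /connectors/{name}/config creates the connector or updates it in place.
resp = requests.put(f"{CONNECT_URL}/connectors/sink_new/config", json=sink_config)
resp.raise_for_status()
print(resp.json()["name"], "configured")
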
(org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:973) [2024-02-28 13:36:44,678] INFO [transss|task-0] KafkaSchemaHistory Consumer config: {key.deserializer=org.apache.kafka.common.serialization.StringDeserializer, value.deserializer=org.apache.kafka.common.serialization.StringDeserializer, enable.auto.commit=false, group.id=mcp_kafka_net_21-schemahistory, bootstrap.servers=192.168.151.201:9092,192.168.151.202:9092,192.168.151.203:9092, fetch.min.bytes=1, session.timeout.ms=10000, auto.offset.reset=earliest, client.id=mcp_kafka_net_21-schemahistory} (io.debezium.storage.kafka.history.KafkaSchemaHistory:245) [2024-02-28 13:36:44,679] INFO [transss|task-0] KafkaSchemaHistory Producer config: {retries=1, value.serializer=org.apache.kafka.common.serialization.StringSerializer, acks=1, batch.size=32768, max.block.ms=10000, bootstrap.servers=192.168.151.201:9092,192.168.151.202:9092,192.168.151.203:9092, buffer.memory=1048576, key.serializer=org.apache.kafka.common.serialization.StringSerializer, client.id=mcp_kafka_net_21-schemahistory, linger.ms=0} (io.debezium.storage.kafka.history.KafkaSchemaHistory:246) [2024-02-28 13:36:44,682] INFO [transss|task-0] Requested thread factory for connector InformixConnector, id = mcp_kafka_net_21 named = db-history-config-check (io.debezium.util.Threads:271) [2024-02-28 13:36:44,689] INFO [transss|task-0] Idempotence will be disabled because acks is set to 1, not set to 'all'. (org.apache.kafka.clients.producer.ProducerConfig:561) [2024-02-28 13:36:44,689] INFO [transss|task-0] ProducerConfig values: acks = 1 auto.include.jmx.reporter = true batch.size = 32768 bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.203:9092] buffer.memory = 1048576 client.dns.lookup = use_all_dns_ips client.id = mcp_kafka_net_21-schemahistory compression.type = none confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 delivery.timeout.ms = 120000 enable.idempotence = false interceptor.classes = [] key.serializer = class org.apache.kafka.common.serialization.StringSerializer linger.ms = 0 max.block.ms = 10000 max.in.flight.requests.per.connection = 5 max.request.size = 1048576 metadata.max.age.ms = 300000 metadata.max.idle.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partitioner.adaptive.partitioning.enable = true partitioner.availability.timeout.ms = 0 partitioner.class = null partitioner.ignore.keys = false receive.buffer.bytes = 32768 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null 
sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS transaction.timeout.ms = 60000 transactional.id = null value.serializer = class org.apache.kafka.common.serialization.StringSerializer (org.apache.kafka.clients.producer.ProducerConfig:376) [2024-02-28 13:36:44,698] INFO [transss|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:44,698] INFO [transss|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:44,699] INFO [transss|task-0] Kafka startTimeMs: 1709109404698 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:44,700] INFO [transss|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.203:9092] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = mcp_kafka_net_21-schemahistory client.rack = confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = mcp_kafka_net_21-schemahistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class 
= null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:376) [2024-02-28 13:36:44,705] INFO [transss|task-0] [Producer clientId=mcp_kafka_net_21-schemahistory] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:44,709] INFO [transss|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:44,709] INFO [transss|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:44,709] INFO [transss|task-0] Kafka startTimeMs: 1709109404709 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:44,716] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:44,721] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Resetting generation and member id due to: consumer pro-actively leaving the group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1015) [2024-02-28 13:36:44,721] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Request joining group due to: consumer pro-actively leaving the group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1062) [2024-02-28 13:36:44,722] INFO [transss|task-0] Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:710) [2024-02-28 13:36:44,722] INFO [transss|task-0] Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:714) [2024-02-28 13:36:44,723] INFO [transss|task-0] Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:720) [2024-02-28 13:36:44,725] INFO [transss|task-0] App info kafka.consumer 
for mcp_kafka_net_21-schemahistory unregistered (org.apache.kafka.common.utils.AppInfoParser:83) [2024-02-28 13:36:44,927] INFO 172.20.0.2 - - [28/Feb/2024:08:36:44 +0000] "GET /connectors HTTP/1.1" 200 22 "-" "ReactorNetty/1.1.6" 8 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:44,998] INFO [transss|task-0] Found previous partition offset InformixPartition [sourcePartition={databaseName=mcp_kafka_net_21}]: {begin_lsn=440659349655576, commit_lsn=440659349660336, change_lsn=440659349655688} (io.debezium.connector.common.BaseSourceTask:373) [2024-02-28 13:36:45,002] INFO [transss|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.203:9092] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = mcp_kafka_net_21-schemahistory client.rack = confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = mcp_kafka_net_21-schemahistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] 
ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:376) [2024-02-28 13:36:45,008] INFO [transss|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:45,008] INFO [transss|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:45,009] INFO [transss|task-0] Kafka startTimeMs: 1709109405008 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:45,014] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:45,017] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Resetting generation and member id due to: consumer pro-actively leaving the group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1015) [2024-02-28 13:36:45,017] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Request joining group due to: consumer pro-actively leaving the group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1062) [2024-02-28 13:36:45,018] INFO [transss|task-0] Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:710) [2024-02-28 13:36:45,018] INFO [transss|task-0] Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:714) [2024-02-28 13:36:45,018] INFO [transss|task-0] Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:720) [2024-02-28 13:36:45,021] INFO [transss|task-0] App info kafka.consumer for mcp_kafka_net_21-schemahistory unregistered (org.apache.kafka.common.utils.AppInfoParser:83) [2024-02-28 13:36:45,021] INFO [transss|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 5000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.203:9092] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = mcp_kafka_net_21-schemahistory client.rack = confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = mcp_kafka_net_21-schemahistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 
metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:376) [2024-02-28 13:36:45,028] INFO [transss|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:45,028] INFO [transss|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:45,029] INFO [transss|task-0] Kafka startTimeMs: 1709109405028 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:45,030] INFO [transss|task-0] Creating thread debezium-informixconnector-mcp_kafka_net_21-db-history-config-check (io.debezium.util.Threads:288) [2024-02-28 13:36:45,032] INFO [transss|task-0] AdminClientConfig values: auto.include.jmx.reporter = true bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.203:9092] client.dns.lookup = use_all_dns_ips client.id = mcp_kafka_net_21-schemahistory-topic-check confluent.metrics.reporter.bootstrap.servers = kafka-0:9071 confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE confluent.use.controller.listener = 
false connections.max.idle.ms = 300000 default.api.timeout.ms = 60000 host.resolver.class = class org.apache.kafka.clients.DefaultHostResolver metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retries = 1 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS (org.apache.kafka.clients.admin.AdminClientConfig:376) [2024-02-28 13:36:45,037] WARN [transss|task-0] These configurations '[value.serializer, acks, batch.size, max.block.ms, buffer.memory, key.serializer, linger.ms]' were supplied but are not used yet. 
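
The SourceConnectorConfig and EnrichedConnectorConfig dumps earlier, together with the KafkaSchemaHistory consumer/producer settings and the schema-history topic check around this point, pin down most of the 'transss' Debezium Informix source connector. The sketch below reassembles it in the same style as the sink example above. It is only a sketch: the Informix hostname, port and credentials never appear in this log and are placeholders, and the database.* and schema.history.internal.* property names follow current Debezium naming and should be checked against the connector version actually deployed.

#!/usr/bin/env python3
"""Sketch of the 'transss' Debezium Informix source connector."""
import requests

CONNECT_URL = "http://localhost:8083"  # assumption: Connect REST listener

source_config = {
    "connector.class": "io.debezium.connector.informix.InformixConnector",
    "tasks.max": "1",
    # Prefix inferred from the topic name mcp_kafka_net_21.mcp.trans_requests_reps
    # and the logged server name mcp_kafka_net_21.
    "topic.prefix": "mcp_kafka_net_21",
    # Informix connection details are not logged anywhere -- placeholders only.
    "database.hostname": "informix-host",   # assumption
    "database.port": "9088",                # assumption
    "database.user": "informix",            # assumption
    "database.password": "********",        # assumption
    "database.dbname": "cards_1952",        # matches db=cards_1952 in the snapshot offset below
    # Schema history storage, matching the KafkaSchemaHistory client configs above.
    "schema.history.internal.kafka.bootstrap.servers":
        "192.168.151.201:9092,192.168.151.202:9092,192.168.151.203:9092",
    "schema.history.internal.kafka.topic": "informixschemahistory",
    "key.converter": "io.confluent.connect.avro.AvroConverter",
    "value.converter": "io.confluent.connect.avro.AvroConverter",
    # Transform chain from the EnrichedConnectorConfig dump: unwrap plus a static field.
    "transforms": "unwrap,TSF1",
    "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
    "transforms.unwrap.drop.tombstones": "false",
    "transforms.unwrap.delete.handling.mode": "drop",
    "transforms.TSF1.type": "org.apache.kafka.connect.transforms.InsertField$Value",
    "transforms.TSF1.static.field": "instance_id",
    "transforms.TSF1.static.value": "150",
    "errors.tolerance": "all",
    "errors.log.enable": "true",
    "errors.log.include.messages": "true",
    "exactly.once.support": "requested",
}

resp = requests.put(f"{CONNECT_URL}/connectors/transss/config", json=source_config)
resp.raise_for_status()
print(resp.json()["name"], "configured")
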
(org.apache.kafka.clients.admin.AdminClientConfig:385) [2024-02-28 13:36:45,037] INFO [transss|task-0] Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:45,038] INFO [transss|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:45,038] INFO [transss|task-0] Kafka startTimeMs: 1709109405037 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:45,040] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Resetting the last seen epoch of partition informixschemahistory-0 to 0 since the associated topicId changed from null to I0lhKqlDRS2y6SwdunOftw (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:45,042] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:45,052] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Resetting generation and member id due to: consumer pro-actively leaving the group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1015) [2024-02-28 13:36:45,052] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Request joining group due to: consumer pro-actively leaving the group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1062) [2024-02-28 13:36:45,053] INFO [transss|task-0] Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:710) [2024-02-28 13:36:45,053] INFO [transss|task-0] Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:714) [2024-02-28 13:36:45,053] INFO [transss|task-0] Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:720) [2024-02-28 13:36:45,058] INFO [transss|task-0] App info kafka.consumer for mcp_kafka_net_21-schemahistory unregistered (org.apache.kafka.common.utils.AppInfoParser:83) [2024-02-28 13:36:45,058] INFO [transss|task-0] Started database schema history recovery (io.debezium.relational.history.SchemaHistoryMetrics:115) [2024-02-28 13:36:45,062] INFO [transss|task-0] Database schema history topic 'informixschemahistory' has correct settings (io.debezium.storage.kafka.history.KafkaSchemaHistory:473) [2024-02-28 13:36:45,065] INFO 172.20.0.2 - - [28/Feb/2024:08:36:44 +0000] "GET /connectors/sink_new HTTP/1.1" 200 2567 "-" "ReactorNetty/1.1.6" 132 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,065] INFO 172.20.0.2 - - [28/Feb/2024:08:36:44 +0000] "GET /connectors/transss HTTP/1.1" 200 2313 "-" "ReactorNetty/1.1.6" 133 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,066] INFO [transss|task-0] App info kafka.admin.client for mcp_kafka_net_21-schemahistory-topic-check unregistered (org.apache.kafka.common.utils.AppInfoParser:83) [2024-02-28 13:36:45,069] INFO [transss|task-0] Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:710) [2024-02-28 13:36:45,069] INFO [transss|task-0] Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:714) [2024-02-28 13:36:45,069] INFO [transss|task-0] Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:720) [2024-02-28 13:36:45,087] INFO [transss|task-0] ConsumerConfig values: allow.auto.create.topics = true auto.commit.interval.ms = 
5000 auto.include.jmx.reporter = true auto.offset.reset = earliest bootstrap.servers = [192.168.151.201:9092, 192.168.151.202:9092, 192.168.151.203:9092] check.crcs = true client.dns.lookup = use_all_dns_ips client.id = mcp_kafka_net_21-schemahistory client.rack = confluent.proxy.protocol.client.address = null confluent.proxy.protocol.client.port = null confluent.proxy.protocol.client.version = NONE connections.max.idle.ms = 540000 default.api.timeout.ms = 60000 enable.auto.commit = false exclude.internal.topics = true fetch.max.bytes = 52428800 fetch.max.wait.ms = 500 fetch.min.bytes = 1 group.id = mcp_kafka_net_21-schemahistory group.instance.id = null heartbeat.interval.ms = 3000 interceptor.classes = [] internal.leave.group.on.close = true internal.throw.on.fetch.stable.offset.unsupported = false isolation.level = read_uncommitted key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer max.partition.fetch.bytes = 1048576 max.poll.interval.ms = 300000 max.poll.records = 500 metadata.max.age.ms = 300000 metric.reporters = [] metrics.num.samples = 2 metrics.recording.level = INFO metrics.sample.window.ms = 30000 partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor] receive.buffer.bytes = 65536 reconnect.backoff.max.ms = 1000 reconnect.backoff.ms = 50 request.timeout.ms = 30000 retry.backoff.ms = 100 sasl.client.callback.handler.class = null sasl.jaas.config = null sasl.kerberos.kinit.cmd = /usr/bin/kinit sasl.kerberos.min.time.before.relogin = 60000 sasl.kerberos.service.name = null sasl.kerberos.ticket.renew.jitter = 0.05 sasl.kerberos.ticket.renew.window.factor = 0.8 sasl.login.callback.handler.class = null sasl.login.class = null sasl.login.connect.timeout.ms = null sasl.login.read.timeout.ms = null sasl.login.refresh.buffer.seconds = 300 sasl.login.refresh.min.period.seconds = 60 sasl.login.refresh.window.factor = 0.8 sasl.login.refresh.window.jitter = 0.05 sasl.login.retry.backoff.max.ms = 10000 sasl.login.retry.backoff.ms = 100 sasl.mechanism = GSSAPI sasl.oauthbearer.clock.skew.seconds = 30 sasl.oauthbearer.expected.audience = null sasl.oauthbearer.expected.issuer = null sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000 sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000 sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100 sasl.oauthbearer.jwks.endpoint.url = null sasl.oauthbearer.scope.claim.name = scope sasl.oauthbearer.sub.claim.name = sub sasl.oauthbearer.token.endpoint.url = null security.protocol = PLAINTEXT security.providers = null send.buffer.bytes = 131072 session.timeout.ms = 10000 socket.connection.setup.timeout.max.ms = 30000 socket.connection.setup.timeout.ms = 10000 ssl.cipher.suites = null ssl.enabled.protocols = [TLSv1.2, TLSv1.3] ssl.endpoint.identification.algorithm = https ssl.engine.factory.class = null ssl.key.password = null ssl.keymanager.algorithm = SunX509 ssl.keystore.certificate.chain = null ssl.keystore.key = null ssl.keystore.location = null ssl.keystore.password = null ssl.keystore.type = JKS ssl.protocol = TLSv1.3 ssl.provider = null ssl.secure.random.implementation = null ssl.trustmanager.algorithm = PKIX ssl.truststore.certificates = null ssl.truststore.location = null ssl.truststore.password = null ssl.truststore.type = JKS value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer (org.apache.kafka.clients.consumer.ConsumerConfig:376) [2024-02-28 13:36:45,094] INFO [transss|task-0] 
Kafka version: 7.4.1-ce (org.apache.kafka.common.utils.AppInfoParser:119) [2024-02-28 13:36:45,095] INFO [transss|task-0] Kafka commitId: ec66a57761112352 (org.apache.kafka.common.utils.AppInfoParser:120) [2024-02-28 13:36:45,095] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/sink_new/status HTTP/1.1" 200 156 "-" "ReactorNetty/1.1.6" 22 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,095] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/transss/status HTTP/1.1" 200 157 "-" "ReactorNetty/1.1.6" 20 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,095] INFO [transss|task-0] Kafka startTimeMs: 1709109405094 (org.apache.kafka.common.utils.AppInfoParser:121) [2024-02-28 13:36:45,101] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Subscribed to topic(s): informixschemahistory (org.apache.kafka.clients.consumer.KafkaConsumer:969) [2024-02-28 13:36:45,107] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Resetting the last seen epoch of partition informixschemahistory-0 to 0 since the associated topicId changed from null to I0lhKqlDRS2y6SwdunOftw (org.apache.kafka.clients.Metadata:402) [2024-02-28 13:36:45,108] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:45,108] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/sink_new/config HTTP/1.1" 200 2480 "-" "ReactorNetty/1.1.6" 10 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,108] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/transss/config HTTP/1.1" 200 2226 "-" "ReactorNetty/1.1.6" 9 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,114] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Discovered group coordinator 192.168.151.202:9092 (id: 2147483645 rack: null) (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:896) [2024-02-28 13:36:45,120] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] (Re-)joining group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:566) [2024-02-28 13:36:45,120] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/sink_new/tasks HTTP/1.1" 200 2591 "-" "ReactorNetty/1.1.6" 9 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,120] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/transss/tasks HTTP/1.1" 200 2345 "-" "ReactorNetty/1.1.6" 11 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,125] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/sink_new/tasks/0/status HTTP/1.1" 200 53 "-" "ReactorNetty/1.1.6" 4 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,125] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/transss/tasks/0/status HTTP/1.1" 200 53 "-" "ReactorNetty/1.1.6" 4 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,128] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Request joining group due to: need to re-join with the given member-id: 
mcp_kafka_net_21-schemahistory-33344e7e-e7c2-4365-96b4-cec793cfa4d9 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1062) [2024-02-28 13:36:45,128] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Request joining group due to: rebalance failed due to 'The group member needs to have a valid member id before actually entering a consumer group.' (MemberIdRequiredException) (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1062) [2024-02-28 13:36:45,128] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] (Re-)joining group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:566) [2024-02-28 13:36:45,133] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/sink_new/topics HTTP/1.1" 200 68 "-" "ReactorNetty/1.1.6" 7 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,134] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Successfully joined group with generation Generation{generationId=1, memberId='mcp_kafka_net_21-schemahistory-33344e7e-e7c2-4365-96b4-cec793cfa4d9', protocol='range'} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:627) [2024-02-28 13:36:45,134] INFO 172.20.0.2 - - [28/Feb/2024:08:36:45 +0000] "GET /connectors/transss/topics HTTP/1.1" 200 86 "-" "ReactorNetty/1.1.6" 8 (org.apache.kafka.connect.runtime.rest.RestServer:62) [2024-02-28 13:36:45,134] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Finished assignment for group at generation 1: {mcp_kafka_net_21-schemahistory-33344e7e-e7c2-4365-96b4-cec793cfa4d9=Assignment(partitions=[informixschemahistory-0])} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:710) [2024-02-28 13:36:45,141] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Successfully synced group in generation Generation{generationId=1, memberId='mcp_kafka_net_21-schemahistory-33344e7e-e7c2-4365-96b4-cec793cfa4d9', protocol='range'} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:802) [2024-02-28 13:36:45,141] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Notifying assignor about the new Assignment(partitions=[informixschemahistory-0]) (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:305) [2024-02-28 13:36:45,141] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Adding newly assigned partitions: informixschemahistory-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:317) [2024-02-28 13:36:45,144] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Found no committed offset for partition informixschemahistory-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1543) [2024-02-28 13:36:45,608] INFO [transss|task-0] [Producer clientId=connector-producer-transss-0] Node -3 disconnected. (org.apache.kafka.clients.NetworkClient:1049) [2024-02-28 13:36:45,608] WARN [transss|task-0] [Producer clientId=connector-producer-transss-0] Connection to node -3 (/192.168.151.252:9092) could not be established. Broker may not be available. 
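
The producer warning just above shows one bootstrap entry, 192.168.151.252:9092 (bootstrap id -3), refusing connections while the brokers at 192.168.151.201-203 answer and the cluster pCcBslJoS9ignBFWtH6mxA is discovered normally a moment later. The task keeps running because only one bootstrap server has to respond, but the dead entry costs a connection timeout on every bootstrap. A quick reachability check like the sketch below is one way to confirm which entries should be removed from bootstrap.servers; the address list mixes the brokers seen in this log with the unreachable one and should be replaced with the worker's actual setting.

#!/usr/bin/env python3
"""Probe bootstrap servers with a plain TCP connect (diagnostic sketch)."""
import socket

BOOTSTRAP = [
    "192.168.151.201:9092",
    "192.168.151.202:9092",
    "192.168.151.203:9092",
    "192.168.151.252:9092",  # the address node -3 resolves to in the warning above
]

for entry in BOOTSTRAP:
    host, port = entry.rsplit(":", 1)
    try:
        # A successful TCP connect is enough to rule out 'Broker may not be available'
        # being a plain network/DNS problem for that entry.
        with socket.create_connection((host, int(port)), timeout=3):
            print(f"{entry:<25} reachable")
    except OSError as exc:
        print(f"{entry:<25} NOT reachable ({exc})")
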
(org.apache.kafka.clients.NetworkClient:870) [2024-02-28 13:36:45,608] WARN [transss|task-0] [Producer clientId=connector-producer-transss-0] Bootstrap broker 192.168.151.252:9092 (id: -3 rack: null) disconnected (org.apache.kafka.clients.NetworkClient:1191) [2024-02-28 13:36:45,711] INFO [transss|task-0] [Producer clientId=connector-producer-transss-0] Cluster ID: pCcBslJoS9ignBFWtH6mxA (org.apache.kafka.clients.Metadata:287) [2024-02-28 13:36:46,678] INFO [transss|task-0] Database schema history recovery in progress, recovered 212 records (io.debezium.relational.history.SchemaHistoryMetrics:130) [2024-02-28 13:36:46,679] INFO [transss|task-0] Already applied 123 database changes (io.debezium.relational.history.SchemaHistoryMetrics:140) [2024-02-28 13:36:48,676] INFO [transss|task-0] Database schema history recovery in progress, recovered 4733 records (io.debezium.relational.history.SchemaHistoryMetrics:130) [2024-02-28 13:36:48,677] INFO [transss|task-0] Already applied 4644 database changes (io.debezium.relational.history.SchemaHistoryMetrics:140) [2024-02-28 13:36:48,922] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Revoke previously assigned partitions informixschemahistory-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:336) [2024-02-28 13:36:48,923] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Member mcp_kafka_net_21-schemahistory-33344e7e-e7c2-4365-96b4-cec793cfa4d9 sending LeaveGroup request to coordinator 192.168.151.202:9092 (id: 2147483645 rack: null) due to the consumer is being closed (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1123) [2024-02-28 13:36:48,925] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Resetting generation and member id due to: consumer pro-actively leaving the group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1015) [2024-02-28 13:36:48,925] INFO [transss|task-0] [Consumer clientId=mcp_kafka_net_21-schemahistory, groupId=mcp_kafka_net_21-schemahistory] Request joining group due to: consumer pro-actively leaving the group (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:1062) [2024-02-28 13:36:48,928] INFO [transss|task-0] Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:710) [2024-02-28 13:36:48,928] INFO [transss|task-0] Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:714) [2024-02-28 13:36:48,929] INFO [transss|task-0] Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:720) [2024-02-28 13:36:48,932] INFO [transss|task-0] App info kafka.consumer for mcp_kafka_net_21-schemahistory unregistered (org.apache.kafka.common.utils.AppInfoParser:83) [2024-02-28 13:36:48,932] INFO [transss|task-0] Finished database schema history recovery of 4986 change(s) in 3873 ms (io.debezium.relational.history.SchemaHistoryMetrics:121) [2024-02-28 13:36:48,994] INFO [transss|task-0] Parsing default value for column 'merchant_cat_code' with expression ''0'' (io.debezium.connector.informix.InformixDefaultValueConverter:50) [2024-02-28 13:36:48,999] INFO [transss|task-0] Parsing default value for column 'exch_rate' with expression '1.0000000000' (io.debezium.connector.informix.InformixDefaultValueConverter:50) [2024-02-28 13:36:49,002] INFO [transss|task-0] Parsing default value for column 'trans_source' with expression 
[2024-02-28 13:36:49,006] INFO [transss|task-0] Parsing default value for column 'nduplicate_trans' with expression '0' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,009] INFO [transss|task-0] Parsing default value for column 'charged' with expression ''Y'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,013] INFO [transss|task-0] Parsing default value for column 'efunddataacqnetid' with expression ''I2C'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,015] INFO [transss|task-0] Parsing default value for column 'auto_expire_preauth' with expression ''Y'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,016] INFO [transss|task-0] Parsing default value for column 'partial_trans_flag' with expression ''0'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,060] INFO [transss|task-0] Requested thread factory for connector InformixConnector, id = mcp_kafka_net_21 named = SignalProcessor (io.debezium.util.Threads:271)
[2024-02-28 13:36:49,095] INFO [transss|task-0] Requested thread factory for connector InformixConnector, id = mcp_kafka_net_21 named = change-event-source-coordinator (io.debezium.util.Threads:271)
[2024-02-28 13:36:49,095] INFO [transss|task-0] Requested thread factory for connector InformixConnector, id = mcp_kafka_net_21 named = blocking-snapshot (io.debezium.util.Threads:271)
[2024-02-28 13:36:49,107] INFO [transss|task-0] Creating thread debezium-informixconnector-mcp_kafka_net_21-change-event-source-coordinator (io.debezium.util.Threads:288)
[2024-02-28 13:36:49,107] INFO [transss|task-0] WorkerSourceTask{id=transss-0} Source task finished initialization and start (org.apache.kafka.connect.runtime.AbstractWorkerSourceTask:278)
[2024-02-28 13:36:49,113] INFO [transss|task-0] Metrics registered (io.debezium.pipeline.ChangeEventSourceCoordinator:132)
[2024-02-28 13:36:49,121] INFO [transss|task-0] Context created (io.debezium.pipeline.ChangeEventSourceCoordinator:135)
[2024-02-28 13:36:49,134] INFO [transss|task-0] A previous offset indicating a completed snapshot has been found. Neither schema nor data will be snapshot. (io.debezium.connector.informix.InformixSnapshotChangeEventSource:68)
[2024-02-28 13:36:49,189] INFO [transss|task-0] Snapshot ended with SnapshotResult [status=SKIPPED, offset=InformixOffsetContext [sourceInfoSchema=Schema{io.debezium.connector.informix.Source:STRUCT}, sourceInfo=SourceInfo [serverName=mcp_kafka_net_21, timestamp=null, db=cards_1952, snapshot=FALSE, commitLsn=440659349660336, changeLsn=440659349655688, txId=-1, beginLsn=440659349655576], snapshotCompleted=false]] (io.debezium.pipeline.ChangeEventSourceCoordinator:252)
[2024-02-28 13:36:49,226] INFO [transss|task-0] Connected metrics set to 'true' (io.debezium.pipeline.ChangeEventSourceCoordinator:425)
[2024-02-28 13:36:49,248] INFO [transss|task-0] SignalProcessor started. Scheduling it every 5000ms (io.debezium.pipeline.signal.SignalProcessor:105)
[2024-02-28 13:36:49,249] INFO [transss|task-0] Creating thread debezium-informixconnector-mcp_kafka_net_21-SignalProcessor (io.debezium.util.Threads:288)
[2024-02-28 13:36:49,250] INFO [transss|task-0] Starting streaming (io.debezium.pipeline.ChangeEventSourceCoordinator:271)
[2024-02-28 13:36:49,585] INFO [transss|task-0] Parsing default value for column 'merchant_cat_code' with expression ''0'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,586] INFO [transss|task-0] Parsing default value for column 'exch_rate' with expression '1.0000000000' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,587] INFO [transss|task-0] Parsing default value for column 'trans_source' with expression ''0'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,590] INFO [transss|task-0] Parsing default value for column 'nduplicate_trans' with expression '0' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,591] INFO [transss|task-0] Parsing default value for column 'charged' with expression ''Y'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,593] INFO [transss|task-0] Parsing default value for column 'efunddataacqnetid' with expression ''I2C'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,594] INFO [transss|task-0] Parsing default value for column 'auto_expire_preauth' with expression ''Y'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,595] INFO [transss|task-0] Parsing default value for column 'partial_trans_flag' with expression ''0'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,641] INFO [transss|task-0] Set CDCEngine's LSN to '440659349655576' aka LSN(102599,d018) (io.debezium.connector.informix.InformixStreamingChangeEventSource:244)
[2024-02-28 13:36:49,726] INFO [transss|task-0] Begin recover: from lastBeginLsn='440659349655576' to lastCommitLsn='440659349660336' (io.debezium.connector.informix.InformixStreamingChangeEventSource:119)
[2024-02-28 13:36:49,753] INFO [transss|task-0] Parsing default value for column 'merchant_cat_code' with expression ''0'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,754] INFO [transss|task-0] Parsing default value for column 'exch_rate' with expression '1.0000000000' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,754] INFO [transss|task-0] Parsing default value for column 'trans_source' with expression ''0'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,756] INFO [transss|task-0] Parsing default value for column 'nduplicate_trans' with expression '0' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,756] INFO [transss|task-0] Parsing default value for column 'charged' with expression ''Y'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,758] INFO [transss|task-0] Parsing default value for column 'efunddataacqnetid' with expression ''I2C'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,758] INFO [transss|task-0] Parsing default value for column 'auto_expire_preauth' with expression ''Y'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,759] INFO [transss|task-0] Parsing default value for column 'partial_trans_flag' with expression ''0'' (io.debezium.connector.informix.InformixDefaultValueConverter:50)
[2024-02-28 13:36:49,838] INFO [transss|task-0] [Producer clientId=mcp_kafka_net_21-schemahistory] Resetting the last seen epoch of partition informixschemahistory-0 to 0 since the associated topicId changed from null to I0lhKqlDRS2y6SwdunOftw (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:50,114] INFO [transss|task-0] 1 records sent during previous 00:00:07.155, last recorded offset of {databaseName=mcp_kafka_net_21} partition is {begin_lsn=440659349655576, commit_lsn=440659349660336, change_lsn=440659349655688} (io.debezium.connector.common.BaseSourceTask:213)
[2024-02-28 13:36:50,533] INFO [transss|task-0] The task will send records to topic 'mcp_kafka_net_21' for the first time. Checking whether topic exists (org.apache.kafka.connect.runtime.AbstractWorkerSourceTask:523)
[2024-02-28 13:36:50,539] INFO [transss|task-0] Topic 'mcp_kafka_net_21' already exists. (org.apache.kafka.connect.runtime.AbstractWorkerSourceTask:527)
[2024-02-28 13:36:50,545] INFO [transss|task-0] [Producer clientId=connector-producer-transss-0] Resetting the last seen epoch of partition mcp_kafka_net_21-0 to 2 since the associated topicId changed from null to PdZ5a9iHTZerWcZvE84bZw (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:53,883] INFO [transss|task-0|offsets] WorkerSourceTask{id=transss-0} Committing offsets for 1 acknowledged messages (org.apache.kafka.connect.runtime.WorkerSourceTask:228)
[2024-02-28 13:36:53,894] INFO [Producer clientId=connect-cluster--offsets] Resetting the last seen epoch of partition connect-offsets-0 to 0 since the associated topicId changed from null to YgXYjFuFQZ64FfG00I-mCQ (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:36:53,930] INFO [sink_new|task-0] Received 0 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93)
[2024-02-28 13:37:03,935] INFO [sink_new|task-0] Received 0 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93)
[2024-02-28 13:37:13,936] INFO [sink_new|task-0] Received 0 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93)
[2024-02-28 13:37:23,938] INFO [sink_new|task-0] Received 0 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93)
[2024-02-28 13:37:28,012] INFO [transss|task-0] Recover finished: from lastBeginLsn='440659349655576' to lastCommitLsn='440659349660336', current Lsn='440663644611356' (io.debezium.connector.informix.InformixStreamingChangeEventSource:145)
[2024-02-28 13:37:28,123] INFO [transss|task-0] 9 records sent during previous 00:00:38.009, last recorded offset of {databaseName=mcp_kafka_net_21} partition is {begin_lsn=440672234541080, commit_lsn=440672234545840, change_lsn=440672234541192} (io.debezium.connector.common.BaseSourceTask:213)
[2024-02-28 13:37:28,230] INFO [transss|task-0] The task will send records to topic 'mcp_kafka_net_21.mcp.trans_requests_reps' for the first time. Checking whether topic exists (org.apache.kafka.connect.runtime.AbstractWorkerSourceTask:523)
[2024-02-28 13:37:28,232] INFO [transss|task-0] Topic 'mcp_kafka_net_21.mcp.trans_requests_reps' already exists. (org.apache.kafka.connect.runtime.AbstractWorkerSourceTask:527)
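Aside, not from the log itself: the REST access-log entries above ("GET /connectors/sink_new/topics", "GET /connectors/transss/topics") come from the Kafka Connect REST API, which can also be queried by hand to confirm the task states and active topics seen in this log. A sketch using the JDK 11 HttpClient follows; the worker's REST listener address is not shown here, so http://localhost:8083 (the Connect default) is an assumption.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class ConnectorStatusCheck {
        public static void main(String[] args) throws Exception {
            String base = "http://localhost:8083"; // assumed worker REST address
            HttpClient client = HttpClient.newHttpClient();
            for (String path : new String[] {
                    "/connectors/transss/status",   // connector and task state
                    "/connectors/transss/topics"}) { // active topics, same endpoint seen in the access log above
                HttpRequest req = HttpRequest.newBuilder(URI.create(base + path)).GET().build();
                HttpResponse<String> resp = client.send(req, HttpResponse.BodyHandlers.ofString());
                System.out.println(path + " -> " + resp.statusCode() + " " + resp.body());
            }
        }
    }

The same two endpoints work for the sink connector by substituting sink_new for transss.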
[2024-02-28 13:37:28,233] INFO [transss|task-0] [Producer clientId=connector-producer-transss-0] Resetting the last seen epoch of partition mcp_kafka_net_21.mcp.trans_requests_reps-0 to 2 since the associated topicId changed from null to 7frYXFYVRvqqhwO3t6BbKQ (org.apache.kafka.clients.Metadata:402)
[2024-02-28 13:37:28,439] INFO [sink_new|task-0] Received 1 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93)
[2024-02-28 13:37:28,440] INFO [sink_new|task-0] Flushing records in JDBC Writer 1 (io.confluent.connect.jdbc.sink.JdbcDbWriter:63)
[2024-02-28 13:37:28,627] INFO [sink_new|task-0] Maximum table name length for database is 63 bytes (io.confluent.connect.jdbc.dialect.PostgreSqlDatabaseDialect:123)
[2024-02-28 13:37:28,627] INFO [sink_new|task-0] JdbcDbWriter Connected (io.confluent.connect.jdbc.sink.JdbcDbWriter:54)
[2024-02-28 13:37:28,662] INFO [sink_new|task-0] Records is empty (io.confluent.connect.jdbc.sink.GPBufferedRecords:136)
[2024-02-28 13:37:28,669] INFO [sink_new|task-0] Checking PostgreSql dialect for existence of TABLE "trans_requests_reps" (io.confluent.connect.jdbc.dialect.GenericDatabaseDialect:589)
[2024-02-28 13:37:28,729] INFO [sink_new|task-0] Using PostgreSql dialect TABLE "trans_requests_reps" present (io.confluent.connect.jdbc.dialect.GenericDatabaseDialect:597)
[2024-02-28 13:37:30,255] INFO [sink_new|task-0] Checking PostgreSql dialect for type of TABLE "trans_requests_reps" (io.confluent.connect.jdbc.dialect.GenericDatabaseDialect:890)
[2024-02-28 13:37:31,011] INFO [sink_new|task-0] Ordered columns: [io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3c213abd, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2acdf428, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@25597251, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6977e768, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5c36cc29, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@199f0f33, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@773a3dab, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@21a0d883, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4b9d1f1a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2a9a77cd, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@cebc050, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2b28758e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@86eda3e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@71b3e0e1, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@238857c7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4b64d23d, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@35a54e45, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5b7f14f8, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3a28463f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@57b08e7d, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@487835a8, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6b03308b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@13628a6a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@40723d1d, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@492e95ce, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3b5b6308, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3ced8fd3, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6da6d786, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@509bbf19, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3038c521,
io.confluent.connect.jdbc.sink.metadata.ColumnDetails@607b47e2, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3b3aa68c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3654a585, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@40b36fa3, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7acd7400, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@14adb313, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7c6448e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5cd3fda2, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4dafff4c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@34e3f8c1, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@13438e7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4249fb8e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@70ae2914, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5b20f230, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7bf31ab3, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@76eb1bc8, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@770c7a88, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2b61b72b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@48240676, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@663f71e1, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@72f38f94, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@548da60e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3c720d5f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@21dfd349, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4f3e67c9, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@76ba15f6, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5a8ee68b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5942308, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@39a81c5c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1f9bbb3a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@328cacfc, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@162445c6, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6f97f1a2, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2d97c501, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7a2b7703, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4e1517f5, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1e5ff574, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@df39473, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2cc179a1, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4a0d6973, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2b98c113, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3ad9981f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1df2e294, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3c9556a8, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@51e89870, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2707fb32, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2d6b5b04, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3bdd7817, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2e634c04, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@36a5ec91, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@41e1323a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@112ce7d1, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@259b9a9c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3cc33bda, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@74fbbe9a, 
io.confluent.connect.jdbc.sink.metadata.ColumnDetails@21fddbb, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7ba9e81e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5e11015a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@77376b6c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@b399234, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3f647255, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@69a9bf32, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2509c560, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@62e65bc7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7e0c7b14, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5af80bc8, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@31b1f732, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@61d9baa0, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7c10a954, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@66ac5b55, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@589a28d5, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@74ba85dc, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@d99c816, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@96b5c03, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@be017b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@200af0d5, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@639055ba, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3048968f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@400dfc24, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@37f630a8, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@562a9e69, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@22a286cd, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@de1b261, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@b94a005, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@94fbec8, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3fedd5c2, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3f1f0ae4, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@11a5118a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@65e85488, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@347ace32, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@73f5aa73, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4ae5dcc6, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6d0e9d06, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@fe7c5cb, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@511f4076, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@35f8b57f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2c07f86b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1d7fa9bd, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@36485694, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@46eb5e6f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@553c3279, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@356e4b22, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@416647ed, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5d626188, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@69792fa7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2e094ef1, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5da33df2, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@130369ef, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@33a7783f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@57bdd67c, 
io.confluent.connect.jdbc.sink.metadata.ColumnDetails@13352dd4, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1b71724c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@139f979d, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2aa8bda4, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@10497ea8, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3a8dff, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6f76c11a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@dc04cd, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5b3b8640, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@f3aadad, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@561dc341, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@36075f23, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3189c625, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6b04527c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@53bb8396, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5fc93085, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@200e7db7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@69504c78, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6678d5f7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@70a39ca5, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2393b4c3, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@484b1f45, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@68bcacd6, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@49653d0, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3a9dca2d, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2a3c4b0c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@276b8881, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@11d5476f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@45cd2bda, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@d617c44, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@76b5d2e9, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4de01d6f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@61df7cf2, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@675abfd1, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3f201b49, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@726662a7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6844a8f5, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5ad2261b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6597945b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2425b114, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@69eba665, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2242b7bd, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7baecae2, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@728a6992, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@46663128, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@71ae9906, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7cdab40b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5d3d94fb, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2aa51105, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2fc6cbfe, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@69548e1, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@373e8e84, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@35918c5f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2c912828, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6faf1b, 
io.confluent.connect.jdbc.sink.metadata.ColumnDetails@43665b3c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7a2cc47b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5fd9737e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@223318e9, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@537d0828, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4f6e1120, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5bb8d930, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@617666fe, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5af10eb0, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6529f3df, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7845d713, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2a560f73, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@78000007, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@529da3d3, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4360bdc, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1e81a1be, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@66799a51, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@59c53e76, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6d99a679, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7e066625, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5487c0a6, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@244162ce, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4cba251, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4931dba3, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2359b0d3, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@c21f164, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@55a1039b, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@120a264f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7c3bd3f5, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7f92b7c7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2272ce75, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6109bc2a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@438f7f85, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@29c9c4bc, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1dfcb696, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@31b8a1ef, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@973830a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6e785abc, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@164819e3, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@620b751c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7775d184, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5b74d86c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@da555e0, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@34bf62dd, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6d1f7c3f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@10556e48, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@377833f7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@789e164f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6e7663e7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@55ea674f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@23fb118e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2b5f57d0, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@76eb7007, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@70bdb18a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@b31b972, 
io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5a818e13, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@77c7a60c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6aeb789f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6c97a699, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3a305c0e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@708d7a82, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4193cb8c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@10611e49, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2889f067, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@334bd69e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6cdbb4d0, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@74d07c54, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@133da061, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7eaf7bbe, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4f633d8f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3826caeb, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@46e64b26, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4391bacf, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@15c87877, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1b3789b7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6ffc4e8c, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@2175c67e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@77a819e7, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@50755c94, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5d70e038, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@52399bde, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@6569d75f, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1f82b69a, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@4529e472, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@5b507062, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@70a88778, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7a94d991, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@7014a8c5, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@281f05c5, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1cb8d4f9, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@403cdeae, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@153b14ec, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@45da1aa3, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@1f1f11fd, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@39f0651e, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@74219ab8, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@61bb899d, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@64cecae, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@3caea1ec, io.confluent.connect.jdbc.sink.metadata.ColumnDetails@44a2089b] (io.confluent.connect.jdbc.dialect.PostgreSqlDatabaseDialect:625) [2024-02-28 13:37:31,013] INFO [sink_new|task-0] Setting metadata for table "trans_requests_reps" to Table{name='"trans_requests_reps"', type=TABLE columns=[Column{'merchant_cat_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'address_line1', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'add_nat_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'auth_mode', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'address_line2', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'description', isPrimaryKey=false, allowsNull=true, 
sqlType=varchar}, Column{'card_verif_result', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'isuer_contry_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'reported_date', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'trans_value_cate', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'is_cc_payment_trns', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'org_data_elements', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'interchange_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'cash_back_flag', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'country_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'switch_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_value_cat', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'add_resp_amounts', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'amount_tp_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'po_id', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'adjusted_amt', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'trmnl_verif_result', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'receiving_inst', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'process_mode', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'ex_date', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'efund_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'bill_amount', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'ex_rate_bill', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'chargeback_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'auth_life_cycle', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'avsresp', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'msg_rvsl_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'merchant_name', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'auth_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'amount_trans_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'primary_bitmap', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'efunddataacqnetid', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'total_auth_amount', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'card_status', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'settlement_status', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'account_type', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'card_iss_ref_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'network_prod_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'fix_rate_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'trans_value_type', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'zip', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'issuer_authr_cat', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'card_expiry_on', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'token_assure_level', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'sec_ctrl_info', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'new_add2', isPrimaryKey=false, 
allowsNull=true, sqlType=int4}, Column{'sub_service', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'ncards', isPrimaryKey=false, allowsNull=true, sqlType=int2}, Column{'cal_ic_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'new_add3', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'depository_type', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'token_add_data1', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'comp_time', isPrimaryKey=false, allowsNull=true, sqlType=timestamp}, Column{'token_add_data2', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'acq_user_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'foreign_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'instance_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'currency_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'interface', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'switch_sr_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'last_updated_at', isPrimaryKey=false, allowsNull=true, sqlType=timestamp}, Column{'stl_code', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'track2_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'response_code', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'emv_dki', isPrimaryKey=false, allowsNull=true, sqlType=int2}, Column{'request_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'pnding_pros_count', isPrimaryKey=false, allowsNull=true, sqlType=int8}, Column{'trans_process_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'city', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'settlement_date', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'switch_risk_codes', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'crbk_doc_ind', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'third_bitmap', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'first_name', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'visa_dr_pos_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'rev_trace_audit', isPrimaryKey=false, allowsNull=true, sqlType=int8}, Column{'acq_inst_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'crd_acnt_trace_audit', isPrimaryKey=false, allowsNull=true, sqlType=int8}, Column{'amount_processed', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'bill_curr_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'emv_app_int_prof', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'deposit_cr_amount', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'partial_tr_org_amt', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'efunddataissnetid', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'date_of_birth', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'ext_interface_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'ext_pay_method', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'load_amount', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'totproduct_comms', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'clerk_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'home_phone', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, 
Column{'card_category', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'bank_routing_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'token_mob_type', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'trans_code', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'surcharge_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'amount_setlment', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'fee_dtl_tab_id', isPrimaryKey=false, allowsNull=true, sqlType=int2}, Column{'trans_description', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_local_dtime', isPrimaryKey=false, allowsNull=true, sqlType=timestamp}, Column{'adv_reason_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'payment_fund_src', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'auth_agent_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'data2', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'cavv_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'misc_tagged_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'switch_visa_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'token_trans_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'token_otp_expiry', isPrimaryKey=false, allowsNull=true, sqlType=timestamp}, Column{'second_bitmap', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'payment_srno', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'swt_sec_srv_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'settlement_cc', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'additional_fee', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'type_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'fparam_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_status', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'auth_id_resp_len', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'unpredictable_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'payee', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'is_pin_based', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'dcc_indicator', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'iso_serial_no', isPrimaryKey=false, allowsNull=true, sqlType=int8}, Column{'iss_trans_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'switch_prefixdata', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'optional_isuer_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'acq_contry_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'nat_pos_cc', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'excep_code', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'iso_message_type', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_date', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'ach_batch_no', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'data3', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'device_type', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'data4', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'post_card_sts', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'network_file_id', 
isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'data5', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'reported_time', isPrimaryKey=false, allowsNull=true, sqlType=time}, Column{'work_phone', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'exec_time', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'rec_sync_created_at', isPrimaryKey=false, allowsNull=true, sqlType=timestamp}, Column{'imc_cics_trancode', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'multi_clearing_seq', isPrimaryKey=false, allowsNull=true, sqlType=int2}, Column{'account_no_to', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'new_add', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'pan_extended', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'atm_postal_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_batch_no', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'additional_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'on_behlf_srv_reslt', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'auth_id_resp', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'efunddatapseudoter', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'emv_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'is_multi_currency', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'trans_type', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'exch_rate', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'issuer_pvt_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'shift_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'acq_trans_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'emv_iss_script', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'acnresp', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'social_sec_no', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'replace_amounts', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'fee_dtl_serial', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'auto_expire_preauth', isPrimaryKey=false, allowsNull=false, sqlType=bpchar}, Column{'switch_service_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'add_data_iso', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'service_rest_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'rev_resp_code', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'trans_source', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'curr_hold_amount', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'additional_amounts', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'is_multi_curr_ntw', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'trans_analysis_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'nduplicate_trans', isPrimaryKey=false, allowsNull=true, sqlType=int2}, Column{'token_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'token_req_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'token_expiry', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'add_response_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'card_seq_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'dispt_control_num', 
isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trns_expense_tag', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_indicator', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'point_of_srv_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'fwin_contry_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'state', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'amount_setlmnt_req', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'rec_last_sync_updated_at', isPrimaryKey=false, allowsNull=true, sqlType=timestamp}, Column{'org_trace_audit', isPrimaryKey=false, allowsNull=true, sqlType=int8}, Column{'load_center', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'net_intl_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'device_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'pin_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'amount_sp_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'prev_expiry_date', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'atm_off_premises', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'ex_rate_setlment', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'emv_cvn', isPrimaryKey=false, allowsNull=true, sqlType=int2}, Column{'cp_card_prg_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'service_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'commission_amount', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'ser_no', isPrimaryKey=false, allowsNull=false, sqlType=serial}, Column{'switch_header', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'card_aceptor_name', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'bill_cc', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'forwd_inst_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'pay_acnt_reference', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'is_international', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'new_col6', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'new_col7', isPrimaryKey=false, allowsNull=true, sqlType=text}, Column{'file_cycle_no', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'new_col4', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'cvv2resp', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'new_col5', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'auth_agent_cc', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'chargeback_flag', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'transmission_dtime', isPrimaryKey=false, allowsNull=true, sqlType=timestamp}, Column{'card_prg_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'account_no_from', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'statement_date', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'emv_ter_cap_prof', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'pos_cond_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'business_date', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'pri_card_prg_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'capture_date', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'discounted_price', isPrimaryKey=false, 
allowsNull=true, sqlType=numeric}, Column{'cell_phone', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'emv_iss_script_res', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'card_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_time', isPrimaryKey=false, allowsNull=true, sqlType=time}, Column{'merchant_vol_ind', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'fee_in_multi_curr', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'pin_capture_code', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'receiver_name', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'comm_protocol_id', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'new_col', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'terminal_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'token_otp', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'switch_type', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'payment_stl_status', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'token_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'partial_trans_flag', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'sys_control_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'sys_trace_auditno', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'new_col12', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'charged', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'new_col11', isPrimaryKey=false, allowsNull=true, sqlType=date}, Column{'retrieval_ref_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'is_cash_adv', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'comp_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'new_col10', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'add_res_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'amount_stl_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'card_aceptor_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'misc_amount', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'last_name', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'function_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_inds', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'switch_id', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'org_trans_date', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_geo_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'bank_auth_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'device_address', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'bank_account_title', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'virtual_account', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'amount_requested', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'mult_curr_cnv_rate', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'acq_country_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'atm_terminal_addr', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'network_oif', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'available_balance', isPrimaryKey=false, 
allowsNull=true, sqlType=numeric}, Column{'t', isPrimaryKey=false, allowsNull=true, sqlType=int2}, Column{'email_address', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'remaining_balance', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'optional_acq_fee', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'efunddataprocssrid', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'is_intl_ntw', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'repl_add_fee', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'multi_clearing_cnt', isPrimaryKey=false, allowsNull=true, sqlType=int4}, Column{'cash_amount', isPrimaryKey=false, allowsNull=true, sqlType=numeric}, Column{'fee_trace_audit', isPrimaryKey=false, allowsNull=true, sqlType=int8}, Column{'process_code', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'primary_card_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'bank_account_no', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trace_audit_no', isPrimaryKey=false, allowsNull=true, sqlType=int8}, Column{'consumer_entry', isPrimaryKey=false, allowsNull=true, sqlType=timestamp}, Column{'iso_flag', isPrimaryKey=false, allowsNull=true, sqlType=bpchar}, Column{'avs_data', isPrimaryKey=false, allowsNull=true, sqlType=varchar}, Column{'trans_ind', isPrimaryKey=false, allowsNull=true, sqlType=varchar}]} (io.confluent.connect.jdbc.util.TableDefinitions:64) [2024-02-28 13:37:31,035] INFO [sink_new|task-0] Using GPLOAD to insert records (io.confluent.connect.jdbc.sink.GPBinder:77) [2024-02-28 13:37:31,084] INFO [sink_new|task-0] Received 8 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93) [2024-02-28 13:37:31,084] INFO [sink_new|task-0] Flushing records in JDBC Writer 8 (io.confluent.connect.jdbc.sink.JdbcDbWriter:63) [2024-02-28 13:37:31,106] INFO [sink_new|task-0] Received 7 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93) [2024-02-28 13:37:31,106] INFO [sink_new|task-0] Flushing records in JDBC Writer 7 (io.confluent.connect.jdbc.sink.JdbcDbWriter:63) [2024-02-28 13:37:33,903] INFO [transss|task-0|offsets] WorkerSourceTask{id=transss-0} Committing offsets for 16 acknowledged messages (org.apache.kafka.connect.runtime.WorkerSourceTask:228) [2024-02-28 13:37:33,938] INFO [sink_new|task-0] Received 0 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93) [2024-02-28 13:37:43,946] INFO [sink_new|task-0] Received 0 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93) [2024-02-28 13:37:43,946] INFO [sink_new|task-0] Flushing records in JDBC Writer for table ID: "trans_requests_reps" (io.confluent.connect.jdbc.sink.JdbcDbWriter:123) [2024-02-28 13:37:43,946] INFO [sink_new|task-0] Flushing 16 buffered records (io.confluent.connect.jdbc.sink.GPBufferedRecords:139) [2024-02-28 13:37:43,947] INFO [sink_new|task-0] Flushing 16 records (io.confluent.connect.jdbc.sink.GPBinder:111) [2024-02-28 13:37:43,947] INFO [sink_new|task-0] Total Columns: 284, Total Key Columns: 1, Total Non Key Columns: 283, Total Records: 16 (io.confluent.connect.jdbc.gp.GpDataIngestionService:154) [2024-02-28 13:37:43,948] INFO [sink_new|task-0] Update mode is LAST_ROW_ONLY (io.confluent.connect.jdbc.gp.GpDataIngestionService:155) [2024-02-28 13:37:43,978] INFO [sink_new|task-0] Total records after applying update mode: 16 (io.confluent.connect.jdbc.gp.GpDataIngestionService:188) [2024-02-28 13:37:43,979] INFO [sink_new|task-0] Writing to file 
/tmp/gpload/trans_requests_reps17862998180764270671.csv (io.confluent.connect.jdbc.sink.GPBinder:43)
[2024-02-28 13:37:43,992] INFO [sink_new|task-0] Rows count (io.confluent.connect.jdbc.sink.GPBinder:49)
[2024-02-28 13:37:43,996] INFO [sink_new|task-0] gpfdist running on 192.168.151.201 (io.confluent.connect.jdbc.sink.GPBinder:55)
[2024-02-28 13:37:44,444] INFO [sink_new|task-0] Running gpload command /usr/local/greenplum-db-6.20.5/bin/gpload -l /tmp/gpload/trans_requests_reps13770884647786760245.log -f /tmp/gpload/trans_requests_reps2017049426781604120.yml (io.confluent.connect.jdbc.sink.GPBinder:126)
[2024-02-28 13:37:47,560] INFO [sink_new|task-0] gpload output: [2024-02-28 13:37:45|INFO|gpload session started 2024-02-28 13:37:45 , 2024-02-28 13:37:45|INFO|started gpfdist -p 8000 -P 9000 -f "/tmp/gpload/trans_requests_reps17862998180764270671.csv" -t 30 -m 65535 , 2024-02-28 13:37:46|INFO|reusing staging table smi.staging_gpload_reusable_183bb2a8fc892e0a6ce78a4500fe5e2a , 2024-02-28 13:37:46|INFO|did not find an external table to reuse. creating smi.ext_gpload_reusable_a4cb749a_d614_11ee_97fe_000c2956772c , 2024-02-28 13:37:47|INFO|running time: 2.37 seconds , 2024-02-28 13:37:47|INFO|rows Inserted = 0 , 2024-02-28 13:37:47|INFO|rows Updated = 32 , 2024-02-28 13:37:47|INFO|data formatting errors = 0 , 2024-02-28 13:37:47|INFO|gpload succeeded ] (io.confluent.connect.jdbc.sink.GPBinder:129)
[2024-02-28 13:37:47,561] INFO [sink_new|task-0] GPload finished successfully (io.confluent.connect.jdbc.sink.GPBinder:138)
[2024-02-28 13:37:47,561] INFO [sink_new|task-0] Keeping GP files (io.confluent.connect.jdbc.sink.GPBinder:145)
[2024-02-28 13:37:57,562] INFO [sink_new|task-0] Received 0 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93)
[2024-02-28 13:38:07,564] INFO [sink_new|task-0] Received 0 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93)
[2024-02-28 13:38:17,565] INFO [sink_new|task-0] Received 0 records (io.confluent.connect.jdbc.sink.JdbcSinkTask:93)
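Aside, not from the log itself: the GPBinder entries above show the sink staging the 16 buffered records into a CSV under /tmp/gpload and shelling out to gpload, whose session output is captured inline, and the "Keeping GP files" entry indicates the control and data files are left on disk. For troubleshooting, the same command line could be re-run outside the connector; a minimal sketch follows, reusing the exact paths from the "Running gpload command" entry, with the caveat that the connector regenerates those file names on every flush, so the paths are only valid while the files still exist.

    import java.io.IOException;

    public class GpLoadRerun {
        public static void main(String[] args) throws IOException, InterruptedException {
            // Command line copied from the GPBinder log entry above; paths are flush-specific.
            ProcessBuilder pb = new ProcessBuilder(
                    "/usr/local/greenplum-db-6.20.5/bin/gpload",
                    "-l", "/tmp/gpload/trans_requests_reps13770884647786760245.log",
                    "-f", "/tmp/gpload/trans_requests_reps2017049426781604120.yml");
            pb.redirectErrorStream(true);              // merge stderr into stdout for one combined session log
            Process p = pb.start();
            p.getInputStream().transferTo(System.out); // stream the gpload session output as it runs
            System.out.println("gpload exit code: " + p.waitFor());
        }
    }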