Source and target DB is Oracle, and I am using Debezium connector version 3.2.4.Final.

Source connector config:
====================================================================================
database.hostname: abc
database.port: 1527
database.dbname: NIMS
database.user: abc
database.password: abc
topic.prefix: c12
max.queue.size: 60000
max.batch.size: 15000
query.fetch.size: 2000
snapshot.fetch.size: 2000
snapshot.max.threads: 2
time.precision.mode: connect
binary.handling.mode: bytes
snapshot.mode: initial
include.schema.changes: true
log.mining.buffer.transaction.events.threshold: 50000
lob.enabled: true
archive.destination.name: LOG_ARCHIVE_DEST_1
table.include.list:
signal.data.collection:
key.converter: org.apache.kafka.connect.json.JsonConverter
value.converter: org.apache.kafka.connect.json.JsonConverter
key.converter.schemas.enable: true
value.converter.schemas.enable: true
transforms: unwrap,filter
transforms.unwrap.type: io.debezium.transforms.ExtractNewRecordState
transforms.unwrap.delete.tombstone.handling.mode: rewrite-with-tombstone
transforms.unwrap.add.fields: table,ts_ms,change_lsn,commit_lsn,op
transforms.filter.type: io.debezium.transforms.Filter
transforms.filter.language: jsr223.groovy
transforms.filter.condition: value.schema().field("__deleted") == null || value.__deleted == 'false'
producer.override.acks: all
post.processors: reselector
reselector.type: io.debezium.processors.reselect.ReselectColumnsPostProcessor
reselector.reselect.columns.include.list: NIMS.X_EOL_MESSAGE_QUEUE:MESSAGE
reselector.reselect.unavailable.values: true
reselector.reselect.null.values: true
reselector.reselect.use.event.key: false
reselector.reselect.error.handling.mode: WARN
====================================================================================

Target sink connector config:
config:
connection.restart.on.errors: true
use.time.zone: UTC
batch.size: 3000
consumer.override.max.poll.records: 9000
consumer.override.heartbeat.interval.ms: 5000
consumer.override.session.timeout.ms: 60000
consumer.override.request.timeout.ms: 180000
consumer.override.max.poll.interval.ms: 300000
insert.mode: upsert
primary.key.mode: record_key
delete.enabled: true
connection.provider: org.hibernate.c3p0.internal.C3P0ConnectionProvider
key.converter: org.apache.kafka.connect.json.JsonConverter
value.converter: org.apache.kafka.connect.json.JsonConverter
key.converter.schemas.enable: true
value.converter.schemas.enable: true
connection.url:
quote.identifiers: always
#topics: Source1-EmpSource1-Company
collection.name.format: ${topic}
topics.regex:
# SMT - transformation rules
transforms: dropPrefix,ReplaceField
transforms.dropPrefix.type: org.apache.kafka.connect.transforms.RegexRouter
transforms.dropPrefix.regex:
transforms.dropPrefix.replacement:
transforms.ReplaceField.type: org.apache.kafka.connect.transforms.ReplaceField$Value
transforms.ReplaceField.exclude: __table,__ts_ms,__change_lsn,__commit_lsn,__deleted,__op,__scn
====================================================================================
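For anyone wanting to reproduce the setup, here is a minimal sketch of how configs like the two above are typically deployed: the Kafka Connect REST API accepts the flat key/value properties as a JSON map via PUT /connectors/{name}/config. The Connect URL, the connector names, and the connector.class values are my assumptions and are not part of the configs shown above; the property maps are trimmed to a few entries for brevity.

import json
import urllib.request

CONNECT_URL = "http://localhost:8083"  # assumed Kafka Connect REST endpoint

# Trimmed copies of the configs above; fill in the remaining properties as listed.
source_config = {
    "connector.class": "io.debezium.connector.oracle.OracleConnector",  # assumed class
    "database.hostname": "abc",
    "database.port": "1527",
    "database.dbname": "NIMS",
    "topic.prefix": "c12",
    "snapshot.mode": "initial",
}
sink_config = {
    "connector.class": "io.debezium.connector.jdbc.JdbcSinkConnector",  # assumed class
    "insert.mode": "upsert",
    "primary.key.mode": "record_key",
    "delete.enabled": "true",
    "collection.name.format": "${topic}",
}

def register(name, config):
    # PUT /connectors/{name}/config creates the connector or updates its config.
    req = urllib.request.Request(
        f"{CONNECT_URL}/connectors/{name}/config",
        data=json.dumps(config).encode("utf-8"),
        headers={"Content-Type": "application/json"},
        method="PUT",
    )
    with urllib.request.urlopen(req) as resp:
        print(name, resp.status)

register("oracle-source", source_config)       # placeholder connector names
register("oracle-jdbc-sink", sink_config)

All property values are sent as strings in the JSON body, which is why ports and booleans are quoted in the sketch.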