Uploaded image for project: 'Debezium'
  1. Debezium
  2. DBZ-8365

The database schema history couldn't be recovered. Consider to increase the value for schema.history.internal.kafka.recovery.poll.interval.ms

XMLWordPrintable

    • Type: Task
    • Resolution: Won't Do
    • Priority: Critical
    • None
    • None
    • oracle-connector
    • None
    • False
    • None
    • False

      Hi,

      My Debezium connector for Oracle is failing with the error below in our production environment.

       
      java.lang.IllegalStateException: The database schema history couldn't be recovered. Consider to increase the value for schema.history.internal.kafka.recovery.poll.interval.ms
          at io.debezium.storage.kafka.history.KafkaSchemaHistory.recoverRecords(KafkaSchemaHistory.java:312)
          at io.debezium.relational.history.AbstractSchemaHistory.recover(AbstractSchemaHistory.java:100)
          at io.debezium.relational.history.SchemaHistory.recover(SchemaHistory.java:192)
          at io.debezium.relational.HistorizedRelationalDatabaseSchema.recover(HistorizedRelationalDatabaseSchema.java:72)
          at io.debezium.schema.HistorizedDatabaseSchema.recover(HistorizedDatabaseSchema.java:40)
          at io.debezium.connector.common.BaseSourceTask.validateAndLoadSchemaHistory(BaseSourceTask.java:148)
          at io.debezium.connector.oracle.OracleConnectorTask.start(OracleConnectorTask.java:101)
          at io.debezium.connector.common.BaseSourceTask.start(BaseSourceTask.java:248)
          at org.apache.kafka.connect.runtime.AbstractWorkerSourceTask.initializeAndStart(AbstractWorkerSourceTask.java:283)
          at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:227)
          at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:284)
          at org.apache.kafka.connect.runtime.AbstractWorkerSourceTask.run(AbstractWorkerSourceTask.java:80)
          at org.apache.kafka.connect.runtime.isolation.Plugins.lambda$withClassLoader$1(Plugins.java:237)
          at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
          at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
          at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
          at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
          at java.base/java.lang.Thread.run(Thread.java:829)
      I tried restarting the connector after setting the property "schema.history.internal.kafka.recovery.poll.interval.ms" to "5000", but the same issue still occurs.
       
      Debezium version: 2.7.2.Final
      Below is the connector config:
      {
        "name": "TMSP_Source_Debezium_transactional_consolidated_le_100MM_v1.0",
        "config":

      {     "connector.class": "io.debezium.connector.oracle.OracleConnector",     "key.converter": "io.confluent.connect.avro.AvroConverter",     "value.converter": "io.confluent.connect.avro.AvroConverter",     "transforms": "unwrap, regexRouter",     "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",     "transforms.unwrap.drop.tombstones": "false",     "transforms.regexRouter.type": "org.apache.kafka.connect.transforms.RegexRouter",     "transforms.regexRouter.regex": "DBZ\\.(.*)\\.(.*)",     "transforms.regexRouter.replacement": "$1_$2",     "topic.prefix": "DBZ",     "database.hostname": "",     "database.port": "1521",     "database.user": "",     "database.password": "",     "database.dbname": "",     "snapshot.mode": "schema_only",     "database.connection.adapter": "LogMiner",     "log.mining.strategy": "online_catalog",     "event.processing.failure.handling.mode": "ignore",     "poll.interval.ms": "1000",     "decimal.handling.mode": "double",     "log.mining.batch.size.default": "5000",     "log.mining.batch.size.max": "100000",     "signal.data.collection": "DEBEZIUM.DEBEZIUM_SIGNAL",     "signal.enabled.channels": "source",     "topic.naming.strategy": "io.debezium.schema.SchemaTopicNamingStrategy",     "table.include.list": "List of tables from different schemas under same database",     "include.schema.changes": "true",     "topic.creation.default.partitions": "6",     "database.history.consumer.sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"\" password=\"ilF/\";",     "schema.history.internal.consumer.sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"\" password=\"ilF/\";",     "value.converter.basic.auth.credentials.source": "USER_INFO",     "schema.history.internal.producer.security.protocol": "SASL_SSL",     "confluent.topic.replication.factor": "3",     "topic.creation.default.replication.factor": "3",     
"schema.history.internal.producer.sasl.mechanism": "PLAIN",     "database.history.producer.sasl.mechanism": "PLAIN",     "confluent.topic.bootstrap.servers": "",     "database.history.kafka.bootstrap.servers": "",     "schema.history.internal.kafka.bootstrap.servers": "",     "value.converter.schema.registry.url": "",     "confluent.topic.sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"\" password=\"ilF/\";",     "schema.history.internal.consumer.sasl.mechanism": "PLAIN",     "schema.history.internal.producer.sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"\" password=\"ilF/\";",     "key.converter.schema.registry.basic.auth.user.info": "",     "database.history.consumer.sasl.mechanism": "PLAIN",     "key.converter.basic.auth.credentials.source": "USER_INFO",     "schema.history.internal.consumer.security.protocol": "SASL_SSL",     "database.history.kafka.topic": "history",     "value.converter.schema.registry.basic.auth.user.info": "",     "database.history.consumer.security.protocol": "SASL_SSL",     "database.history.consumer.ssl.endpoint.identification.algorithm": "https",     "schema.history.internal.kafka.topic": "schema-changes.internal.oracle",     "database.schema": "debezium",     "confluent.topic.sasl.mechanism": "PLAIN",     "database.history.producer.sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"UWC7EUY6VNQYTGKA\" password=\"ilF/6b02wA4K6TgCq5Xr8jczDjK9LT8KnZCNQky6VkgDzQRIfkXZGuXM3oM1wVhC\";",     "database.history.producer.ssl.endpoint.identification.algorithm": "https",     "topic.creation.default.cleanup.policy": "delete",     "database.history.producer.security.protocol": "SASL_SSL",     "database.server.name": "prod",     "schema.history.internal.kafka.recovery.poll.interval.ms": "5000",     "confluent.topic.security.protocol": "SASL_SSL",     "key.converter.schema.registry.url": ""   }

      }

              Unassigned Unassigned
              nareshdasari Naresh Dasari
              Votes:
              0 Vote for this issue
              Watchers:
              3 Start watching this issue

                Created:
                Updated:
                Resolved: