-
Bug
-
Resolution: Done
-
Major
-
None
-
None
debezium-connector-mongodb-0.7.4-plugin.tar.gz Confluent Platform 4.0
{ "name": "mongodb-connector-04", "config": { "connector.class": "io.debezium.connector.mongodb.MongoDbConnector", "mongodb.hosts": "rs0/proxmox01.moffatt.me:27017", "mongodb.name": "ubnt", "database.whitelist": "ace", "collection.whitelist": "ace.device", "transforms": "unwrap,changeTopic", "transforms.unwrap.type": "io.debezium.connector.mongodb.transforms.UnwrapFromMongoDbEnvelope", "transforms.changeTopic.type": "org.apache.kafka.connect.transforms.RegexRouter", "transforms.changeTopic.regex": "(.*)", "transforms.changeTopic.replacement": "$1-04" } }
Error:
[2018-03-07 17:35:28,076] ERROR WorkerSourceTask{id=mongodb-connector-04-0} Task threw an uncaught and unrecoverable exception (org.apache.kafka.connect.runtime.WorkerTask:172) org.apache.kafka.connect.errors.DataException: ubnt.ace.device-04 at io.confluent.connect.avro.AvroConverter.fromConnectData(AvroConverter.java:77) at org.apache.kafka.connect.runtime.WorkerSourceTask.sendRecords(WorkerSourceTask.java:220) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:187) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:170) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:214) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.kafka.common.errors.SerializationException: Error serializing Avro message Caused by: org.apache.avro.SchemaParseException: Can't redefine: io.confluent.connect.avro.ConnectDefault at org.apache.avro.Schema$Names.put(Schema.java:1128) at org.apache.avro.Schema$NamedSchema.writeNameRef(Schema.java:562) at org.apache.avro.Schema$RecordSchema.toJson(Schema.java:690) at org.apache.avro.Schema$RecordSchema.fieldsToJson(Schema.java:716) at org.apache.avro.Schema$RecordSchema.toJson(Schema.java:701) at org.apache.avro.Schema.toString(Schema.java:324) at org.apache.avro.Schema.toString(Schema.java:314) at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.registerAndGetId(CachedSchemaRegistryClient.java:61) at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.register(CachedSchemaRegistryClient.java:100) at io.confluent.kafka.serializers.AbstractKafkaAvroSerializer.serializeImpl(AbstractKafkaAvroSerializer.java:79) at 
io.confluent.connect.avro.AvroConverter$Serializer.serialize(AvroConverter.java:110) at io.confluent.connect.avro.AvroConverter.fromConnectData(AvroConverter.java:75) at org.apache.kafka.connect.runtime.WorkerSourceTask.sendRecords(WorkerSourceTask.java:220) at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:187) at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:170) at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:214) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748)