seethb commented on issue #10142:
URL: https://github.com/apache/hudi/issues/10142#issuecomment-1827342163

   Hi, after successfully copying the Avro jar, my Kafka Connect worker starts with the connect-distributed.properties below:

   bootstrap.servers=localhost:9092
   group.id=hudi-connect-cluster
   key.converter.schemas.enable=true
   value.converter.schemas.enable=true
   offset.storage.topic=connect-offsets
   offset.storage.replication.factor=1
   config.storage.topic=connect-configs
   config.storage.replication.factor=1
   status.storage.topic=connect-status
   status.storage.replication.factor=1
   key.converter=io.confluent.connect.avro.AvroConverter
   value.converter=io.confluent.connect.avro.AvroConverter
   key.converter.schema.registry.url=http://localhost:8081
   value.converter.schema.registry.url=http://localhost:8081
   schema.registry.url=http://localhost:8081
   offset.flush.interval.ms=60000
   listeners=HTTP://:8085
   plugin.path=/usr/local/share/kafka/plugins
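
   As a sanity check (assuming the worker came up cleanly on the listener above), I can query its REST API to confirm what it loaded, e.g.:

   # list the connector plugins the worker picked up from plugin.path;
   # the Hudi sink connector should appear here
   curl -s http://localhost:8085/connector-plugins
   # list the connectors currently registered on this worker
   curl -s http://localhost:8085/connectors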
   
   When I try to create the connector with the config below,

   curl -i -X POST -H "Accept:application/json" -H "Content-Type:application/json" localhost:8085/connectors/ -d '{
       "name": "hudi-yb-fileregistry",
       "config": {
           "connector.class": "org.apache.hudi.connect.HoodieSinkConnector",
           "bootstrap.servers": "localhost:9092",
           "tasks.max": "1",
           "topics": "cdc555.public.cdcdemo",
           "hoodie.table.name": "cdc555-public-mor-cdcdemo",
           "hoodie.table.type": "MERGE_ON_READ",
           "hoodie.base.path": "file:///tmp/hoodie/cdc555-public-mor-cdcdemo",
           "hoodie.write.payload.class": "org.apache.hudi.common.model.HoodieAvroPayload",
           "hoodie.write.payload.serializer.class": "org.apache.hudi.common.model.HoodieAvroPayloadSerializer",
           "key.deserializer": "io.confluent.kafka.serializers.KafkaAvroDeserializer",
           "value.deserializer": "io.confluent.kafka.serializers.KafkaAvroDeserializer",
           "key.converter": "io.confluent.connect.avro.AvroConverter",
           "value.converter": "io.confluent.connect.avro.AvroConverter",
           "key.converter.schema.registry.url": "http://schema-registry:8081",
           "value.converter.schema.registry.url": "http://schema-registry:8081",
           "key.converter.schemas.enable": "true",
           "value.converter.schemas.enable": "true",
           "hoodie.datasource.write.recordkey.field": "payload.ts_ms",
           "hoodie.datasource.write.partitionpath.field": "payload.op",
           "hoodie.schemaprovider.class": "org.apache.hudi.schema.SchemaRegistryProvider",
           "hoodie.deltastreamer.schemaprovider.registry.url": "http://localhost:8081/subjects/cdc555.public.cdcdemo-value/versions/latest",
           "hoodie.kafka.commit.interval.secs": 60,
           "schema.registry.url": "http://localhost:8081"
       }
   }'

   I get the error below:
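
   (The POST itself is accepted; the same failure also shows up under the task state when I query the status endpoint, e.g.:)

   # inspect the connector/task state; failed tasks include the stack trace
   curl -s http://localhost:8085/connectors/hudi-yb-fileregistry/status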
   
   
   
   [2023-11-24 16:51:44,280] INFO [hudi-yb-fileregistry|task-0] Creating task hudi-yb-fileregistry-0 (org.apache.kafka.connect.runtime.Worker:600)
   [2023-11-24 16:51:44,280] INFO [hudi-yb-fileregistry|task-0] ConnectorConfig values:
        config.action.reload = restart
        connector.class = org.apache.hudi.connect.HoodieSinkConnector
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = class io.confluent.connect.avro.AvroConverter
        name = hudi-yb-fileregistry
        predicates = []
        tasks.max = 1
        transforms = []
        value.converter = class io.confluent.connect.avro.AvroConverter
    (org.apache.kafka.connect.runtime.ConnectorConfig:369)
   [2023-11-24 16:51:44,280] INFO [hudi-yb-fileregistry|task-0] EnrichedConnectorConfig values:
        config.action.reload = restart
        connector.class = org.apache.hudi.connect.HoodieSinkConnector
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = class io.confluent.connect.avro.AvroConverter
        name = hudi-yb-fileregistry
        predicates = []
        tasks.max = 1
        transforms = []
        value.converter = class io.confluent.connect.avro.AvroConverter
    (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:369)
   [2023-11-24 16:51:44,280] INFO [hudi-yb-fileregistry|task-0] TaskConfig values:
        task.class = class org.apache.hudi.connect.HoodieSinkTask
    (org.apache.kafka.connect.runtime.TaskConfig:369)
   [2023-11-24 16:51:44,280] INFO [hudi-yb-fileregistry|task-0] Instantiated task hudi-yb-fileregistry-0 with version 0.1.0 of type org.apache.hudi.connect.HoodieSinkTask (org.apache.kafka.connect.runtime.Worker:614)
   [2023-11-24 16:51:44,283] ERROR [hudi-yb-fileregistry|task-0] Failed to start task hudi-yb-fileregistry-0 (org.apache.kafka.connect.runtime.Worker:656)
   java.lang.NoSuchMethodError: io.confluent.kafka.schemaregistry.client.SchemaRegistryClientConfig.withClientSslSupport(Lorg/apache/kafka/common/config/ConfigDef;Ljava/lang/String;)V
        at io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.baseConfigDef(AbstractKafkaSchemaSerDeConfig.java:355)
        at io.confluent.connect.avro.AvroConverterConfig.<init>(AvroConverterConfig.java:27)
        at io.confluent.connect.avro.AvroConverter.configure(AvroConverter.java:68)
        at org.apache.kafka.connect.runtime.isolation.Plugins.newConverter(Plugins.java:328)
        at org.apache.kafka.connect.runtime.Worker.startTask(Worker.java:620)
        at org.apache.kafka.connect.runtime.Worker.startSinkTask(Worker.java:525)
        at org.apache.kafka.connect.runtime.distributed.DistributedHerder.startTask(DistributedHerder.java:1800)
        at org.apache.kafka.connect.runtime.distributed.DistributedHerder.lambda$getTaskStartingCallable$32(DistributedHerder.java:1850)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:750)
   [2023-11-24 16:51:44,284] INFO [Worker clientId=connect-1, groupId=hudi-connect-cluster] Finished starting connectors and tasks (org.apache.kafka.connect.runtime.distributed.DistributedHerder:1782)
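
   Could this be mixed versions of the Confluent schema-registry jars on my plugin path? The NoSuchMethodError on SchemaRegistryClientConfig.withClientSslSupport suggests the AvroConverter is loading a kafka-schema-registry-client jar from a different Confluent release. To look for duplicates I can run something like the sketch below (jar name patterns and the second path are assumptions; adjust to your install):

   # find every copy of the schema-registry client / Avro serializer jars the
   # worker can see; mismatched version numbers here are the usual culprit
   find /usr/local/share/kafka/plugins /usr/local/share/java \
        -name 'kafka-schema-registry-client-*.jar' \
        -o -name 'kafka-avro-serializer-*.jar' \
        -o -name 'kafka-connect-avro-converter-*.jar' 2>/dev/null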

