yanbigot opened a new issue #1230: java.lang.NoSuchMethodError: 
org.apache.avro.Schema.getLogicalType()Lorg/apache/avro/LogicalType;
URL: https://github.com/apache/incubator-hudi/issues/1230
 
 
   While running on a YARN cluster with:
   - spark 2.2.0
   - hdp 2.6.4
   
   It works in the local environment.
   
   Here is my pom.xml
   `<properties>
           <jdk.version.source>1.8</jdk.version.source>
           <jdk.version.target>1.8</jdk.version.target>
           <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
           <build.id>NA</build.id><!-- need to have a default value -->
   
           <!-- Plugins versions -->
           <maven.jar-plugin.version>2.6</maven.jar-plugin.version>
           <maven.compiler-plugin.version>3.5.1</maven.compiler-plugin.version>
           <scoverage-plugin.version>1.3.0</scoverage-plugin.version>
           <scala-maven-plugin.version>3.2.0</scala-maven-plugin.version>
           <maven-surefire-plugin.version>2.7</maven-surefire-plugin.version>
           <scalatest-maven-plugin.version>1.0</scalatest-maven-plugin.version>
           <maven-assembly-plugin.version>2.6</maven-assembly-plugin.version>
           
<maven-resources-plugin.version>3.0.1</maven-resources-plugin.version>
           
<buildnumber-maven-plugin.version>1.4</buildnumber-maven-plugin.version>
   
           <!-- Dependency versions -->
           <commons-configuration.version>1.10</commons-configuration.version>
           <spark.version>2.2.0</spark.version>
           <logback.version>1.2.3</logback.version>
           <joda-time.version>2.9.4</joda-time.version>
           <joda-convert.version>1.8</joda-convert.version>
           <hadoop.version>2.9.0</hadoop.version>
           <scala.tool.version>2.11</scala.tool.version>
           <scala.version>2.11.12</scala.version>
           <jackson.version>2.9.6</jackson.version>
           <jackson-asl.version>1.9.13</jackson-asl.version>
           <scopt.version>3.4.0</scopt.version>
           <scalaj.version>2.3.0</scalaj.version>
           <oozie.version>4.2.0.2.4.2.0-258</oozie.version>
           <junit.version>4.12</junit.version>
           <specs2.version>2.3.13</specs2.version>
           <assertj.version>3.8.0</assertj.version>
           <lift.version>2.6</lift.version>
           <spark-streaming-kafka.version>2.0.2</spark-streaming-kafka.version>
           <scalatest.version>3.0.1</scalatest.version>
           <scalacheck.version>1.13.4</scalacheck.version>
           <derby.version>10.10.2.0</derby.version> <!-- 10.14.1.0 -->
           <curator.version>2.5.0</curator.version>
           <kafka.version>0.10.0.0</kafka.version>
           <log4j.version>1.2.17</log4j.version>
           <slf4j.version>1.7.10</slf4j.version>
           <objenesis.version>1.2</objenesis.version>
           <commons-lang.version>2.6</commons-lang.version>
           <commons-logging.version>1.1.3</commons-logging.version>
           <xml-apis.version>1.4.01</xml-apis.version>
           <xercesImpl.version>2.10.0</xercesImpl.version>
           <jetty.version>6.1.26</jetty.version>
           <servlet-api.version>2.5</servlet-api.version>
           <guava.version>11.0.2</guava.version>
           <simplyscala.version>0.5</simplyscala.version>
           <bonecp.version>0.8.0.RELEASE</bonecp.version>
           <scoverage.plugin.version>1.3.0</scoverage.plugin.version>
           <typesafe-config.version>1.3.0</typesafe-config.version>
           <postgresql.version>42.1.4</postgresql.version>
           <spark-testing-base.version>0.7.4</spark-testing-base.version>
           <mvel.version>2.4.0.Final</mvel.version>
           <jdbc-slim.version>1.2.2</jdbc-slim.version>
           <postgresql.version>42.1.4</postgresql.version>
           <dbfit.version>3.2.0</dbfit.version>
           <smartrics-RestFixture.version>4.4</smartrics-RestFixture.version>
           <commons-email.version>1.5</commons-email.version>
           <commons-dbcp2.version>2.3.0</commons-dbcp2.version>
       </properties>
       <dependencies>
           <!-- Avro -->
           <dependency>
               <groupId>org.apache.parquet</groupId>
               <artifactId>parquet-avro</artifactId>
               <version>1.8.2</version>
           </dependency>
   
           <!-- Hudi -->
           <dependency>
               <groupId>org.apache.hudi</groupId>
               <artifactId>hudi-spark</artifactId>
               <version>0.5.0-incubating</version>
           </dependency>
   
           <!-- Spark -->
           <dependency>
               <groupId>org.apache.spark</groupId>
               <artifactId>spark-core_${scala.tool.version}</artifactId>
               <version>${spark.version}</version>
               <exclusions>
                   <exclusion>
                       <groupId>org.slf4j</groupId>
                       <artifactId>slf4j-log4j12</artifactId>
                   </exclusion>
               </exclusions>
           </dependency>
           <dependency>
               <groupId>org.apache.spark</groupId>
               <artifactId>spark-sql_${scala.tool.version}</artifactId>
               <version>${spark.version}</version>
               <exclusions>
                   <exclusion>
                       <groupId>org.slf4j</groupId>
                       <artifactId>slf4j-log4j12</artifactId>
                   </exclusion>
               </exclusions>
           </dependency>
           <dependency>
               <groupId>org.apache.spark</groupId>
               <artifactId>spark-hive_${scala.tool.version}</artifactId>
               <version>${spark.version}</version>
               <!--<scope>provided</scope>-->
           </dependency>
   
   
   
           <!-- Dhr -->
           <dependency>
               <groupId>com.socgen.dhr</groupId>
               <artifactId>dhr-configuration</artifactId>
               <version>4.0.0</version>
           </dependency>
   
           <!-- Spark testing base-->
           <dependency>
               <groupId>com.holdenkarau</groupId>
               <artifactId>spark-testing-base_2.11</artifactId>
               <scope>test</scope>
               <exclusions>
                   <exclusion>
                       <groupId>org.mockito</groupId>
                       <artifactId>mockito-core</artifactId>
                   </exclusion>
               </exclusions>
           </dependency>
   
           <!-- Scala -->
           <dependency>
               <groupId>org.scala-lang</groupId>
               <artifactId>scala-library</artifactId>
               <version>${scala.version}</version>
           </dependency>
           <dependency>
               <groupId>org.scala-lang</groupId>
               <artifactId>scala-reflect</artifactId>
               <version>${scala.version}</version>
           </dependency>
           <dependency>
               <groupId>org.scala-lang</groupId>
               <artifactId>scala-compiler</artifactId>
               <version>${scala.version}</version>
           </dependency>
           <dependency>
               <groupId>org.scala-lang</groupId>
               <artifactId>scalap</artifactId>
               <version>${scala.version}</version>
           </dependency>
   
           <dependency>
               <groupId>org.slf4j</groupId>
               <artifactId>slf4j-api</artifactId>
               <version>${slf4j.version}</version>
           </dependency>
           <dependency>
               <groupId>org.slf4j</groupId>
               <artifactId>slf4j-log4j12</artifactId>
               <version>${slf4j.version}</version>
           </dependency>
           <dependency>
               <groupId>ch.qos.logback</groupId>
               <artifactId>logback-classic</artifactId>
               <version>${logback.version}</version>
           </dependency>
   
           <dependency>
               <groupId>org.scalatest</groupId>
               <artifactId>scalatest_2.11</artifactId>
               <version>${scalatest.version}</version>
               <scope>test</scope>
           </dependency>
       </dependencies>
   
       <dependencyManagement>
           <dependencies>
               <dependency>
                   <groupId>com.holdenkarau</groupId>
                   
<artifactId>spark-testing-base_${scala.tool.version}</artifactId>
                   
<version>${spark.version}_${spark-testing-base.version}</version>
                   <scope>test</scope>
               </dependency>
           </dependencies>
       </dependencyManagement>`
   
   The stack trace:
   ```
   Caused by: java.lang.NoSuchMethodError: 
org.apache.avro.Schema.getLogicalType()Lorg/apache/avro/LogicalType;
           at 
org.apache.parquet.avro.AvroSchemaConverter.convertField(AvroSchemaConverter.java:178)
           at 
org.apache.parquet.avro.AvroSchemaConverter.convertUnion(AvroSchemaConverter.java:214)
           at 
org.apache.parquet.avro.AvroSchemaConverter.convertField(AvroSchemaConverter.java:171)
           at 
org.apache.parquet.avro.AvroSchemaConverter.convertField(AvroSchemaConverter.java:130)
           at 
org.apache.parquet.avro.AvroSchemaConverter.convertField(AvroSchemaConverter.java:227)
           at 
org.apache.parquet.avro.AvroSchemaConverter.convertFields(AvroSchemaConverter.java:124)
           at 
org.apache.parquet.avro.AvroSchemaConverter.convert(AvroSchemaConverter.java:115)
           at 
org.apache.hudi.io.storage.HoodieStorageWriterFactory.newParquetStorageWriter(HoodieStorageWriterFactory.java:60)
           at 
org.apache.hudi.io.storage.HoodieStorageWriterFactory.getStorageWriter(HoodieStorageWriterFactory.java:47)
           at 
org.apache.hudi.io.HoodieCreateHandle.<init>(HoodieCreateHandle.java:72)
           at 
org.apache.hudi.func.CopyOnWriteLazyInsertIterable$CopyOnWriteInsertHandler.consumeOneRecord(CopyOnWriteLazyInsertIterable.java:139)
           at 
org.apache.hudi.func.CopyOnWriteLazyInsertIterable$CopyOnWriteInsertHandler.consumeOneRecord(CopyOnWriteLazyInsertIterable.java:127)
           at 
org.apache.hudi.common.util.queue.BoundedInMemoryQueueConsumer.consume(BoundedInMemoryQueueConsumer.java:37)
           at 
org.apache.hudi.common.util.queue.BoundedInMemoryExecutor.lambda$null$192(BoundedInMemoryExecutor.java:121)
           at java.util.concurrent.FutureTask.run(FutureTask.java:266)
   ```

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

Reply via email to