[ 
https://issues.apache.org/jira/browse/HDFS-1900?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13054011#comment-13054011
 ] 

Abel Perez commented on HDFS-1900:
----------------------------------

Hey Eli, thanks for the function.  I'm not sure if my environment is properly 
set up; I tried running test-patch and got the following error:

[exec] Exception in thread "main" java.lang.NoClassDefFoundError: 
org/apache/fop/messaging/MessageHandler
     [exec]     at 
org.apache.cocoon.serialization.FOPSerializer.configure(FOPSerializer.java:122)
     [exec]     at 
org.apache.avalon.framework.container.ContainerUtil.configure(ContainerUtil.java:201)
     [exec]     at 
org.apache.avalon.excalibur.component.DefaultComponentFactory.newInstance(DefaultComponentFactory.java:289)
     [exec]     at 
org.apache.avalon.excalibur.pool.InstrumentedResourceLimitingPool.newPoolable(InstrumentedResourceLimitingPool.java:655)
     [exec]     at 
org.apache.avalon.excalibur.pool.InstrumentedResourceLimitingPool.get(InstrumentedResourceLimitingPool.java:371)
     [exec]     at 
org.apache.avalon.excalibur.component.PoolableComponentHandler.doGet(PoolableComponentHandler.java:198)
     [exec]     at 
org.apache.avalon.excalibur.component.ComponentHandler.get(ComponentHandler.java:381)
     [exec]     at 
org.apache.avalon.excalibur.component.ExcaliburComponentSelector.select(ExcaliburComponentSelector.java:215)
     [exec]     at 
org.apache.cocoon.components.ExtendedComponentSelector.select(ExtendedComponentSelector.java:268)
     [exec]     at 
org.apache.cocoon.components.pipeline.AbstractProcessingPipeline.setSerializer(AbstractProcessingPipeline.java:311)
     [exec]     at 
org.apache.cocoon.components.pipeline.impl.AbstractCachingProcessingPipeline.setSerializer(AbstractCachingProcessingPipeline.java:171)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.sitemap.SerializeNode.invoke(SerializeNode.java:120)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.AbstractParentProcessingNode.invokeNodes(AbstractParentProcessingNode.java:69)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.sitemap.SelectNode.invoke(SelectNode.java:103)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.AbstractParentProcessingNode.invokeNodes(AbstractParentProcessingNode.java:47)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.sitemap.PreparableMatchNode.invoke(PreparableMatchNode.java:131)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.AbstractParentProcessingNode.invokeNodes(AbstractParentProcessingNode.java:69)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.sitemap.PipelineNode.invoke(PipelineNode.java:143)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.AbstractParentProcessingNode.invokeNodes(AbstractParentProcessingNode.java:69)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.sitemap.PipelinesNode.invoke(PipelinesNode.java:93)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.ConcreteTreeProcessor.process(ConcreteTreeProcessor.java:235)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.ConcreteTreeProcessor.process(ConcreteTreeProcessor.java:177)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.TreeProcessor.process(TreeProcessor.java:254)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.sitemap.MountNode.invoke(MountNode.java:118)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.AbstractParentProcessingNode.invokeNodes(AbstractParentProcessingNode.java:69)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.sitemap.SelectNode.invoke(SelectNode.java:98)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.AbstractParentProcessingNode.invokeNodes(AbstractParentProcessingNode.java:69)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.sitemap.PipelineNode.invoke(PipelineNode.java:143)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.AbstractParentProcessingNode.invokeNodes(AbstractParentProcessingNode.java:69)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.sitemap.PipelinesNode.invoke(PipelinesNode.java:93)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.ConcreteTreeProcessor.process(ConcreteTreeProcessor.java:235)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.ConcreteTreeProcessor.process(ConcreteTreeProcessor.java:177)
     [exec]     at 
org.apache.cocoon.components.treeprocessor.TreeProcessor.process(TreeProcessor.java:254)
     [exec]     at org.apache.cocoon.Cocoon.process(Cocoon.java:699)
     [exec]     at 
org.apache.cocoon.bean.CocoonWrapper.getPage(CocoonWrapper.java:514)
     [exec]     at 
org.apache.cocoon.bean.CocoonBean.processTarget(CocoonBean.java:499)
     [exec]     at 
org.apache.cocoon.bean.CocoonBean.process(CocoonBean.java:356)
     [exec]     at org.apache.cocoon.Main.main(Main.java:321)
     [exec] Caused by: java.lang.ClassNotFoundException: 
org.apache.fop.messaging.MessageHandler
     [exec]     at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
     [exec]     at java.security.AccessController.doPrivileged(Native Method)
     [exec]     at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
     [exec]     at java.lang.ClassLoader.loadClass(ClassLoader.java:307)
     [exec]     at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
     [exec]     at java.lang.ClassLoader.loadClass(ClassLoader.java:248)
     [exec]     ... 38 more
     [exec] * [8/24]    [0/0]     0.838s 0b      linkmap.pdf

> Use the block size key defined by common 
> -----------------------------------------
>
>                 Key: HDFS-1900
>                 URL: https://issues.apache.org/jira/browse/HDFS-1900
>             Project: Hadoop HDFS
>          Issue Type: Improvement
>    Affects Versions: 0.21.1
>            Reporter: Eli Collins
>            Assignee: Abel Perez
>              Labels: newbie
>             Fix For: 0.22.0
>
>         Attachments: HDFS-1900.txt
>
>
> HADOOP-4952 added a dfs.block.size key to common configuration, defined in 
> o.a.h.fs.FsConfig. This conflicts with the original HDFS block size key of 
> the same name, which is now deprecated in favor of dfs.blocksize. It doesn't 
> make sense to have two different keys for the block size (ie they can 
> disagree). Why doesn't HDFS just use the key defined in common?

--
This message is automatically generated by JIRA.
For more information on JIRA, see: http://www.atlassian.com/software/jira

        

Reply via email to