[ 
https://issues.apache.org/jira/browse/BIGTOP-1200?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13898722#comment-13898722
 ] 

jay vyas commented on BIGTOP-1200:
----------------------------------

Thanks, Roman. For this artifact, language independence is important.


I think in the BIGTOP-952 JIRA we can add custom Groovy stuff, but this JIRA 
attempts to create an artifact that both Bigtop as well as other non-Bigtop 
deployments can use...

And other folks who don't use Groovy to provision can still use this JSON 
artifact to provision their HCFS clusters as well, since JSON is language 
independent.

Does that make sense?

> Implement Generic Text File to define HCFS filesystem semantics, to be used 
> by all bigtop deployments.
> ------------------------------------------------------------------------------------------------------
>
>                 Key: BIGTOP-1200
>                 URL: https://issues.apache.org/jira/browse/BIGTOP-1200
>             Project: Bigtop
>          Issue Type: Improvement
>            Reporter: jay vyas
>            Assignee: jay vyas
>         Attachments: BIGTOP-1200.1.patch, BIGTOP-1200.patch
>
>
> One of the really useful artifacts in bigtop is the init-hdfs.sh script.  It 
> defines ecosystem semantics and expectations for hadoop clusters. 
> Other HCFS filesystems can leverage the logic in this script quite easily, if 
> we decouple its implementation from being HDFS specific by specifying a 
> "SUPERUSER" parameter to replace "hdfs".
> And yes we can still have the init-hdfs.sh convenience script : which just 
> calls "init-hcfs.sh hdfs" .
> Initial tests in puppet VMs pass.  (attaching patch with this JIRA)
> {noformat}
> [root@vagrant bigtop-puppet]# ./init-hdfs.sh 
> + echo 'Now initializing the Distributed File System with root=HDFS'
> Now initializing the Distributed File System with root=HDFS
> + ./init-hcfs.sh hdfs
> + '[' 1 -ne 1 ']'
> + SUPER_USER=hdfs
> + echo 'Initializing the DFS with super user : hdfs'
> Initializing the DFS with super user : hdfs
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /tmp'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 1777 /tmp'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /var'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /var/log'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 1775 /var/log'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown yarn:mapred /var/log'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /tmp/hadoop-yarn'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown -R mapred:mapred 
> /tmp/hadoop-yarn'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /tmp/hadoop-yarn'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p 
> /var/log/hadoop-yarn/apps'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 1777 
> /var/log/hadoop-yarn/apps'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown yarn:mapred 
> /var/log/hadoop-yarn/apps'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /hbase'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown hbase:hbase /hbase'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /solr'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown solr:solr /solr'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /benchmarks'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /benchmarks'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod 755 /user'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown hdfs  /user'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user/history'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown mapred:mapred 
> /user/history'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod 755 /user/history'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user/jenkins'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/jenkins'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown jenkins /user/jenkins'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user/hive'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/hive'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown hive /user/hive'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user/root'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/root'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown root /user/root'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user/hue'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/hue'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown hue /user/hue'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user/sqoop'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/sqoop'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown sqoop /user/sqoop'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user/oozie'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chmod -R 777 /user/oozie'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -chown -R oozie /user/oozie'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user/oozie/share'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p /user/oozie/share/lib'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p 
> /user/oozie/share/lib/hive'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p 
> /user/oozie/share/lib/mapreduce-streaming'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p 
> /user/oozie/share/lib/distcp'
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -mkdir -p 
> /user/oozie/share/lib/pig'
> + ls '/usr/lib/hive/lib/*.jar'
> + ls /usr/lib/hadoop-mapreduce/hadoop-streaming-2.0.6-alpha.jar 
> /usr/lib/hadoop-mapreduce/hadoop-streaming.jar
> + su -s /bin/bash hdfs -c '/usr/bin/hadoop fs -put 
> /usr/lib/hadoop-mapreduce/hadoop-streaming*.jar 
> /user/oozie/share/lib/mapreduce-streaming'
> put: 
> `/user/oozie/share/lib/mapreduce-streaming/hadoop-streaming-2.0.6-alpha.jar': 
> File exists
> put: `/user/oozie/share/lib/mapreduce-streaming/hadoop-streaming.jar': File 
> exists
> [root@vagrant bigtop-puppet]# 
> {noformat}



--
This message was sent by Atlassian JIRA
(v6.1.5#6160)

Reply via email to