AMBARI-22199. Zeppelin start fails due to permission denied error during kinit (Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/33d67f3e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/33d67f3e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/33d67f3e

Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 33d67f3ead24fcda4fcdb7d5920ccc76d1743657
Parents: 7e885a3
Author: Venkata Sairam <venkatasairam.la...@gmail.com>
Authored: Wed Oct 11 14:46:39 2017 +0530
Committer: Venkata Sairam <venkatasairam.la...@gmail.com>
Committed: Wed Oct 11 14:46:39 2017 +0530

----------------------------------------------------------------------
 .../ZEPPELIN/0.7.0/package/scripts/master.py    |  48 ++++---
 .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py    | 140 ++++++++++++++++---
 2 files changed, 147 insertions(+), 41 deletions(-)
----------------------------------------------------------------------
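In short, the fix does two things: on Kerberized clusters it runs kinit as the Zeppelin service
user before any HDFS paths are checked or copied, and it only creates the notebook directory when
it does not already exist (the check was previously inverted). It also ensures the
zeppelin.config.fs.dir directory exists in HDFS before interpreter.json is written back, and
fetches interpreter.json with a plain "hdfs dfs -get" as the Zeppelin user instead of an
HdfsResource download. A minimal sketch of the reordered kinit step, assuming the
resource_management helpers (Execute, format) that master.py already imports;
ensure_kinit_then_copy is an illustrative name, not a method added by the patch:

    # Hedged sketch of the ordering the patch enforces; not the actual Ambari code.
    # The {braced} names are resolved by Ambari's format() from the Zeppelin params.
    from resource_management.core.resources.system import Execute
    from resource_management.libraries.functions.format import format

    def ensure_kinit_then_copy(master, params):
        # On a secure cluster, obtain a ticket as the zeppelin user first, so the
        # later HDFS checks and copies do not fail with "Permission denied".
        if params.security_enabled:
            kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} "
                               "{zeppelin_kerberos_principal}; ")
            Execute(kinit_cmd, user=params.zeppelin_user)

        # Only after the ticket exists is the notebook directory checked and copied.
        master.check_and_copy_notebook_in_hdfs(params)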


http://git-wip-us.apache.org/repos/asf/ambari/blob/33d67f3e/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index d615d06..09944bd 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -192,8 +192,7 @@ class Master(Script):
       notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \
                            params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir']
 
-
-    if self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user):
+    if not self.is_path_exists_in_HDFS(notebook_directory, params.zeppelin_user):
       # hdfs dfs -mkdir {notebook_directory}
       params.HdfsResource(format("{notebook_directory}"),
                           type="directory",
@@ -231,14 +230,14 @@ class Master(Script):
     Execute(("chown", "-R", format("{zeppelin_user}") + ":" + format("{zeppelin_group}"),
              os.path.join(params.zeppelin_dir, "notebook")), sudo=True)
 
+    if params.security_enabled:
+      zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ")
+      Execute(zeppelin_kinit_cmd, user=params.zeppelin_user)
+
     if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \
         and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
       self.check_and_copy_notebook_in_hdfs(params)
 
-    if params.security_enabled:
-        zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ")
-        Execute(zeppelin_kinit_cmd, user=params.zeppelin_user)
-
     zeppelin_spark_dependencies = self.get_zeppelin_spark_dependencies()
     if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]):
       self.create_zeppelin_dir(params)
@@ -292,14 +291,17 @@ class Master(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
       stack_select.select_packages(params.version)
 
-  def getZeppelinConfFS(self, params):
-    hdfs_interpreter_config = params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'] + "/interpreter.json"
+  def get_zeppelin_conf_FS_directory(self, params):
+    hdfs_interpreter_config = params.config['configurations']['zeppelin-config']['zeppelin.config.fs.dir']
 
     if not hdfs_interpreter_config.startswith("/"):
       hdfs_interpreter_config = "/user/" + format("{zeppelin_user}") + "/" + hdfs_interpreter_config
 
     return hdfs_interpreter_config
 
+  def get_zeppelin_conf_FS(self, params):
+    return self.get_zeppelin_conf_FS_directory(params) + "/interpreter.json"
+
   def is_path_exists_in_HDFS(self, path, as_user):
     kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
     kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
@@ -325,17 +327,15 @@ class Master(Script):
       and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
 
       if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']:
-        zeppelin_conf_fs = self.getZeppelinConfFS(params)
+        zeppelin_conf_fs = self.get_zeppelin_conf_FS(params)
 
         if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
           # copy from hdfs to /etc/zeppelin/conf/interpreter.json
-          params.HdfsResource(interpreter_config,
-                              type="file",
-                              action="download_on_execute",
-                              source=zeppelin_conf_fs,
-                              user=params.zeppelin_user,
-                              group=params.zeppelin_group,
-                              owner=params.zeppelin_user)
+          kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None))
+          kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
+          shell.call(format("rm {interpreter_config};"
+                            "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"),
+                     user=params.zeppelin_user)
         else:
           Logger.info(format("{zeppelin_conf_fs} does not exist. Skipping upload of DFS."))
 
@@ -357,13 +357,23 @@ class Master(Script):
       and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
 
       if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']:
-        params.HdfsResource(self.getZeppelinConfFS(params),
+        if not self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user):
+          # hdfs dfs -mkdir {zeppelin's conf directory}
+          params.HdfsResource(self.get_zeppelin_conf_FS_directory(params),
+                              type="directory",
+                              action="create_on_execute",
+                              owner=params.zeppelin_user,
+                              recursive_chown=True,
+                              recursive_chmod=True
+                          )
+
+        params.HdfsResource(self.get_zeppelin_conf_FS(params),
                             type="file",
                             action="create_on_execute",
                             source=interpreter_config,
-                            user=params.zeppelin_user,
-                            group=params.zeppelin_group,
                             owner=params.zeppelin_user,
+                            recursive_chown=True,
+                            recursive_chmod=True,
                             replace_existing_files=True)
 
   def update_kerberos_properties(self):
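
Two further changes in master.py are worth calling out. The interpreter.json download from HDFS no
longer goes through params.HdfsResource (which runs as the hdfs user); it is now a direct
"hdfs dfs -get" executed as the Zeppelin user after kinit. A condensed sketch of that fetch path,
mirroring the hunk above and assuming the shell, default, format, and get_kinit_path helpers the
module already uses; fetch_interpreter_json is an illustrative wrapper name:

    # Hedged sketch of the new download path; argument names mirror master.py.
    from resource_management.core import shell
    from resource_management.libraries.functions.default import default
    from resource_management.libraries.functions.format import format
    from resource_management.libraries.functions.get_kinit_path import get_kinit_path

    def fetch_interpreter_json(params, zeppelin_conf_fs, interpreter_config):
        kinit_path_local = get_kinit_path(
            default('/configurations/kerberos-env/executable_search_paths', None))
        kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} "
                                 "{zeppelin_kerberos_principal};")
        # Drop the stale local copy, then pull the HDFS copy as the zeppelin user;
        # {hadoop_conf_dir} and the Kerberos values resolve from the Zeppelin params.
        shell.call(format("rm {interpreter_config};"
                          "{kinit_if_needed} hdfs --config {hadoop_conf_dir} "
                          "dfs -get {zeppelin_conf_fs} {interpreter_config}"),
                   user=params.zeppelin_user)

On the write side, the conf directory in HDFS is now created before interpreter.json is uploaded,
as sketched after the test diff below.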

http://git-wip-us.apache.org/repos/asf/ambari/blob/33d67f3e/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index 3064880..3adb94c 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -221,6 +221,42 @@ class TestZeppelin070(RMFTestCase):
                                           '/usr/hdp/current/zeppelin-server/notebook'),
                               sudo=True,
                               )
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin/notebook',
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
+                              default_fs=u'hdfs://c6401.ambari.apache.org:8020',
+                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              keytab=UnknownConfigurationMock(),
+                              kinit_path_local='/usr/bin/kinit',
+                              user='hdfs',
+                              owner='zeppelin',
+                              principal_name=UnknownConfigurationMock(),
+                              recursive_chown=True,
+                              security_enabled=False,
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
+                              type='directory',
+                              action=['create_on_execute'],
+                              recursive_chmod=True
+                              )
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin/notebook',
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
+                              default_fs=u'hdfs://c6401.ambari.apache.org:8020',
+                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              keytab=UnknownConfigurationMock(),
+                              kinit_path_local='/usr/bin/kinit',
+                              user='hdfs',
+                              owner='zeppelin',
+                              principal_name=UnknownConfigurationMock(),
+                              recursive_chown=True,
+                              security_enabled=False,
+                              source='/usr/hdp/current/zeppelin-server/notebook',
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
+                              type='directory',
+                              action=['create_on_execute'],
+                              recursive_chmod=True
+                              )
+
     self.assertResourceCalled('HdfsResource', '/user/zeppelin',
                               hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               default_fs=u'hdfs://c6401.ambari.apache.org:8020',
@@ -312,6 +348,25 @@ class TestZeppelin070(RMFTestCase):
                           )
 
     self.assertResourceCalled('HdfsResource',
+                              '/user/zeppelin/hdfs:///user/zeppelin/conf',
+                              security_enabled=False,
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
+                              keytab=UnknownConfigurationMock(),
+                              default_fs='hdfs://c6401.ambari.apache.org:8020',
+                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              kinit_path_local='/usr/bin/kinit',
+                              principal_name=UnknownConfigurationMock(),
+                              recursive_chown=True,
+                              recursive_chmod=True,
+                              owner='zeppelin',
+                              user='hdfs',
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
+                              type='directory',
+                              action=['create_on_execute'],
+                              )
+
+    self.assertResourceCalled('HdfsResource',
                               '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
                               security_enabled=False,
                               hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
@@ -322,9 +377,10 @@ class TestZeppelin070(RMFTestCase):
                               hdfs_site={u'a': u'b'},
                               kinit_path_local='/usr/bin/kinit',
                               principal_name=UnknownConfigurationMock(),
-                              user='zeppelin',
+                              recursive_chown=True,
+                              recursive_chmod=True,
+                              user='hdfs',
                               owner='zeppelin',
-                              group='zeppelin',
                               replace_existing_files=True,
                               hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type='file',
@@ -337,24 +393,45 @@ class TestZeppelin070(RMFTestCase):
                               group='zeppelin',
                               )
 
-    self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
-        keytab = UnknownConfigurationMock(),
-        source = '/etc/zeppelin/conf/interpreter.json',
-        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        replace_existing_files = True,
-        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hdfs_site = {u'a': u'b'},
-        kinit_path_local = '/usr/bin/kinit',
-        principal_name = UnknownConfigurationMock(),
-        user = 'zeppelin',
-        owner = 'zeppelin',
-        group = 'zeppelin',
-        hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
-        type = 'file',
-        action = ['create_on_execute'],
-    )
+    self.assertResourceCalled('HdfsResource',
+                              '/user/zeppelin/hdfs:///user/zeppelin/conf',
+                              security_enabled=False,
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
+                              keytab=UnknownConfigurationMock(),
+                              default_fs='hdfs://c6401.ambari.apache.org:8020',
+                              recursive_chown=True,
+                              recursive_chmod=True,
+                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              kinit_path_local='/usr/bin/kinit',
+                              principal_name=UnknownConfigurationMock(),
+                              user='hdfs',
+                              owner='zeppelin',
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
+                              type='directory',
+                              action=['create_on_execute'],
+                              )
+
+    self.assertResourceCalled('HdfsResource',
+                              '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+                              security_enabled=False,
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
+                              keytab=UnknownConfigurationMock(),
+                              source='/etc/zeppelin/conf/interpreter.json',
+                              default_fs='hdfs://c6401.ambari.apache.org:8020',
+                              replace_existing_files=True,
+                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site={u'a': u'b'},
+                              kinit_path_local='/usr/bin/kinit',
+                              principal_name=UnknownConfigurationMock(),
+                              recursive_chmod=True,
+                              recursive_chown=True,
+                              user='hdfs',
+                              owner='zeppelin',
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
+                              type='file',
+                              action=['create_on_execute'],
+                              )
 
     self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
                               content=interpreter_json_generated.template_after_kerberos,
@@ -362,6 +439,24 @@ class TestZeppelin070(RMFTestCase):
                               group='zeppelin',
                               )
 
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf',
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
+                              keytab = UnknownConfigurationMock(),
+                              default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                              hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+                              hdfs_site = {u'a': u'b'},
+                              kinit_path_local = '/usr/bin/kinit',
+                              principal_name = UnknownConfigurationMock(),
+                              recursive_chown=True,
+                              recursive_chmod=True,
+                              user = 'hdfs',
+                              owner = 'zeppelin',
+                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
+                              type = 'directory',
+                              action = ['create_on_execute'],
+                              )
+
     self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
         security_enabled = False,
         hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
@@ -373,9 +468,10 @@ class TestZeppelin070(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         replace_existing_files = True,
-        user = 'zeppelin',
+        recursive_chown=True,
+        recursive_chmod=True,
+        user = 'hdfs',
         owner = 'zeppelin',
-        group = 'zeppelin',
         hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
         type = 'file',
         action = ['create_on_execute'],
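
The updated expectations above mirror the new write path in master.py: the HDFS conf directory is
created before interpreter.json is put back, both through params.HdfsResource with recursive
ownership and permission fixes, executed by the hdfs user while the paths stay owned by zeppelin
(the explicit group kwarg is gone). Condensed from the master.py hunks above (self refers to the
Master script class), the runtime sequence the tests assert looks roughly like this:

    # Condensed view of the resource order the tests assert; the keyword values
    # come from master.py and the fixtures above, nothing here is new behaviour.
    conf_dir = self.get_zeppelin_conf_FS_directory(params)   # from zeppelin.config.fs.dir
    if not self.is_path_exists_in_HDFS(conf_dir, params.zeppelin_user):
        params.HdfsResource(conf_dir,
                            type="directory",
                            action="create_on_execute",
                            owner=params.zeppelin_user,
                            recursive_chown=True,
                            recursive_chmod=True)
    params.HdfsResource(self.get_zeppelin_conf_FS(params),   # conf_dir + "/interpreter.json"
                        type="file",
                        action="create_on_execute",
                        source=interpreter_config,
                        owner=params.zeppelin_user,
                        recursive_chown=True,
                        recursive_chmod=True,
                        replace_existing_files=True)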
