Repository: ambari
Updated Branches:
  refs/heads/trunk 3c3b1b810 -> be605cbfd


AMBARI-22212. Alter logic for storing interpreter.json (Prabhjyot Singh via
Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/be605cbf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/be605cbf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/be605cbf

Branch: refs/heads/trunk
Commit: be605cbfdc977ff927df89fc772b8fbb1f589fce
Parents: 3c3b1b8
Author: Venkata Sairam <venkatasairam.la...@gmail.com>
Authored: Thu Oct 12 17:17:07 2017 +0530
Committer: Venkata Sairam <venkatasairam.la...@gmail.com>
Committed: Thu Oct 12 17:17:07 2017 +0530

----------------------------------------------------------------------
 .../ZEPPELIN/0.7.0/package/scripts/master.py    | 69 +++++++++-----------
 .../ZEPPELIN/0.7.0/package/scripts/params.py    |  5 ++
 .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py    | 52 +++------------
 3 files changed, 44 insertions(+), 82 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/be605cbf/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index eaa2cb9..a8b1b32 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -242,6 +242,17 @@ class Master(Script):
     if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]):
       self.create_zeppelin_dir(params)
 
+    if params.conf_stored_in_hdfs:
+      if not self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user):
+        # hdfs dfs -mkdir {zeppelin's conf directory}
+        params.HdfsResource(self.get_zeppelin_conf_FS_directory(params),
+                            type="directory",
+                            action="create_on_execute",
+                            owner=params.zeppelin_user,
+                            recursive_chown=True,
+                            recursive_chmod=True
+                            )
+
     # if first_setup:
     if not glob.glob(params.conf_dir + "/interpreter.json") and \
       not os.path.exists(params.conf_dir + "/interpreter.json"):
@@ -323,21 +334,16 @@ class Master(Script):
     import json
 
     interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
-    if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \
-      and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
-
-      if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']:
-        zeppelin_conf_fs = self.get_zeppelin_conf_FS(params)
-
-        if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
-          # copy from hdfs to /etc/zeppelin/conf/interpreter.json
-          kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None))
-          kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
-          shell.call(format("rm {interpreter_config};"
-                            "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"),
-                     user=params.zeppelin_user)
-        else:
-          Logger.info(format("{zeppelin_conf_fs} does not exist. Skipping upload of DFS."))
+    if params.conf_stored_in_hdfs:
+      zeppelin_conf_fs = self.get_zeppelin_conf_FS(params)
+
+      if self.is_path_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
+        # copy from hdfs to /etc/zeppelin/conf/interpreter.json
+        kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None))
+        kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
+        shell.call(format("rm {interpreter_config};"
+                          "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"),
+                   user=params.zeppelin_user)
 
     config_content = sudo.read_file(interpreter_config)
     config_data = json.loads(config_content)
@@ -353,28 +359,15 @@ class Master(Script):
          owner=params.zeppelin_user,
          content=json.dumps(config_data, indent=2))
 
-    if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \
-      and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
-
-      if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']:
-        if not self.is_path_exists_in_HDFS(self.get_zeppelin_conf_FS_directory(params), params.zeppelin_user):
-          # hdfs dfs -mkdir {zeppelin's conf directory}
-          params.HdfsResource(self.get_zeppelin_conf_FS_directory(params),
-                              type="directory",
-                              action="create_on_execute",
-                              owner=params.zeppelin_user,
-                              recursive_chown=True,
-                              recursive_chmod=True
-                          )
-
-        params.HdfsResource(self.get_zeppelin_conf_FS(params),
-                            type="file",
-                            action="create_on_execute",
-                            source=interpreter_config,
-                            owner=params.zeppelin_user,
-                            recursive_chown=True,
-                            recursive_chmod=True,
-                            replace_existing_files=True)
+    if params.conf_stored_in_hdfs:
+      params.HdfsResource(self.get_zeppelin_conf_FS(params),
+                          type="file",
+                          action="create_on_execute",
+                          source=interpreter_config,
+                          owner=params.zeppelin_user,
+                          recursive_chown=True,
+                          recursive_chmod=True,
+                          replace_existing_files=True)
 
   def update_kerberos_properties(self):
     import params
@@ -451,7 +444,7 @@ class Master(Script):
     hive_interactive_properties_key = 'hive_interactive'
     for setting_key in interpreter_settings.keys():
       interpreter = interpreter_settings[setting_key]
-      if interpreter['group'] == 'jdbc':
+      if interpreter['group'] == 'jdbc' and interpreter['name'] == 'jdbc':
         interpreter['dependencies'] = []
 
         if not params.hive_server_host and params.hive_server_interactive_hosts:
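
The net effect of the master.py changes: the repeated inline checks for
'zeppelin.notebook.storage' == FileSystemNotebookRepo plus 'zeppelin.config.fs.dir'
are collapsed into the single params.conf_stored_in_hdfs flag (defined in params.py
below), the creation of Zeppelin's conf directory in HDFS moves up into the setup
path so it happens once, and the jdbc dependency rewrite now also requires
interpreter['name'] == 'jdbc'. A condensed sketch of how the flag is used follows;
the helper names are illustrative only (the real code inlines these blocks), while
every call shown appears in the diff above.

# Sketch only -- condensed from the patched master.py, not the complete methods.
def ensure_conf_dir_in_hdfs(self, params):  # illustrative helper name
  # During setup: create Zeppelin's conf directory in HDFS if it is missing.
  if params.conf_stored_in_hdfs:
    conf_fs_dir = self.get_zeppelin_conf_FS_directory(params)
    if not self.is_path_exists_in_HDFS(conf_fs_dir, params.zeppelin_user):
      params.HdfsResource(conf_fs_dir,
                          type="directory",
                          action="create_on_execute",
                          owner=params.zeppelin_user,
                          recursive_chown=True,
                          recursive_chmod=True)

def push_interpreter_json(self, params, interpreter_config):  # illustrative helper name
  # After interpreter.json has been rewritten locally, mirror it back to HDFS.
  if params.conf_stored_in_hdfs:
    params.HdfsResource(self.get_zeppelin_conf_FS(params),
                        type="file",
                        action="create_on_execute",
                        source=interpreter_config,
                        owner=params.zeppelin_user,
                        recursive_chown=True,
                        recursive_chmod=True,
                        replace_existing_files=True)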

http://git-wip-us.apache.org/repos/asf/ambari/blob/be605cbf/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
index b7d3df0..e69037c 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
@@ -104,6 +104,11 @@ conf_dir = "/etc/zeppelin/conf"
 external_dependency_conf = "/etc/zeppelin/conf/external-dependency-conf"
 notebook_dir = os.path.join(*[install_dir, zeppelin_dirname, 'notebook'])
 
+conf_stored_in_hdfs = False
+if 'zeppelin.config.fs.dir' in config['configurations']['zeppelin-config'] and \
+  not config['configurations']['zeppelin-config']['zeppelin.config.fs.dir'].startswith('file://'):
+  conf_stored_in_hdfs = True
+
 # zeppelin-env.sh
 zeppelin_env_content = config['configurations']['zeppelin-env']['zeppelin_env_content']
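
For reference, the new flag only distinguishes a local file:// conf directory from
one that lives on the default filesystem (HDFS). A standalone sketch of that check,
using made-up example values for zeppelin.config.fs.dir (not taken from this patch):

# Illustration of the conf_stored_in_hdfs check added to params.py above.
def is_conf_stored_in_hdfs(zeppelin_config):
  # True only when zeppelin.config.fs.dir is set and is not a local file:// path.
  return ('zeppelin.config.fs.dir' in zeppelin_config and
          not zeppelin_config['zeppelin.config.fs.dir'].startswith('file://'))

print(is_conf_stored_in_hdfs({'zeppelin.config.fs.dir': 'conf'}))                       # True  -> keep interpreter.json in HDFS
print(is_conf_stored_in_hdfs({'zeppelin.config.fs.dir': 'file:///etc/zeppelin/conf'}))  # False -> purely local conf dir
print(is_conf_stored_in_hdfs({}))                                                       # False -> property not configured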
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/be605cbf/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index 3adb94c..e5d0240 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -341,12 +341,6 @@ class TestZeppelin070(RMFTestCase):
                               security_enabled=False,
                               )
 
-    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
-                          content=interpreter_json_generated.template_after_base,
-                          owner='zeppelin',
-                          group='zeppelin',
-                          )
-
     self.assertResourceCalled('HdfsResource',
                               '/user/zeppelin/hdfs:///user/zeppelin/conf',
                               security_enabled=False,
@@ -359,13 +353,20 @@ class TestZeppelin070(RMFTestCase):
                               principal_name=UnknownConfigurationMock(),
                               recursive_chown=True,
                               recursive_chmod=True,
-                              owner='zeppelin',
                              user='hdfs',
+                              owner='zeppelin',
                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type='directory',
                               action=['create_on_execute'],
                               )
 
+    self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
+                          content=interpreter_json_generated.template_after_base,
+                          owner='zeppelin',
+                          group='zeppelin',
+                          )
+
+
     self.assertResourceCalled('HdfsResource',
                               '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
                               security_enabled=False,
@@ -394,25 +395,6 @@ class TestZeppelin070(RMFTestCase):
                               )
 
     self.assertResourceCalled('HdfsResource',
-                              '/user/zeppelin/hdfs:///user/zeppelin/conf',
-                              security_enabled=False,
-                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
-                              keytab=UnknownConfigurationMock(),
-                              default_fs='hdfs://c6401.ambari.apache.org:8020',
-                              recursive_chown=True,
-                              recursive_chmod=True,
-                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site={u'a': u'b'},
-                              kinit_path_local='/usr/bin/kinit',
-                              principal_name=UnknownConfigurationMock(),
-                              user='hdfs',
-                              owner='zeppelin',
-                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
-                              type='directory',
-                              action=['create_on_execute'],
-                              )
-
-    self.assertResourceCalled('HdfsResource',
                               '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
                               security_enabled=False,
                               hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
@@ -439,24 +421,6 @@ class TestZeppelin070(RMFTestCase):
                               group='zeppelin',
                               )
 
-    self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf',
-                              security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
-                              keytab = UnknownConfigurationMock(),
-                              default_fs = 'hdfs://c6401.ambari.apache.org:8020',
-                              hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site = {u'a': u'b'},
-                              kinit_path_local = '/usr/bin/kinit',
-                              principal_name = UnknownConfigurationMock(),
-                              recursive_chown=True,
-                              recursive_chmod=True,
-                              user = 'hdfs',
-                              owner = 'zeppelin',
-                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
-                              type = 'directory',
-                              action = ['create_on_execute'],
-                              )
-
     self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
         security_enabled = False,
         hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
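
On the test side, the File('/etc/zeppelin/conf/interpreter.json', ...) expectation
moves to after the HdfsResource call that creates the conf directory, and the later
duplicate directory-creation expectations are dropped, since that directory is now
created once during setup. Roughly, the expected resource order in
test_zeppelin_070.py becomes (a condensed excerpt, not the full assertions):

# Condensed sketch of the expected resource order after this patch.
self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf',
                          type='directory', action=['create_on_execute'], owner='zeppelin')
self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
                          owner='zeppelin', group='zeppelin')
self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
                          type='file', action=['create_on_execute'], owner='zeppelin')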
