This is an automated email from the ASF dual-hosted git repository.

mykolabodnar pushed a commit to branch DLAB-515
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git


The following commit(s) were added to refs/heads/DLAB-515 by this push:
     new b45fecb  [DLAB-515] JAVA_HOME for Zeppelin/Spark added
b45fecb is described below

commit b45fecb8961edbd8f9e7b2d2d497ada801b13b4d
Author: bodnarmykola <[email protected]>
AuthorDate: Tue Aug 4 09:42:50 2020 +0300

    [DLAB-515] JAVA_HOME for Zeppelin/Spark added
---
 infrastructure-provisioning/src/general/lib/aws/actions_lib.py        | 4 ++++
 infrastructure-provisioning/src/general/lib/azure/actions_lib.py      | 4 ++++
 infrastructure-provisioning/src/general/lib/gcp/actions_lib.py        | 4 ++++
 infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py | 1 +
 .../src/zeppelin/scripts/configure_zeppelin_node.py                   | 2 ++
 5 files changed, 15 insertions(+)
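
    For context (not part of the commit): a minimal local sketch of the JAVA_HOME
    detection used in the diff below. It assumes a Java 8 JRE is registered with
    update-alternatives and uses a stand-in file path; the commit itself runs the
    equivalent shell commands remotely through Fabric's run()/sudo().

    # Sketch only, not the commit's code.
    import re
    import subprocess

    def detect_java8_home():
        # Equivalent of: update-alternatives --query java | grep -o '/.*/java-8.*/jre'
        out = subprocess.check_output(['update-alternatives', '--query', 'java'], text=True)
        matches = re.findall(r'/\S*?/java-8\S*?/jre', out)
        if not matches:
            raise RuntimeError('no Java 8 JRE registered with update-alternatives')
        return matches[0]

    def append_java_home(env_file, java_home):
        # Mirrors the commit's: echo 'export JAVA_HOME=...' >> spark-env.sh
        with open(env_file, 'a') as conf:
            conf.write("export JAVA_HOME='{}'\n".format(java_home))

    if __name__ == '__main__':
        java_home = detect_java8_home()
        append_java_home('/tmp/spark-env.sh', java_home)  # stand-in path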

diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index ba21ffd..8b9ffab 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -1647,6 +1647,10 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
             endpoint_url))
         sudo('echo "spark.hadoop.fs.s3a.server-side-encryption-algorithm   AES256" >> '
              '/tmp/notebook_spark-defaults_local.conf')
+        if not exists('/opt/spark/conf/spark-env.sh'):
+            sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
+            java_home = run("update-alternatives --query java | grep -o \'/.*/java-8.*/jre\'").splitlines()[0]
+            sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
         if os.environ['application'] == 'zeppelin':
             sudo('echo \"spark.jars $(ls -1 ' + jars_dir + '* | tr \'\\n\' \',\')\" >> '
                                                            '/tmp/notebook_spark-defaults_local.conf')
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index 8cac3c4..761be9c 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -1126,6 +1126,10 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
             sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
             sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                                spark_memory))
+        if not exists('/opt/spark/conf/spark-env.sh'):
+            sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
+            java_home = run("update-alternatives --query java | grep -o \'/.*/java-8.*/jre\'").splitlines()[0]
+            sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
         if 'spark_configurations' in os.environ:
             dlab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
index 7310f37..e550d9d 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
@@ -1408,6 +1408,10 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
             sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
             sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                                spark_memory))
+        if not exists('/opt/spark/conf/spark-env.sh'):
+            sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
+            java_home = run("update-alternatives --query java | grep -o \'/.*/java-8.*/jre\'").splitlines()[0]
+            sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
         if 'spark_configurations' in os.environ:
             dlab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
index 6c73b2d..2904d9f 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
@@ -138,6 +138,7 @@ def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
             sudo('touch /home/{}/.Renviron'.format(os_user))
             sudo('chown {0}:{0} /home/{0}/.Renviron'.format(os_user))
             sudo('''echo 'SPARK_HOME="{0}"' >> /home/{1}/.Renviron'''.format(local_spark_path, os_user))
+            sudo('''echo 'JAVA_HOME="{0}"' >> /home/{1}/.Renviron'''.format(java_home, os_user))
             sudo('touch /home/{}/.Rprofile'.format(os_user))
             sudo('chown {0}:{0} /home/{0}/.Rprofile'.format(os_user))
             sudo('''echo 'library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))' >> /home/{}/.Rprofile'''.format(os_user))
diff --git a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
index 178f457..1afef2b 100644
--- a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
+++ b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
@@ -85,6 +85,8 @@ def configure_zeppelin(os_user):
             sudo('tar -zxvf /tmp/zeppelin-' + zeppelin_version + '-bin-netinst.tgz -C /opt/')
             sudo('ln -s /opt/zeppelin-' + zeppelin_version + '-bin-netinst /opt/zeppelin')
             sudo('cp /opt/zeppelin/conf/zeppelin-env.sh.template /opt/zeppelin/conf/zeppelin-env.sh')
+            java_home = run("update-alternatives --query java | grep -o \'/.*/java-8.*/jre\'").splitlines()[0]
+            sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/zeppelin/conf/zeppelin-env.sh".format(java_home))
             sudo('cp /opt/zeppelin/conf/zeppelin-site.xml.template /opt/zeppelin/conf/zeppelin-site.xml')
             sudo('sed -i \"/# export ZEPPELIN_PID_DIR/c\export ZEPPELIN_PID_DIR=/var/run/zeppelin\" /opt/zeppelin/conf/zeppelin-env.sh')
             sudo('sed -i \"/# export ZEPPELIN_IDENT_STRING/c\export ZEPPELIN_IDENT_STRING=notebook\" /opt/zeppelin/conf/zeppelin-env.sh')


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
