This is an automated email from the ASF dual-hosted git repository.
mykolabodnar pushed a commit to branch DLAB-515
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git
The following commit(s) were added to refs/heads/DLAB-515 by this push:
new e5540a2 [DLAB-515] Dataengine kernels removing fixed
e5540a2 is described below
commit e5540a2aac37b33a6fd631486240d81f6479e303
Author: bodnarmykola <[email protected]>
AuthorDate: Tue Jul 28 21:59:25 2020 +0300
[DLAB-515] Dataengine kernels removing fixed
---
.../scripts/aws/jupyter_dataengine-service_create_configs.py | 6 +++---
.../src/general/scripts/os/jupyter_dataengine_create_configs.py | 6 +++---
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
index 456f9a5..1bb09b3 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
@@ -184,11 +184,11 @@ def install_sparkamagic_kernels(args):
     local('sed -i \'s|SparkR|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkrkernel/kernel.json'.format(
         sparkr_kernel_name, args.os_user))
     local('sudo mv -f /home/{0}/.local/share/jupyter/kernels/pysparkkernel '
-          '/home/{0}/.local/share/jupyter/kernels/pysparkkernel-{1}'.format(args.os_user, args.cluster_name))
+          '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name))
     local('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkkernel '
-          '/home/{0}/.local/share/jupyter/kernels/sparkkernel-{1}'.format(args.os_user, args.cluster_name))
+          '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name))
     local('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkrkernel '
-          '/home/{0}/.local/share/jupyter/kernels/sparkrkernel-{1}'.format(args.os_user, args.cluster_name))
+          '/home/{0}/.local/share/jupyter/kernels/sparkrkernel_{1}'.format(args.os_user, args.cluster_name))
     local('mkdir -p /home/' + args.os_user + '/.sparkmagic')
     local('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json')
     local('sed -i \'s|LIVY_HOST|{0}|g\' /home/{1}/.sparkmagic/config.json'.format(
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
index b7db038..cf0639c 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
@@ -176,11 +176,11 @@ def install_sparkamagic_kernels(args):
     local('sed -i \'s|SparkR|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkrkernel/kernel.json'.format(
         sparkr_kernel_name, args.os_user))
     local('sudo mv -f /home/{0}/.local/share/jupyter/kernels/pysparkkernel '
-          '/home/{0}/.local/share/jupyter/kernels/pysparkkernel-{1}'.format(args.os_user, args.cluster_name))
+          '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name))
     local('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkkernel '
-          '/home/{0}/.local/share/jupyter/kernels/sparkkernel-{1}'.format(args.os_user, args.cluster_name))
+          '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name))
     local('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkrkernel '
-          '/home/{0}/.local/share/jupyter/kernels/sparkrkernel-{1}'.format(args.os_user, args.cluster_name))
+          '/home/{0}/.local/share/jupyter/kernels/sparkrkernel_{1}'.format(args.os_user, args.cluster_name))
     local('mkdir -p /home/' + args.os_user + '/.sparkmagic')
     local('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json')
     spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]