This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-1408
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 2a476673982675ad11e44959aaa881d274c599d7
Author: owlleg6 <[email protected]>
AuthorDate: Tue May 17 17:50:37 2022 +0300

    added files and func to actions lib
---
 .../src/general/lib/azure/actions_lib.py           | 29 +++++++++
 .../scripts/azure/dataengine-service_configure.py  |  0
 .../scripts/azure/dataengine-service_create.py     |  0
 .../scripts/azure/dataengine-service_prepare.py    | 74 ++++++++++++++++++++++
 4 files changed, 103 insertions(+)

diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py 
b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index 59acc388f..f68e2de48 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -1494,3 +1494,32 @@ def find_des_jars(all_jars, des_path):
     except Exception as err:
         print('Error:', str(err))
         sys.exit(1)
+
def create_hdinsight_cluster(resource_group_name, instance_name, cluster_parameters):
    """Start creation of an HDInsight Spark cluster (async LRO).

    :param resource_group_name: Azure resource group to create the cluster in.
    :param instance_name: name of the HDInsight cluster to create.
    :param cluster_parameters: ClusterCreateParametersExtended describing the cluster.
    :return: the poller returned by ``clusters.begin_create``, or ``None`` on failure
        (errors are logged and appended to the result instead of being re-raised).
    """
    try:
        # BUG FIX: AzureActions.__init__() invokes the initializer on the class
        # with no instance (missing `self`) and returns None, so accessing
        # .hdinsight_client would fail. Create a real instance instead.
        hdinsight_client = AzureActions().hdinsight_client
        # BUG FIX: the original formatted the literal 'hdinsight' instead of
        # the actual cluster name.
        print('Starting to create HDInsight Spark cluster {}'.format(instance_name))
        result = hdinsight_client.clusters.begin_create(resource_group_name, instance_name,
                                                        cluster_parameters)
        return result
    except Exception as err:
        # BUG FIX: traceback.print_exc() returns None (it prints to the stream),
        # so concatenating it into a string raised TypeError inside this
        # handler; format_exc() returns the traceback as text.
        logging.info(
            "Unable to create HDInsight Spark cluster: " + str(err) +
            "\n Traceback: " + traceback.format_exc())
        append_result(str({"error": "Unable to create HDInsight Spark cluster",
                           "error_message": str(err) + "\n Traceback: " +
                           traceback.format_exc()}))
        traceback.print_exc(file=sys.stdout)
+
+
def terminate_hdinsight_cluster(resource_group_name, instance_name, cluster_parameters):
    """Start deletion of an HDInsight Spark cluster (async LRO).

    :param resource_group_name: Azure resource group that holds the cluster.
    :param instance_name: name of the HDInsight cluster to delete.
    :param cluster_parameters: kept for signature compatibility with the create
        path; NOTE(review): ``begin_delete`` normally takes only the group and
        cluster name — confirm against the installed azure-mgmt-hdinsight API.
    :return: ``None``; errors are logged and appended to the result.
    """
    try:
        # BUG FIX: AzureActions.__init__() returns None (initializer called on
        # the class with no instance); instantiate AzureActions() instead.
        hdinsight_client = AzureActions().hdinsight_client
        hdinsight_client.clusters.begin_delete(resource_group_name, instance_name,
                                               cluster_parameters)
    except Exception as err:
        # BUG FIX: traceback.print_exc() returns None, which cannot be
        # concatenated to a str; use format_exc() for the traceback text.
        logging.info(
            "Unable to delete HDInsight Spark cluster: " + str(err) +
            "\n Traceback: " + traceback.format_exc())
        # BUG FIX: copy-paste error — message said "create" in the delete path.
        append_result(str({"error": "Unable to delete HDInsight Spark cluster",
                           "error_message": str(err) + "\n Traceback: " +
                           traceback.format_exc()}))
        traceback.print_exc(file=sys.stdout)
\ No newline at end of file
diff --git 
a/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_configure.py
 
b/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_configure.py
new file mode 100644
index 000000000..e69de29bb
diff --git 
a/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_create.py
 
b/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_create.py
new file mode 100644
index 000000000..e69de29bb
diff --git 
a/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_prepare.py
 
b/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_prepare.py
new file mode 100644
index 000000000..87ac3ab88
--- /dev/null
+++ 
b/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_prepare.py
@@ -0,0 +1,74 @@
+import datalab.actions_lib
+import datalab.fab
+import datalab.meta_lib
+import json
+from datalab.logger import logging
+import multiprocessing
+import os
+import sys
+import traceback
+import subprocess
+from Crypto.PublicKey import RSA
+from fabric import *
+from azure.mgmt.hdinsight.models import *
+
+
+
def create_cluster_parameters(location=None, cluster_login_user_name=None, password=None,
                              ssh_user_name=None, storage_account_name=None,
                              blob_endpoint_suffix=None, storage_account_key=None,
                              container_name=None):
    """Build the ClusterCreateParametersExtended for an HDInsight Spark 4.0 cluster.

    Each argument defaults to the module-level constant of the same upper-case
    name, preserving the original global-based behavior while letting callers
    supply values explicitly.

    NOTE(review): none of the fallback globals (LOCATION, CLUSTER_LOGIN_USER_NAME,
    PASSWORD, SSH_USER_NAME, STORAGE_ACCOUNT_NAME, BLOB_ENDPOINT_SUFFIX,
    STORAGE_ACCOUNT_KEY, CONTAINER_NAME) are defined anywhere in this file, so
    calling this with no arguments raises NameError — confirm where they are
    meant to come from (config, CLI args) and wire them in.

    :return: azure.mgmt.hdinsight.models.ClusterCreateParametersExtended
    """
    # Fall back to the module-level constants lazily so the signature stays
    # backward-compatible with the original zero-argument call.
    location = LOCATION if location is None else location
    cluster_login_user_name = (CLUSTER_LOGIN_USER_NAME if cluster_login_user_name is None
                               else cluster_login_user_name)
    password = PASSWORD if password is None else password
    ssh_user_name = SSH_USER_NAME if ssh_user_name is None else ssh_user_name
    storage_account_name = (STORAGE_ACCOUNT_NAME if storage_account_name is None
                            else storage_account_name)
    blob_endpoint_suffix = (BLOB_ENDPOINT_SUFFIX if blob_endpoint_suffix is None
                            else blob_endpoint_suffix)
    storage_account_key = (STORAGE_ACCOUNT_KEY if storage_account_key is None
                           else storage_account_key)
    container_name = CONTAINER_NAME if container_name is None else container_name

    # Head and worker nodes share the same VM size and Linux credentials.
    def _role(name):
        return Role(
            name=name,
            target_instance_count=2,
            hardware_profile=HardwareProfile(vm_size="Standard_E8_V3"),
            os_profile=OsProfile(
                linux_operating_system_profile=LinuxOperatingSystemProfile(
                    username=ssh_user_name,
                    password=password
                )
            )
        )

    return ClusterCreateParametersExtended(
        location=location,
        tags={},
        properties=ClusterCreateProperties(
            cluster_version="4.0",
            os_type=OSType.linux,
            tier=Tier.standard,
            cluster_definition=ClusterDefinition(
                kind="Spark",
                configurations={
                    # Ambari/gateway REST credentials for the cluster endpoint.
                    "gateway": {
                        "restAuthCredential.isEnabled": "true",
                        "restAuthCredential.username": cluster_login_user_name,
                        "restAuthCredential.password": password
                    }
                }
            ),
            compute_profile=ComputeProfile(
                roles=[_role("headnode"), _role("workernode")]
            ),
            storage_profile=StorageProfile(
                storageaccounts=[
                    StorageAccount(
                        # Azure requires the blob-endpoint FQDN here, and the
                        # container name must be lower-case.
                        name=storage_account_name + blob_endpoint_suffix,
                        key=storage_account_key,
                        container=container_name.lower(),
                        is_default=True
                    )
                ]
            )
        )
    )
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to