This is an automated email from the ASF dual-hosted git repository.

yasith pushed a commit to branch resource-mgmt-rest-api
in repository https://gitbox.apache.org/repos/asf/airavata.git

commit 03162e622699252826495b99824ef0e4c9ce2ade
Author: yasithdev <[email protected]>
AuthorDate: Wed Oct 29 16:17:10 2025 -0500

    update SDK to handle input storageId and output storageId; use input storageId as the default value for output storageId
---
 .../airavata_experiments/airavata.py               | 34 +++++++++-----
 .../airavata_experiments/md/applications.py        | 53 ++++++++++++++++++++++
 .../clients/utils/data_model_creation_util.py      |  6 ++-
 .../clients/utils/experiment_handler_util.py       | 34 ++++++++------
 .../samples/create_launch_echo_experiment.py       |  3 +-
 .../samples/create_launch_gaussian_experiment.py   |  3 +-
 6 files changed, 104 insertions(+), 29 deletions(-)
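
The change threads separate input and output storage resources through the SDK, with the output storage falling back to the input storage when it is not supplied. As a rough caller-facing sketch for the airavata_experiments package (the method name launch_experiment, the constructor arguments, and the host names below are assumptions; only the input_sr_host / output_sr_host keyword names and the fallback behavior come from the diff):

    from airavata_experiments.airavata import AiravataOperator

    op = AiravataOperator(access_token="...")        # hypothetical constructor arguments
    state = op.launch_experiment(                    # hypothetical method name; returns a LaunchState
        experiment_name="echo-test",                 # placeholder experiment
        computation_resource_name="example.hpc",     # placeholder compute resource
        input_sr_host="inputs.example.org",          # storage used to stage input files
        output_sr_host=None,                         # None falls back to input_sr_host
    )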

diff --git a/dev-tools/airavata-python-sdk/airavata_experiments/airavata.py b/dev-tools/airavata-python-sdk/airavata_experiments/airavata.py
index f5ca757de1..0048dfc898 100644
--- a/dev-tools/airavata-python-sdk/airavata_experiments/airavata.py
+++ b/dev-tools/airavata-python-sdk/airavata_experiments/airavata.py
@@ -112,7 +112,8 @@ class AiravataOperator:
       experiment_model: ExperimentModel,
       computation_resource_name: str,
       group: str,
-      storageId: str,
+      inputStorageId: str,
+      outputStorageId: str,
       node_count: int,
       total_cpu_count: int,
       queue_name: str,
@@ -133,7 +134,8 @@ class AiravataOperator:
         userConfigData.computationalResourceScheduling = computRes
 
         userConfigData.groupResourceProfileId = groupResourceProfileId
-        userConfigData.storageId = storageId
+        userConfigData.storageId = inputStorageId
+        userConfigData.outputStorageResourceId = outputStorageId
 
         userConfigData.experimentDataDir = experiment_dir_path
         userConfigData.airavataAutoSchedule = auto_schedule
@@ -535,7 +537,8 @@ class AiravataOperator:
       group: str = "Default",
       *,
       gateway_id: str | None = None,
-      sr_host: str | None = None,
+      input_sr_host: str | None = None,
+      output_sr_host: str | None = None,
       auto_schedule: bool = False,
   ) -> LaunchState:
     """
@@ -545,7 +548,8 @@ class AiravataOperator:
     # preprocess args (str)
     print("[AV] Preprocessing args...")
     gateway_id = str(gateway_id or self.default_gateway_id())
-    sr_host = str(sr_host or self.default_sr_hostname())
+    input_sr_host = str(input_sr_host or self.default_sr_hostname())
+    output_sr_host = str(output_sr_host or input_sr_host or self.default_sr_hostname())
     mount_point = Path(self.default_gateway_data_store_dir()) / self.user_id
     server_url = urlparse(self.connection_svc_url()).netloc
 
@@ -558,7 +562,8 @@ class AiravataOperator:
     assert len(gateway_id) > 0, f"Invalid gateway_id: {gateway_id}"
     assert len(queue_name) > 0, f"Invalid queue_name: {queue_name}"
     assert len(group) > 0, f"Invalid group name: {group}"
-    assert len(sr_host) > 0, f"Invalid sr_host: {sr_host}"
+    assert len(input_sr_host) > 0, f"Invalid input_sr_host: {input_sr_host}"
+    assert len(output_sr_host) > 0, f"Invalid output_sr_host: {output_sr_host}"
     assert len(project) > 0, f"Invalid project_name: {project}"
    assert len(mount_point.as_posix()) > 0, f"Invalid mount_point: {mount_point}"
 
@@ -585,10 +590,14 @@ class AiravataOperator:
    data_inputs.update({"agent_id": data_inputs.get("agent_id", str(uuid.uuid4()))})
     data_inputs.update({"server_url": server_url})
 
-    # setup runtime params
-    print("[AV] Setting up runtime params...")
-    storage = self.get_storage(sr_host)
-    sr_id = storage.storageResourceId
+    # setup storage
+    print("[AV] Setting up storage...")
+    input_storage = self.get_storage(input_sr_host)
+    output_storage = self.get_storage(output_sr_host)
+    assert input_storage is not None, f"Invalid input_storage: {input_storage}"
+    assert output_storage is not None, f"Invalid output_storage: {output_storage}"
+    input_sr_id = input_storage.storageResourceId
+    output_sr_id = output_storage.storageResourceId
 
     # setup application interface
     print("[AV] Setting up application interface...")
@@ -607,7 +616,7 @@ class AiravataOperator:
     # setup experiment directory
     print("[AV] Setting up experiment directory...")
     exp_dir = self.make_experiment_dir(
-        sr_host=storage.hostName,
+        sr_host=input_storage.hostName,
         project_name=project,
         experiment_name=experiment_name,
     )
@@ -620,7 +629,8 @@ class AiravataOperator:
         experiment_model=experiment,
         computation_resource_name=computation_resource_name,
         group=group,
-        storageId=sr_id,
+        inputStorageId=input_sr_id,
+        outputStorageId=output_sr_id,
         node_count=node_count,
         total_cpu_count=cpu_count,
         wall_time_limit=walltime,
@@ -630,7 +640,7 @@ class AiravataOperator:
     )
 
     def register_input_file(file: Path) -> str:
-      return str(self.register_input_file(file.name, sr_host, sr_id, gateway_id, file.name, abs_path))
+      return str(self.register_input_file(file.name, input_sr_host, input_sr_id, gateway_id, file.name, abs_path))
     
     # set up experiment inputs
     print("[AV] Setting up experiment inputs...")
diff --git a/dev-tools/airavata-python-sdk/airavata_experiments/md/applications.py b/dev-tools/airavata-python-sdk/airavata_experiments/md/applications.py
index 19e67d6ea0..76bc11b179 100644
--- a/dev-tools/airavata-python-sdk/airavata_experiments/md/applications.py
+++ b/dev-tools/airavata-python-sdk/airavata_experiments/md/applications.py
@@ -154,6 +154,59 @@ class AlphaFold2(ExperimentApp):
     obj.tasks = []
     return obj
 
+class VizFold_MSA(ExperimentApp):
+  """
+  VizFold lets you compute the 3D structure of a protein (using OpenFold),
+  and visualize its residue-to-residue attention scores using arc diagrams.
+  """
+
+  def __init__(
+      self,
+  ) -> None:
+    super().__init__(app_id="VizFold-MSA")
+
+  @classmethod
+  def initialize(
+      cls,
+      name: str,
+      protein: str,
+  ) -> Experiment[ExperimentApp]:
+    app = cls()
+    obj = Experiment[ExperimentApp](name, app).with_inputs(
+        protein=protein,
+    )
+    obj.input_mapping = {
+        "Protein": ("protein", "str"),
+    }
+    obj.tasks = []
+    return obj
+
+class VizFold_Fold(ExperimentApp):
+  """
+  VizFold lets you compute the 3D structure of a protein (using OpenFold),
+  and visualize its residue-to-residue attention scores using arc diagrams.
+  """
+
+  def __init__(
+      self,
+  ) -> None:
+    super().__init__(app_id="VizFold-Fold")
+
+  @classmethod
+  def initialize(
+      cls,
+      name: str,
+      protein: str,
+  ) -> Experiment[ExperimentApp]:
+    app = cls()
+    obj = Experiment[ExperimentApp](name, app).with_inputs(
+        protein=protein,
+    )
+    obj.input_mapping = {
+        "Protein": ("protein", "str"),
+    }
+    obj.tasks = []
+    return obj
 
 class AMBER(ExperimentApp):
   """
diff --git a/dev-tools/airavata-python-sdk/airavata_sdk/clients/utils/data_model_creation_util.py b/dev-tools/airavata-python-sdk/airavata_sdk/clients/utils/data_model_creation_util.py
index 65dfa7df07..bf93f88db2 100644
--- a/dev-tools/airavata-python-sdk/airavata_sdk/clients/utils/data_model_creation_util.py
+++ b/dev-tools/airavata-python-sdk/airavata_sdk/clients/utils/data_model_creation_util.py
@@ -76,7 +76,8 @@ class DataModelCreationUtil(object):
             experiment_model: ExperimentModel,
             computation_resource_name: str,
             group_resource_profile_name: str,
-            storageId: str,
+            inputStorageId: str,
+            outputStorageId: str,
             node_count: int,
             total_cpu_count: int,
             queue_name: str,
@@ -97,7 +98,8 @@ class DataModelCreationUtil(object):
         userConfigData.computationalResourceScheduling = computRes
 
         userConfigData.groupResourceProfileId = groupResourceProfileId
-        userConfigData.storageId = storageId
+        userConfigData.storageId = inputStorageId
+        userConfigData.outputStorageResourceId = outputStorageId
 
         userConfigData.experimentDataDir = experiment_dir_path
         userConfigData.airavataAutoSchedule = auto_schedule
diff --git a/dev-tools/airavata-python-sdk/airavata_sdk/clients/utils/experiment_handler_util.py b/dev-tools/airavata-python-sdk/airavata_sdk/clients/utils/experiment_handler_util.py
index 442d3e90ad..9557da3db8 100644
--- a/dev-tools/airavata-python-sdk/airavata_sdk/clients/utils/experiment_handler_util.py
+++ b/dev-tools/airavata-python-sdk/airavata_sdk/clients/utils/experiment_handler_util.py
@@ -83,6 +83,7 @@ class ExperimentHandlerUtil(object):
         group_name: str = "Default",
         application_name: str = "Default Application",
         project_name: str = "Default Project",
+        output_storage_host: str | None = None,
     ):
         execution_id = self.airavata_util.get_execution_id(application_name)
         assert execution_id is not None
@@ -91,14 +92,20 @@ class ExperimentHandlerUtil(object):
        resource_host_id = self.airavata_util.get_resource_host_id(computation_resource_name)
        group_resource_profile_id = self.airavata_util.get_group_resource_profile_id(group_name)
 
-        storage_host = self.settings.STORAGE_RESOURCE_HOST
-        assert storage_host is not None
+        input_storage_host = self.settings.STORAGE_RESOURCE_HOST
+        assert input_storage_host is not None
 
         sftp_port = self.settings.SFTP_PORT
         assert sftp_port is not None
 
-        storage_id = self.airavata_util.get_storage_resource_id(storage_host)
-        assert storage_id is not None
+        input_storage_id = self.airavata_util.get_storage_resource_id(input_storage_host)
+        assert input_storage_id is not None
+
+        if output_storage_host is not None:
+            output_storage_id = self.airavata_util.get_storage_resource_id(output_storage_host)
+        else:
+            output_storage_id = input_storage_id
+        assert output_storage_id is not None
 
         assert project_name is not None
         assert application_name is not None
@@ -112,8 +119,8 @@ class ExperimentHandlerUtil(object):
             description=description,
         )
 
-        logger.info("connnecting to file upload endpoint %s : %s", 
storage_host, sftp_port)
-        sftp_connector = SFTPConnector(host=storage_host,
+        logger.info("connnecting to file upload endpoint %s : %s", 
input_storage_host, sftp_port)
+        sftp_connector = SFTPConnector(host=input_storage_host,
                                        port=sftp_port,
                                        username=self.user_id,
                                        password=self.access_token)
@@ -136,7 +143,8 @@ class ExperimentHandlerUtil(object):
         experiment = self.data_model_client.configure_computation_resource_scheduling(experiment_model=experiment,
                                                                                       computation_resource_name=computation_resource_name,
                                                                                       group_resource_profile_name=group_name,
-                                                                                      storageId=storage_id,
+                                                                                      inputStorageId=input_storage_id,
+                                                                                      outputStorageId=output_storage_id,
                                                                                       node_count=int(node_count),
                                                                                       total_cpu_count=int(cpu_count),
                                                                                       wall_time_limit=int(walltime),
@@ -151,8 +159,8 @@ class ExperimentHandlerUtil(object):
                     data_uris = []
                     for x in input_file_mapping[key]:
                        data_uri = self.data_model_client.register_input_file(file_identifier=x,
-                                                                             storage_name=storage_host,
-                                                                             storageId=storage_id,
+                                                                             storage_name=input_storage_host,
+                                                                             storageId=input_storage_id,
                                                                              input_file_name=x,
                                                                              uploaded_storage_path=path)
                         data_uris.append(data_uri)
@@ -160,8 +168,8 @@ class ExperimentHandlerUtil(object):
                 else:
                     x = input_file_mapping[key]
                    data_uri = self.data_model_client.register_input_file(file_identifier=x,
-                                                                         storage_name=storage_host,
-                                                                         storageId=storage_id,
+                                                                         storage_name=input_storage_host,
+                                                                         storageId=input_storage_id,
                                                                          input_file_name=x,
                                                                          uploaded_storage_path=path)
                     new_file_mapping[key] = data_uri
@@ -177,8 +185,8 @@ class ExperimentHandlerUtil(object):
                 data_uris = []
                 for x in input_files:
                    data_uri = self.data_model_client.register_input_file(file_identifier=x,
-                                                                         storage_name=storage_host,
-                                                                         storageId=storage_id,
+                                                                         storage_name=input_storage_host,
+                                                                         storageId=input_storage_id,
                                                                          input_file_name=x,
                                                                          uploaded_storage_path=path)
                     data_uris.append(data_uri)
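
In airavata_sdk, the handler now accepts an optional output_storage_host and reuses the configured input storage host when it is omitted. A hedged sketch of the call site (the method name launch_experiment, the constructor call, and the host name are assumptions; the output_storage_host parameter and its default come from the diff above):

    from airavata_sdk.clients.utils.experiment_handler_util import ExperimentHandlerUtil

    handler = ExperimentHandlerUtil()                   # assumes default settings and credentials
    handler.launch_experiment(                          # hypothetical method name
        experiment_name="echo-test",                    # placeholder; other arguments omitted
        output_storage_host="archive.example.org",      # omit to reuse STORAGE_RESOURCE_HOST
    )
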
diff --git a/dev-tools/airavata-python-sdk/airavata_sdk/samples/create_launch_echo_experiment.py b/dev-tools/airavata-python-sdk/airavata_sdk/samples/create_launch_echo_experiment.py
index 53801a74c9..a85a1d8c85 100644
--- a/dev-tools/airavata-python-sdk/airavata_sdk/samples/create_launch_echo_experiment.py
+++ b/dev-tools/airavata-python-sdk/airavata_sdk/samples/create_launch_echo_experiment.py
@@ -71,7 +71,8 @@ path = Settings().GATEWAY_DATA_STORE_DIR + path_suffix
 experiment = data_model_client.configure_computation_resource_scheduling(experiment_model=experiment,
                                                                          computation_resource_name="karst.uits.iu.edu",
                                                                          group_resource_profile_name="Default Gateway Profile",
-                                                                         storageId="pgadev.scigap.org",
+                                                                         inputStorageId="pgadev.scigap.org",
+                                                                         outputStorageId="pgadev.scigap.org",
                                                                          node_count=1,
                                                                          total_cpu_count=16,
                                                                          wall_time_limit=15,
diff --git a/dev-tools/airavata-python-sdk/airavata_sdk/samples/create_launch_gaussian_experiment.py b/dev-tools/airavata-python-sdk/airavata_sdk/samples/create_launch_gaussian_experiment.py
index 92b7c3b2e8..4640aed630 100644
--- a/dev-tools/airavata-python-sdk/airavata_sdk/samples/create_launch_gaussian_experiment.py
+++ b/dev-tools/airavata-python-sdk/airavata_sdk/samples/create_launch_gaussian_experiment.py
@@ -82,7 +82,8 @@ path = fb.upload_files(api_server_client, credential_store_client, token, gatewa
 experiment = data_model_client.configure_computation_resource_scheduling(experiment_model=experiment,
                                                                          computation_resource_name="karst.uits.iu.edu",
                                                                          group_resource_profile_name="Default Gateway Profile",
-                                                                         storageId="pgadev.scigap.org",
+                                                                         inputStorageId="pgadev.scigap.org",
+                                                                         outputStorageId="pgadev.scigap.org",
                                                                          node_count=1,
                                                                          total_cpu_count=16,
                                                                          wall_time_limit=15,
