This is an automated email from the ASF dual-hosted git repository.

isjarana pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airavata-cerebrum.git


The following commit(s) were added to refs/heads/main by this push:
     new 67d4ff0  merge previous repo
67d4ff0 is described below

commit 67d4ff0ec3434822ba9a4ae29b7f982c77d84183
Author: Isuru Ranawaka <[email protected]>
AuthorDate: Sun Dec 3 11:12:41 2023 -0500

    merge previous repo
---
 .gitignore                                         | 160 ++++++
 LICENSE                                            | 201 ++++++++
 cybershuttle/__init__.py                           |   0
 cybershuttle/annotations/__init__.py               |   0
 cybershuttle/annotations/decorator.py              | 138 +++++
 requirements.txt                                   |   2 +
 resources/BMTK_v1_demo/BMTK_V1.ipynb               | 561 +++++++++++++++++++++
 .../Computing_Platform-analyis.drawio.png          | Bin 0 -> 398056 bytes
 resources/BMTK_v1_demo/model_simulation.png        | Bin 0 -> 63138 bytes
 resources/BMTK_v1_demo/neuro-apis.drawio.png       | Bin 0 -> 61769 bytes
 resources/BMTK_v1_demo/requirements.txt            |  49 ++
 resources/BMTK_v1_demo/settings.ini                |  44 ++
 resources/BMTK_v1_demo/workflow.drawio.png         | Bin 0 -> 32846 bytes
 setup.cfg                                          |   0
 setup.py                                           |  29 ++
 15 files changed, 1184 insertions(+)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..68bc17f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,160 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+#   For a library or package, you might want to ignore these files since the 
code is
+#   intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in 
version control.
+#   However, in case of collaboration, if having platform-specific 
dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that 
don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+#   Similar to Pipfile.lock, it is generally recommended to include 
poetry.lock in version control.
+#   This is especially recommended for binary packages to ensure 
reproducibility, and is more
+#   commonly ignored for libraries.
+#   
https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock 
in version control.
+#pdm.lock
+#   pdm stores project-wide configurations in .pdm.toml, but it is recommended 
to not include it
+#   in version control.
+#   https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and 
github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+#  JetBrains specific template is maintained in a separate JetBrains.gitignore 
that can
+#  be found at 
https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+#  and can be added to the global gitignore or merged into this file.  For a 
more nuclear
+#  option (not recommended) you can uncomment the following to ignore the 
entire idea folder.
+#.idea/
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..261eeb9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/cybershuttle/__init__.py b/cybershuttle/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cybershuttle/annotations/__init__.py 
b/cybershuttle/annotations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cybershuttle/annotations/decorator.py 
b/cybershuttle/annotations/decorator.py
new file mode 100644
index 0000000..4c398d5
--- /dev/null
+++ b/cybershuttle/annotations/decorator.py
@@ -0,0 +1,138 @@
+import zipfile
+import os
+import random
+import string
+import shutil
+
+from airavata_sdk.clients.utils.experiment_handler_util import 
ExperimentHandlerUtil
+from ipywidgets import widgets
+from IPython.display import display
+
+
class CybershuttleHPCRun(object):
    """Decorator that validates a workload locally, then launches it on HPC.

    Wrapping a function with ``@CybershuttleHPCRun(output=...)`` runs the
    function locally first (as a validation step), then walks the user
    through interactive ipywidgets dropdowns to pick a compute resource and
    queue, and finally submits an Airavata experiment with the function's
    input directory zipped up as the experiment payload.

    NOTE(review): connection settings are assumed to live in ``./settings.ini``
    relative to the notebook's working directory — confirm with deployment docs.
    """

    def __init__(self, output='./'):
        """Initialize the decorator.

        :param output: local directory where experiment output is staged.
        """
        config_file = "./settings.ini"
        self.experiment_handler = ExperimentHandlerUtil(config_file)
        self.output = output
        self.input_path = ""
        self.selected_compute_resource_name = ""
        self.selected_queue_name = ""

    def __call__(self, func):
        """Wrap *func* so its local run is followed by HPC submission."""
        import functools
        import inspect

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Resolve the effective value of the decorated function's
            # 'input' parameter, honoring positional calls and declared
            # defaults.  The previous kwargs['input'] lookup raised
            # KeyError whenever the caller relied on the default or
            # passed the path positionally.
            try:
                bound = inspect.signature(func).bind(*args, **kwargs)
                bound.apply_defaults()
                self.input_path = bound.arguments.get('input', self.input_path)
            except TypeError:
                # Signature could not be bound (e.g. C-implemented
                # callable); fall back to the original keyword lookup.
                self.input_path = kwargs.get('input', self.input_path)

            # Run the user's function locally first as a validation step.
            print("Executing user provided function...")
            result = func(*args, **kwargs)
            print("Function execution succeeded...")
            # Kick off the interactive resource-selection flow; the
            # actual HPC submission happens from the widget callbacks.
            self._select_compute_resources()
            return result

        return wrapper

    def _run_on_hpc(self, local_input_path, compute_resource_name, queue_name, output_path):
        """Zip the input directory and launch the Airavata experiment.

        WARNING: ``_create_BMTK_file_mapping`` zips and then DELETES the
        subdirectories under *local_input_path*; the zip is re-expanded
        afterwards by ``_unzip_and_delete``.
        """
        # Random suffix keeps experiment names unique across launches.
        allen_v1 = "allen_v1_" + self._generate_random_string(4)

        print("Experiment name: ", allen_v1)
        print("local_data_directory: ", local_input_path)
        print("output_data_directory: ", output_path)
        print("scheduled compute resource: ", compute_resource_name)
        print("scheduled queue: ", queue_name)

        file_mapping = self._create_BMTK_file_mapping(local_input_path)

        print("Creating experiment ....")
        self.experiment_handler.launch_experiment(experiment_name=allen_v1,
                                                  description='allen_v1_experminent',
                                                  local_input_path=local_input_path,
                                                  input_file_mapping=file_mapping,
                                                  computation_resource_name=compute_resource_name,
                                                  queue_name=queue_name,
                                                  output_path=output_path)

        # Restore the local input directory to its pre-zip layout.
        zip_path = local_input_path + "/all_input.zip"
        self._unzip_and_delete(zip_path)

    def _create_BMTK_file_mapping(self, input_folder_path):
        """Build the experiment's input-file mapping for BMTK.

        The whole input folder is zipped into ``all_input.zip`` (mapped to
        'Network Inputs'); any remaining top-level ``.json`` file is mapped
        to 'Simulation Config File'.  Returns a dict of input-name -> file.
        """
        file_mapping = {}
        self._create_zip(input_folder_path, "all_input.zip")
        input_file_list = self._get_file_list(input_folder_path)
        for x in input_file_list:
            if x.endswith(".zip"):
                file_mapping['Network Inputs'] = x
            elif x.endswith(".json"):
                file_mapping['Simulation Config File'] = x
        return file_mapping

    def _create_zip(self, folder_path, zip_file_name):
        """Zip *folder_path*'s contents, delete its subfolders, move the zip in.

        Destructive: after zipping, every subdirectory of *folder_path* is
        removed and the zip file is moved into the (now flat) folder.
        """
        with zipfile.ZipFile(zip_file_name, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for root, _, files in os.walk(folder_path):
                for file in files:
                    file_path = os.path.join(root, file)
                    # Store paths relative to the folder so the archive
                    # unpacks back into the same layout.
                    zipf.write(file_path, arcname=os.path.relpath(file_path, folder_path))

        # Delete subfolders (bottom-up walk so parents are removed last).
        # Renamed loop variable: 'dir' shadowed the builtin.
        for root, subdirs, _ in os.walk(folder_path, topdown=False):
            for subdir in subdirs:
                dir_path = os.path.join(root, subdir)
                shutil.rmtree(dir_path)

        # Move zip file (created in the CWD) into the input folder.
        zip_file_path = os.path.join(folder_path, zip_file_name)
        shutil.move(zip_file_name, zip_file_path)

    def _get_file_list(self, folder_path):
        """Return the bare file names (no paths) found anywhere under *folder_path*."""
        file_list = []
        for root, _, files in os.walk(folder_path):
            for file in files:
                file_list.append(file)
        return file_list

    def _generate_random_string(self, length):
        """Return a random lowercase ASCII string of the given *length*."""
        characters = string.ascii_lowercase
        random_string = ''.join(random.choice(characters) for _ in range(length))
        return random_string

    def _select_compute_resources(self):
        """Show a dropdown of compute hosts configured in the experiment config."""
        exp_conf = self.experiment_handler.experiment_conf
        options = exp_conf.COMPUTE_HOST_DOMAIN.split(',')
        print("select compute resource")
        dropdown = widgets.Dropdown(
            options=options,
            value=options[0],
            description='Select Compute Resources',
            disabled=False,
        )
        # Chained widget flow: picking a host triggers queue selection.
        dropdown.observe(self._select_compute_resources_queue, names="value")
        display(dropdown)

    def _select_compute_resources_queue(self, compute_resource):
        """Widget callback: remember the chosen host, then show its queues."""
        selected_compute_resource = compute_resource.new
        self.selected_compute_resource_name = selected_compute_resource

        print("Select Compute Resources Queue ", self.selected_compute_resource_name)

        queues = self.experiment_handler.queue_names(selected_compute_resource)
        queue = widgets.Dropdown(
            options=queues,
            value=queues[0],
            description='Select Compute Resources Queue',
            disabled=False,
        )
        queue.observe(self._on_select_compute_queue, names="value")
        display(queue)

    def _on_select_compute_queue(self, queue_name):
        """Widget callback: remember the chosen queue and submit the experiment."""
        selected_queue_name = queue_name.new
        self.selected_queue_name = selected_queue_name
        print("selected compute resource queue ", self.selected_queue_name)
        self._run_on_hpc(self.input_path, self.selected_compute_resource_name,
                         self.selected_queue_name, output_path=self.output)

    def _unzip_and_delete(self, zip_file_path):
        """Extract *zip_file_path* next to itself, then remove the archive."""
        with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
            zip_ref.extractall(os.path.dirname(zip_file_path))
        os.remove(zip_file_path)
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..64bfed7
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,2 @@
+airavata-python-sdk==1.1.6
+ipywidgets
\ No newline at end of file
diff --git a/resources/BMTK_v1_demo/BMTK_V1.ipynb 
b/resources/BMTK_v1_demo/BMTK_V1.ipynb
new file mode 100644
index 0000000..962f6e0
--- /dev/null
+++ b/resources/BMTK_v1_demo/BMTK_V1.ipynb
@@ -0,0 +1,561 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABFsAAAEmCAYAAAC9JJ+wAAAKxmlDQ1BJQ0MgUHJvZmlsZQAASImVlwdUU+kSgP97b3qhBSIgJfQmSCeAlNADKL3aCEkgocSYEJpdWVzBtSAixbKCiyIKrgWQtSAWbItiw4JukEVEWRcLNlTeBR5h973z3jtvcub8Xybzz8x/z/w5cwGgEjhicQasAkCmKEsSGejDiE9IZOAHAASwgAqMAZbDlYpZ4eGhAJWp9e/y/h7qjcpt6/FY//77fxVVHl/KBQAKRzmZJ+Vmonwc1WGuWJIFALIPtRvlZInH+TLK6hK0QJR7xjl1kofHOXmCMZgJn+hIX5Q1ASBQOBxJKgAUY9TOyOamonEofijbinhCEcrod+DJFXB4KKN5wazMzCXjLEfZPPkvcVL/FjNZEZPDSVXw5FkmhOAnlIozOHn/5+P435KZIZvKYYoqRSAJikRX
 [...]
+      "text/plain": [
+       "<IPython.core.display.Image object>"
+      ]
+     },
+     "execution_count": 26,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from IPython import display\n",
+    "display.Image(\"model_simulation.png\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAC4sAAAchCAYAAAAPGOG9AAAgAElEQVR4XuydCdxW47r/bxmLrUQIGaMMyazsU0KK0+5oMo8hU0WkiGMeEtqnkjnDJplrSzKmCBl2psy2Mk8RwiFs+//5rf958vS8a637Ws+z1nqf9+17fT59nLOfe93D9x7Xu37XdS/z73//+98OgwAEIAABCEAAAhCAAAQgAAEIQAACEIAABCAAAQhAAAIQgAAEIAABCEAAAhCAAAQgAAEIQAACEIAABCAAgXpFYBnE4vWqP2kMBCAAAQhAAAIQgAAEIAABCEAAAhCAAAQgAAEIQAACEIAABCAAAQhAAAIQgAAEIAABCEAAAhCAAAQgAIGAAGJxBgIEIAABCEAAAhCAAAQgAAEIQAACEIAABCAAAQhAAAIQgAAEIAABCEAAAhCAAAQgAAEIQAACEIAABCAAgXpIALF4PexUmgQB
 [...]
+      "text/plain": [
+       "<IPython.core.display.Image object>"
+      ]
+     },
+     "execution_count": 27,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from IPython import display\n",
+    "display.Image(\"Computing_Platform-analyis.drawio.png\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAY4AAAGTCAYAAAA/TPU5AAAAAXNSR0IArs4c6QAARZ90RVh0bXhmaWxlACUzQ214ZmlsZSUyMGhvc3QlM0QlMjJhcHAuZGlhZ3JhbXMubmV0JTIyJTIwbW9kaWZpZWQlM0QlMjIyMDIzLTA2LTIxVDE0JTNBMzUlM0EyNi44NTFaJTIyJTIwYWdlbnQlM0QlMjJNb3ppbGxhJTJGNS4wJTIwKE1hY2ludG9zaCUzQiUyMEludGVsJTIwTWFjJTIwT1MlMjBYJTIwMTBfMTVfNyklMjBBcHBsZVdlYktpdCUyRjUzNy4zNiUyMChLSFRNTCUyQyUyMGxpa2UlMjBHZWNrbyklMjBDaHJvbWUlMkYxMTQuMC4wLjAlMjBTYWZhcmklMkY1MzcuMzYlMjIlMjBldGFnJTNEJTIyLU5SU01DeG02QjBFQWhld01rMEwl
 [...]
+      "text/plain": [
+       "<IPython.core.display.Image object>"
+      ]
+     },
+     "execution_count": 28,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from IPython import display\n",
+    "display.Image(\"neuro-apis.drawio.png\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 29,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAc0AAAEjCAYAAABQC1rvAAAAAXNSR0IArs4c6QAAGoh0RVh0bXhmaWxlACUzQ214ZmlsZSUyMGhvc3QlM0QlMjJhcHAuZGlhZ3JhbXMubmV0JTIyJTIwbW9kaWZpZWQlM0QlMjIyMDIzLTA2LTIxVDE1JTNBMzclM0EwNS42OTNaJTIyJTIwYWdlbnQlM0QlMjJNb3ppbGxhJTJGNS4wJTIwKE1hY2ludG9zaCUzQiUyMEludGVsJTIwTWFjJTIwT1MlMjBYJTIwMTBfMTVfNyklMjBBcHBsZVdlYktpdCUyRjUzNy4zNiUyMChLSFRNTCUyQyUyMGxpa2UlMjBHZWNrbyklMjBDaHJvbWUlMkYxMTQuMC4wLjAlMjBTYWZhcmklMkY1MzcuMzYlMjIlMjBldGFnJTNEJTIyYjFnaDZUWG1tTU9UTS16ZGVocjYl
 [...]
+      "text/plain": [
+       "<IPython.core.display.Image object>"
+      ]
+     },
+     "execution_count": 29,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "from IPython import display\n",
+    "display.Image(\"workflow.drawio.png\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 30,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "from cybershuttle.annotations.decorator import CybershuttleHPCRun\n",
+    "import pandas as pd\n",
+    "import numpy as np\n",
+    "import math\n",
+    "import json\n",
+    "import matplotlib.pyplot as plt\n",
+    "import h5py\n",
+    "import glob\n",
+    "import logging\n",
+    "import os\n",
+    "\n",
+    "\n",
+    "from bmtk.analyzer.spike_trains import plot_rates_boxplot, plot_rates, 
plot_raster\n",
+    "from bmtk.utils import sonata\n",
+    "from bmtk.utils.reports import SpikeTrains\n",
+    "\n",
+    "logger = 
logging.getLogger(\"airavata_sdk.clients.utils.experiment_handler_util\")\n",
+    "\n",
+    "logger.setLevel(logging.INFO)\n",
+    "logging.basicConfig(level=os.environ.get(\"LOGLEVEL\", \"INFO\"))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 31,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Click on Login URI  
https://neuroscience.cybershuttle.org/auth/login-desktop/?show-code=true\n";,
+      "Copy paste the access token········\n"
+     ]
+    }
+   ],
+   "source": [
+    "@CybershuttleHPCRun(output=\"./output\")\n",
+    "def load_v1_network_validate_run(input='./inputs'):\n",
+    "    net = sonata.File(\n",
+    "    data_files=input+'/network/l4_nodes.h5',\n",
+    "    data_type_files=input+'/network/l4_node_types.csv')\n",
+    "    l4_nodes_df = net.nodes['l4'].to_dataframe(index_by_id=False)\n",
+    "    l4_nodes_df[['layer', 'ei', 'model_name',  'dynamics_params', 
'model_template', 'model_type','node_id']]  \\\n",
+    "    .groupby(['layer', 'ei', 'model_name', 'model_template', 
'model_type', 'dynamics_params'])   \\\n",
+    "    .agg('count')  \\\n",
+    "    .rename(columns={'node_id':'node_count'})\n",
+    "    l4_edges_df = pd.read_csv(input+'/network/l4_l4_edge_types.csv', 
sep=' ').sort_values(['dynamics_params', 'target_query'])\n",
+    "    print(l4_nodes_df)\n",
+    "    with h5py.File(input+'/network/l4_l4_edges.h5', 'r') as h5:\n",
+    "      edge_ids, edge_counts = 
np.unique(h5['/edges/l4_to_l4/edge_type_id'][()], return_counts=True)\n",
+    "      edge_counts_df = pd.DataFrame({'edge_type_id': edge_ids, 'counts': 
edge_counts})\n",
+    "      l4_edges_df = l4_edges_df.merge(edge_counts_df, how='left', 
on='edge_type_id')\n",
+    "\n",
+    "    l4_edges_df.set_index(['edge_type_id', 'target_query', 
'source_query', 'dynamics_params'])\n",
+    "    print(l4_edges_df)\n",
+    "    print('Valid SONATA inputs:')\n",
+    "    for spikes_file in glob.glob(input+'/inputs/spikes.*.h5'):\n",
+    "       print('->', spikes_file)\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 32,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Executing user provided function...\n",
+      "      node_type_id  node_id  tuning_angle           x           y       
    z  \\\n",
+      "0              100        0      0.000000  119.485001  384.989275  
-56.577812   \n",
+      "1              100        1      3.050847  -73.669430  417.107342  
166.282127   \n",
+      "2              100        2      6.101695   54.743024  403.567362  
124.182278   \n",
+      "3              100        3      9.152542   14.297729  396.574152   
48.710376   \n",
+      "4              100        4     12.203390 -129.647833  347.253059  
-58.759062   \n",
+      "...            ...      ...           ...         ...         ...       
  ...   \n",
+      "2410           105     2410    353.513514   12.805991  358.970289 
-167.841390   \n",
+      "2411           105     2411    355.135135  -43.409817  316.332169  
134.711173   \n",
+      "2412           105     2412    356.756757   80.453753  324.341894   
-2.457055   \n",
+      "2413           105     2413    358.378378 -138.086114  364.519788   
11.219260   \n",
+      "2414           105     2414    360.000000  -72.441261  319.505839  
-50.285325   \n",
+      "\n",
+      "                         dynamics_params  layer    model_type cell_line 
 \\\n",
+      "0     501282204_glif_lif_asc_config.json  VisL4  point_neuron     i4Sst 
  \n",
+      "1     501282204_glif_lif_asc_config.json  VisL4  point_neuron     i4Sst 
  \n",
+      "2     501282204_glif_lif_asc_config.json  VisL4  point_neuron     i4Sst 
  \n",
+      "3     501282204_glif_lif_asc_config.json  VisL4  point_neuron     i4Sst 
  \n",
+      "4     501282204_glif_lif_asc_config.json  VisL4  point_neuron     i4Sst 
  \n",
+      "...                                  ...    ...           ...       ... 
  \n",
+      "2410  478958894_glif_lif_asc_config.json  VisL4  point_neuron   i4Pvalb 
  \n",
+      "2411  478958894_glif_lif_asc_config.json  VisL4  point_neuron   i4Pvalb 
  \n",
+      "2412  478958894_glif_lif_asc_config.json  VisL4  point_neuron   i4Pvalb 
  \n",
+      "2413  478958894_glif_lif_asc_config.json  VisL4  point_neuron   i4Pvalb 
  \n",
+      "2414  478958894_glif_lif_asc_config.json  VisL4  point_neuron   i4Pvalb 
  \n",
+      "\n",
+      "             model_template       model_name ei  \n",
+      "0     nest:glif_lif_asc_psc    Sst_501282204  i  \n",
+      "1     nest:glif_lif_asc_psc    Sst_501282204  i  \n",
+      "2     nest:glif_lif_asc_psc    Sst_501282204  i  \n",
+      "3     nest:glif_lif_asc_psc    Sst_501282204  i  \n",
+      "4     nest:glif_lif_asc_psc    Sst_501282204  i  \n",
+      "...                     ...              ... ..  \n",
+      "2410  nest:glif_lif_asc_psc  Pvalb_478958894  i  \n",
+      "2411  nest:glif_lif_asc_psc  Pvalb_478958894  i  \n",
+      "2412  nest:glif_lif_asc_psc  Pvalb_478958894  i  \n",
+      "2413  nest:glif_lif_asc_psc  Pvalb_478958894  i  \n",
+      "2414  nest:glif_lif_asc_psc  Pvalb_478958894  i  \n",
+      "\n",
+      "[2415 rows x 13 columns]\n",
+      "    edge_type_id           target_query           source_query  \\\n",
+      "0            107    cell_line=='e4Rorb'    cell_line=='e4Rorb'   \n",
+      "1            113    cell_line=='e4Rorb'  cell_line=='e4Scnn1a'   \n",
+      "2            125    cell_line=='e4Rorb'   cell_line=='e4other'   \n",
+      "3            108  cell_line=='e4Scnn1a'    cell_line=='e4Rorb'   \n",
+      "4            114  cell_line=='e4Scnn1a'  cell_line=='e4Scnn1a'   \n",
+      "5            126  cell_line=='e4Scnn1a'   cell_line=='e4other'   \n",
+      "6            110   cell_line=='e4other'    cell_line=='e4Rorb'   \n",
+      "7            116   cell_line=='e4other'  cell_line=='e4Scnn1a'   \n",
+      "8            128   cell_line=='e4other'   cell_line=='e4other'   \n",
+      "9            109   cell_line=='i4Htr3a'    cell_line=='e4Rorb'   \n",
+      "10           115   cell_line=='i4Htr3a'  cell_line=='e4Scnn1a'   \n",
+      "11           127   cell_line=='i4Htr3a'   cell_line=='e4other'   \n",
+      "12           111   cell_line=='i4Pvalb'    cell_line=='e4Rorb'   \n",
+      "13           117   cell_line=='i4Pvalb'  cell_line=='e4Scnn1a'   \n",
+      "14           129   cell_line=='i4Pvalb'   cell_line=='e4other'   \n",
+      "15           106     cell_line=='i4Sst'    cell_line=='e4Rorb'   \n",
+      "16           112     cell_line=='i4Sst'  cell_line=='e4Scnn1a'   \n",
+      "17           124     cell_line=='i4Sst'   cell_line=='e4other'   \n",
+      "18           101    cell_line=='e4Rorb'     cell_line=='i4Sst'   \n",
+      "19           119    cell_line=='e4Rorb'   cell_line=='i4Htr3a'   \n",
+      "20           131    cell_line=='e4Rorb'   cell_line=='i4Pvalb'   \n",
+      "21           102  cell_line=='e4Scnn1a'     cell_line=='i4Sst'   \n",
+      "22           120  cell_line=='e4Scnn1a'   cell_line=='i4Htr3a'   \n",
+      "23           132  cell_line=='e4Scnn1a'   cell_line=='i4Pvalb'   \n",
+      "24           104   cell_line=='e4other'     cell_line=='i4Sst'   \n",
+      "25           122   cell_line=='e4other'   cell_line=='i4Htr3a'   \n",
+      "26           134   cell_line=='e4other'   cell_line=='i4Pvalb'   \n",
+      "27           103   cell_line=='i4Htr3a'     cell_line=='i4Sst'   \n",
+      "28           121   cell_line=='i4Htr3a'   cell_line=='i4Htr3a'   \n",
+      "29           133   cell_line=='i4Htr3a'   cell_line=='i4Pvalb'   \n",
+      "30           105   cell_line=='i4Pvalb'     cell_line=='i4Sst'   \n",
+      "31           123   cell_line=='i4Pvalb'   cell_line=='i4Htr3a'   \n",
+      "32           135   cell_line=='i4Pvalb'   cell_line=='i4Pvalb'   \n",
+      "33           100     cell_line=='i4Sst'     cell_line=='i4Sst'   \n",
+      "34           118     cell_line=='i4Sst'   cell_line=='i4Htr3a'   \n",
+      "35           130     cell_line=='i4Sst'   cell_line=='i4Pvalb'   \n",
+      "\n",
+      "   dynamics_params  syn_weight  delay  weight_sigma       
weight_function  \\\n",
+      "0         e2e.json    5.154132    1.3          30.0      
DirectionRule_EE   \n",
+      "1         e2e.json    5.154132    1.3          30.0      
DirectionRule_EE   \n",
+      "2         e2e.json    5.154132    1.3          30.0      
DirectionRule_EE   \n",
+      "3         e2e.json   13.687266    1.3          30.0      
DirectionRule_EE   \n",
+      "4         e2e.json   13.687266    1.3          30.0      
DirectionRule_EE   \n",
+      "5         e2e.json   13.687266    1.3          30.0      
DirectionRule_EE   \n",
+      "6         e2e.json    5.443508    1.3          30.0      
DirectionRule_EE   \n",
+      "7         e2e.json    5.443508    1.3          30.0      
DirectionRule_EE   \n",
+      "8         e2e.json    5.443508    1.3          30.0      
DirectionRule_EE   \n",
+      "9         e2i.json    1.027934    1.5          90.0  
DirectionRule_others   \n",
+      "10        e2i.json    1.027934    1.5          90.0  
DirectionRule_others   \n",
+      "11        e2i.json    1.027934    1.5          90.0  
DirectionRule_others   \n",
+      "12        e2i.json    5.238083    1.2          90.0  
DirectionRule_others   \n",
+      "13        e2i.json    5.238083    1.2          90.0  
DirectionRule_others   \n",
+      "14        e2i.json    5.238083    1.2          90.0  
DirectionRule_others   \n",
+      "15        e2i.json    3.388120    1.5          90.0  
DirectionRule_others   \n",
+      "16        e2i.json    3.388120    1.5          90.0  
DirectionRule_others   \n",
+      "17        e2i.json    3.388120    1.5          90.0  
DirectionRule_others   \n",
+      "18        i2e.json   -0.258945    1.5          90.0  
DirectionRule_others   \n",
+      "19        i2e.json   -0.258945    1.5          90.0  
DirectionRule_others   \n",
+      "20        i2e.json   -0.571464    0.9          90.0  
DirectionRule_others   \n",
+      "21        i2e.json   -0.687651    1.5          90.0  
DirectionRule_others   \n",
+      "22        i2e.json   -0.687651    1.5          90.0  
DirectionRule_others   \n",
+      "23        i2e.json   -1.517574    0.9          90.0  
DirectionRule_others   \n",
+      "24        i2e.json   -0.273483    1.5          90.0  
DirectionRule_others   \n",
+      "25        i2e.json   -0.273483    1.5          90.0  
DirectionRule_others   \n",
+      "26        i2e.json   -0.603548    0.9          90.0  
DirectionRule_others   \n",
+      "27        i2i.json   -0.578256    1.5          50.0  
DirectionRule_others   \n",
+      "28        i2i.json   -0.411452    1.5          50.0  
DirectionRule_others   \n",
+      "29        i2i.json   -0.455933    1.2          50.0  
DirectionRule_others   \n",
+      "30        i2i.json   -1.120146    1.5          50.0  
DirectionRule_others   \n",
+      "31        i2i.json   -0.403253    1.5          50.0  
DirectionRule_others   \n",
+      "32        i2i.json   -1.523399    1.6          50.0  
DirectionRule_others   \n",
+      "33        i2i.json   -0.549796    1.5          50.0  
DirectionRule_others   \n",
+      "34        i2i.json   -1.172899    1.5          50.0  
DirectionRule_others   \n",
+      "35        i2i.json   -1.539430    1.2          50.0  
DirectionRule_others   \n",
+      "\n",
+      "    model_template  counts  \n",
+      "0   static_synapse   26060  \n",
+      "1   static_synapse   29915  \n",
+      "2   static_synapse   32074  \n",
+      "3   static_synapse   29609  \n",
+      "4   static_synapse   34341  \n",
+      "5   static_synapse   36462  \n",
+      "6   static_synapse   32077  \n",
+      "7   static_synapse   36294  \n",
+      "8   static_synapse   38328  \n",
+      "9   static_synapse    5802  \n",
+      "10  static_synapse    6578  \n",
+      "11  static_synapse    7004  \n",
+      "12  static_synapse   13020  \n",
+      "13  static_synapse   15298  \n",
+      "14  static_synapse   16067  \n",
+      "15  static_synapse   11195  \n",
+      "16  static_synapse   12668  \n",
+      "17  static_synapse   13479  \n",
+      "18  static_synapse    5559  \n",
+      "19  static_synapse    2907  \n",
+      "20  static_synapse   14115  \n",
+      "21  static_synapse    6451  \n",
+      "22  static_synapse    3283  \n",
+      "23  static_synapse   16103  \n",
+      "24  static_synapse    6640  \n",
+      "25  static_synapse    3573  \n",
+      "26  static_synapse   16844  \n",
+      "27  static_synapse    1953  \n",
+      "28  static_synapse      32  \n",
+      "29  static_synapse     952  \n",
+      "30  static_synapse    6944  \n",
+      "31  static_synapse      81  \n",
+      "32  static_synapse    6725  \n",
+      "33  static_synapse     414  \n",
+      "34  static_synapse    1547  \n",
+      "35  static_synapse     224  \n",
+      "Valid SONATA inputs:\n",
+      "-> ./inputs/inputs/spikes.gratings.45deg_2Hz.h5\n",
+      "-> ./inputs/inputs/spikes.gratings.45deg_4Hz.h5\n",
+      "-> ./inputs/inputs/spikes.gratings.90deg_4Hz.h5\n",
+      "-> ./inputs/inputs/spikes.gratings.0deg_2Hz.h5\n",
+      "-> ./inputs/inputs/spikes.gratings.90deg_2Hz.h5\n",
+      "-> ./inputs/inputs/spikes.gratings.0deg_4Hz.h5\n",
+      "Function execution succeeded...\n",
+      "select compute resource\n"
+     ]
+    },
+    {
+     "data": {
+      "application/vnd.jupyter.widget-view+json": {
+       "model_id": "d89d813f6a9540a6afbe541b019fa676",
+       "version_major": 2,
+       "version_minor": 0
+      },
+      "text/plain": [
+       "Dropdown(description='Select Compute Resources', options=('Bridges2', 
'Expanse'), value='Bridges2')"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Select Compute Resources Queue  Expanse\n"
+     ]
+    },
+    {
+     "data": {
+      "application/vnd.jupyter.widget-view+json": {
+       "model_id": "29b514f4e8104469a9fedd706c0859b6",
+       "version_major": 2,
+       "version_minor": 0
+      },
+      "text/plain": [
+       "Dropdown(description='Select Compute Resources Queue', 
options=('compute', 'gpu', 'gpu-debug', 'gpu-shared', '…"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "selected compute resource queue  shared\n",
+      "Experiment name:  allen_v1_ybxt\n",
+      "local_data_directory:  ./inputs\n",
+      "output_data_directory:  ./output\n",
+      "scheduled compute resource:  Expanse\n",
+      "scheduled queue:  shared\n",
+      "Creating experiment ....\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "INFO:airavata_sdk.clients:creating experiment allen_v1_ybxt\n",
+      "INFO:airavata_sdk.clients:connnecting to file upload endpoint 
gf4.ucs.indiana.edu : 9003\n",
+      "INFO:airavata_sdk.clients:Input files uploaded to 
[email protected]/Default_Project/allen_v1_ybxt_2023_06_21_12_20_03/\n",
+      "INFO:airavata_sdk.clients:configuring inputs ......\n",
+      "INFO:airavata_sdk.clients:experiment launched id: 
allen_v1_ybxt_6294f141-57a5-471f-a77a-5fd95a36a730\n",
+      "INFO:airavata_sdk.clients:For more information visit 
https://neuroscience-cybershuttle.org/workspace/experiments/allen_v1_ybxt_6294f141-57a5-471f-a77a-5fd95a36a730\n",
+      "INFO:airavata_sdk.clients:Initial state EXECUTING\n",
+      "INFO:airavata_sdk.clients:State EXECUTING\n",
+      "INFO:airavata_sdk.clients:State JOB_ACTIVE\n",
+      "INFO:airavata_sdk.clients:State JOB_ACTIVE\n",
+      "INFO:airavata_sdk.clients:State JOB_ACTIVE\n",
+      "INFO:airavata_sdk.clients:State COMPLETED\n",
+      "INFO:airavata_sdk.clients:Completed\n"
+     ]
+    }
+   ],
+   "source": [
+    "load_v1_network_validate_run(input='./inputs')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 33,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "def plot_tuning_angle_fr(input_path,spikes_path, fr_window=(500.0, 
3000.0), avg_window=15):\n",
+    "    dur_secs = (fr_window[1] - fr_window[0]) / 1000.0\n",
+    "\n",
+    "    net = sonata.File(\n",
+    "        data_files=input_path+'/network/l4_nodes.h5',\n",
+    "        data_type_files=input_path+'/network/l4_node_types.csv'\n",
+    "    )\n",
+    "    nodes_df = net.nodes['l4'].to_dataframe(index_by_id=False)\n",
+    "    nodes_df = nodes_df[['node_id', 'node_type_id', 'model_name', 
'tuning_angle', 'model_type', 'layer', 'ei']]\n",
+    "    \n",
+    "    spikes = SpikeTrains.load(spikes_path)\n",
+    "    spikes_df = spikes.to_dataframe(population='l4')\n",
+    "\n",
+    "    fr_df = 
spikes_df['node_ids'][spikes_df.timestamps.between(fr_window[0],fr_window[1])] 
\\\n",
+    "                                  .value_counts()  \\\n",
+    "                                  .rename_axis('node_id')  \\\n",
+    "                                  .reset_index(name='spike_counts')\n",
+    "    \n",
+    "    fr_df['firing_rates'] = fr_df['spike_counts'].values / dur_secs\n",
+    "    fr_df['node_id'] = fr_df['node_id'].astype(np.uint64)\n",
+    "    fr_df = fr_df.merge(nodes_df, how='right', on='node_id')       \n",
+    "    fr_df['spike_counts'] = fr_df['spike_counts'].fillna(0.0)   \n",
+    "    fr_df['firing_rates'] = fr_df['firing_rates'].fillna(0.0)\n",
+    "    \n",
+    "    def create_subplot(ax, grp_df, label):\n",
+    "        ax.scatter(grp_df['tuning_angle'], grp_df['firing_rates'], 
s=2)\n",
+    "        \n",
+    "        fr_avgs = pd.DataFrame({\n",
+    "            'tuning_rounded': np.floor(grp_df['tuning_angle']),\n",
+    "            'firing_rates': grp_df['firing_rates']\n",
+    "        }).groupby('tuning_rounded').agg(np.mean)\n",
+    "        \n",
+    "        if avg_window and len(fr_avgs['firing_rates']) > avg_window:\n",
+    "            tuning_even_spaced = pd.DataFrame({\n",
+    "                'tuning_rounded':np.arange(360)\n",
+    "            })\n",
+    "            fr_avgs=fr_avgs.merge(tuning_even_spaced, how='right', 
on='tuning_rounded')\n",
+    "            fr_avgs['firing_rates_avg'] = 
fr_avgs['firing_rates'].rolling(avg_window, min_periods=1).mean()\n",
+    "\n",
+    "        ax.plot(fr_avgs['firing_rates_avg'], c='r', linewidth=3, 
label=label)\n",
+    "        ax.legend(fontsize=10, loc='upper right')\n",
+    "        ax.set_xlabel('Tuning Angle (deg)')\n",
+    "        ax.set_ylabel('Firing Rate (Hz)')\n",
+    "\n",
+    "    # plot excitatory cells by layer\n",
+    "    nrows = 2\n",
+    "    fig, axes = plt.subplots(nrows, 1, figsize=(10, 5))\n",
+    "    for r, layer in enumerate(['VisL4']):\n",
+    "        exc_df = fr_df[(fr_df['ei'] == 'e') & (fr_df['layer'] == 
layer)]\n",
+    "        create_subplot(axes[r], exc_df, 'exc; {}'.format(layer))\n",
+    "\n",
+    "    # plot inhibitory cells\n",
+    "    inh_df = fr_df[fr_df['ei'] == 'i']\n",
+    "    create_subplot(axes[r+1], inh_df, 'inh; ALL')\n",
+    "\n",
+    "    for r in range(nrows):\n",
+    "        if r != (nrows - 1):\n",
+    "            axes[r].set_xticklabels([])\n",
+    "            \n",
+    "    plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 34,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA1cAAAHACAYAAABOPpIiAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAADth0lEQVR4nOydd3gU1frHv1vSk00IkEoSQi+hNxGkK4oNVOwK2BuiiFexAirYUK9XEPVKsf1UEJErYKPXSK8SSgKhpEFIr7s7vz+W2czOTt2Snd28n+fJA7t75pz3vKfNmZn3OzqGYRgQBEEQBEEQBEEQbqH3tQEEQRAEQRAEQRCBAG2uCIIgCIIgCIIgPABtrgiCIAiCIAiCIDwAba4IgiAIgiAIgiA8AG2uCIIgCIIgCIIgPABtrgiCIAiCIAiCIDwAba4IgiAIgiAIgiA8AG2uCIIgCIIgCIIgPIDR1wZoEavV
 [...]
+      "text/plain": [
+       "<Figure size 1000x500 with 2 Axes>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    
"plot_tuning_angle_fr('./inputs','./output/allen_v1_ybxt_2023_06_21_12_20_03/ARCHIVE/output_feedforward/spikes.h5')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "pycharm": {
+     "name": "#%%\n"
+    }
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "venv",
+   "language": "python",
+   "name": "venv"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
\ No newline at end of file
diff --git a/resources/BMTK_v1_demo/Computing_Platform-analyis.drawio.png 
b/resources/BMTK_v1_demo/Computing_Platform-analyis.drawio.png
new file mode 100644
index 0000000..f138ae1
Binary files /dev/null and 
b/resources/BMTK_v1_demo/Computing_Platform-analyis.drawio.png differ
diff --git a/resources/BMTK_v1_demo/model_simulation.png 
b/resources/BMTK_v1_demo/model_simulation.png
new file mode 100644
index 0000000..7cfaba2
Binary files /dev/null and b/resources/BMTK_v1_demo/model_simulation.png differ
diff --git a/resources/BMTK_v1_demo/neuro-apis.drawio.png 
b/resources/BMTK_v1_demo/neuro-apis.drawio.png
new file mode 100644
index 0000000..90b0dfb
Binary files /dev/null and b/resources/BMTK_v1_demo/neuro-apis.drawio.png differ
diff --git a/resources/BMTK_v1_demo/requirements.txt 
b/resources/BMTK_v1_demo/requirements.txt
new file mode 100644
index 0000000..5c537c2
--- /dev/null
+++ b/resources/BMTK_v1_demo/requirements.txt
@@ -0,0 +1,49 @@
+airavata-python-sdk==1.1.6
+attrs==23.1.0
+bcrypt==4.0.1
+bmtk==1.0.7
+certifi==2023.5.7
+cffi==1.15.1
+charset-normalizer==3.1.0
+contourpy==1.1.0
+cryptography==41.0.1
+cycler==0.11.0
+fonttools==4.40.0
+h5py==3.8.0
+idna==3.4
+imageio==2.31.1
+jsonschema==4.17.3
+kiwisolver==1.4.4
+lazy_loader==0.2
+matplotlib==3.7.1
+mpmath==1.3.0
+nest==0.4.1
+networkx==3.1
+numpy==1.24.3
+oauthlib==3.2.2
+packaging==23.1
+pandas==2.0.2
+paramiko==3.2.0
+Pillow==9.5.0
+pycparser==2.21
+PyJWT==2.7.0
+PyNaCl==1.5.0
+pyparsing==3.0.9
+pyrsistent==0.19.3
+python-dateutil==2.8.2
+pytz==2023.3
+PyWavelets==1.4.1
+requests==2.31.0
+requests-oauthlib==1.3.1
+scikit-image==0.21.0
+scipy==1.10.1
+scp==0.14.5
+six==1.16.0
+sympy==1.12
+thrift==0.16.0
+thrift-connector==0.24
+tifffile==2023.4.12
+tqdm==4.65.0
+tzdata==2023.3
+urllib3==2.0.3
+
diff --git a/resources/BMTK_v1_demo/settings.ini 
b/resources/BMTK_v1_demo/settings.ini
new file mode 100644
index 0000000..9df5953
--- /dev/null
+++ b/resources/BMTK_v1_demo/settings.ini
@@ -0,0 +1,44 @@
+[APIServer]
+API_HOST = 156.56.104.12
+API_PORT = 9930
+API_SECURE = True
+[Gateway]
+GATEWAY_ID = neuroscience-cybershuttle
+GATEWAY_DATA_STORE_RESOURCE_ID = gf4.ucs.indiana.edu_61552681-96f0-462a-a36c-a62a010bffc6
+GATEWAY_DATA_STORE_DIR = 
/var/www/portals/gateway-user-data/neuroscience-cybershuttle/
+GATEWAY_DATA_STORE_HOSTNAME = gf4.ucs.indiana.edu
+FILE_UPLOAD_TEMP_DIR = 
/var/www/portals/gateway-user-data/neuroscience-cybershuttle/tmp/
+[ProfileServer]
+PROFILE_SERVICE_HOST = 156.56.104.12
+PROFILE_SERVICE_PORT = 8962
+PROFILE_SERVICE_SECURE = False
+[SharingServer]
+SHARING_API_HOST = 156.56.104.12
+SHARING_API_PORT = 7878
+SHARING_API_SECURE = False
+[CredentialStoreServer]
+CREDENTIAL_STORE_API_HOST = 156.56.104.12
+CREDENTIAL_STORE_API_PORT = 8960
+CREDENTIAL_STORE_API_SECURE = True
+[Thrift]
+THRIFT_CLIENT_POOL_KEEPALIVE = 5
+[KeycloakServer]
+CLIENT_ID = CHANGE_ME
+CLIENT_SECRET = CHANGE_ME
+TOKEN_URL = 
https://iam.scigap.org/auth/realms/neuroscience-cybershuttle/protocol/openid-connect/token
+USER_INFO_URL = 
https://iam.scigap.org/auth/realms/neuroscience-cybershuttle/protocol/openid-connect/userinfo
+VERIFY_SSL = False
+CERTIFICATE_FILE_PATH = PATH_TO_CERTIFICATE
+LOGIN_DESKTOP_URI = 
https://neuroscience.cybershuttle.org/auth/login-desktop/?show-code=true
+[ExperimentConf]
+APPLICATION_NAME = BMTK_V1_L4
+PROJECT_NAME = Default Project
+COMPUTE_HOST_DOMAIN = Bridges2,Expanse
+GROUP_RESOURCE_PROFILE_NAME = Allen GRP
+STORAGE_RESOURCE_HOST = gf4.ucs.indiana.edu
+SFTP_PORT = 9003
+NODE_COUNT = 1
+TOTAL_CPU_COUNT = 1
+WALL_TIME_LIMIT = 30
+QUEUE_NAME = shared
+MONITOR_STATUS = True
diff --git a/resources/BMTK_v1_demo/workflow.drawio.png 
b/resources/BMTK_v1_demo/workflow.drawio.png
new file mode 100644
index 0000000..2e1a7ea
Binary files /dev/null and b/resources/BMTK_v1_demo/workflow.drawio.png differ
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..e69de29
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..f0909d6
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,29 @@
"""Packaging script for the cybershuttle-neuro-lib distribution."""

import os

from setuptools import setup, find_packages


def read(fname):
    """Return the text of *fname*, resolved relative to this setup.py.

    Used to load the README so the PyPI long description stays in sync
    with the repository documentation.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()


setup(
    name='cybershuttle-neuro-lib',
    version='0.0.1',
    packages=find_packages(),
    # Ship certificate and markdown files that live alongside the code.
    package_data={'': ['*.pem', '*.md']},
    include_package_data=True,
    url='https://neuroscience.cybershuttle.org/',
    license='Apache License 2.0',
    author='Cybershuttle Developers',
    author_email='[email protected]',
    install_requires=['airavata-python-sdk==1.1.6', 'ipywidgets'],
    description='Cybershuttle neuroscience libraries',
    # Single source for the long description: the project README.
    long_description=read("README.md"),
    long_description_content_type="text/markdown",
)
\ No newline at end of file

Reply via email to