ARIA-1 Parser test suite * This commit additionally fixes many parser bugs revealed by the test suite, which includes adding validations that were missing.
* A new "extensions" tox suite is introduced. * The /tests/parser cases were refactored into /tests/topology and /tests/extensions. * The Hello World example was fixed and refactored, as it in fact had invalid TOSCA (it previously passed due to a missing validation). * Parser performance was greatly improved by: 1. Switching to the YAML C library 2. Aggressive caching of parsed presentations 3. The ability to skip importing the TOSCA profile 4. The ability to skip validation of normative types 5. A new deepcopy_fast util 6. A new BlockingExecutor that is faster for single-threaded use * Unicode is now fully supported for all validation and log messages. This requires the use of unicode (u'' notation) for all .format specs. * Additionally, PyLint comment directives have been standardized by pushing them to column 100. Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/89b9f130 Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/89b9f130 Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/89b9f130 Branch: refs/heads/ARIA-1-parser-test-suite Commit: 89b9f130d698d1d80e10dcf7adbc9f52a83159ab Parents: e71ddc9 Author: Tal Liron <tal.li...@gmail.com> Authored: Thu Aug 17 17:50:27 2017 -0500 Committer: Tal Liron <tal.li...@gmail.com> Committed: Fri Nov 24 12:25:58 2017 -0600 ---------------------------------------------------------------------- .travis.yml | 22 +- Makefile | 3 + README.rst | 53 +- appveyor.yml | 1 + aria/__init__.py | 18 +- aria/cli/commands/services.py | 2 +- aria/cli/utils.py | 4 +- aria/modeling/functions.py | 2 +- aria/modeling/mixins.py | 4 +- aria/modeling/orchestration.py | 2 +- aria/modeling/service_common.py | 8 +- aria/modeling/service_instance.py | 2 +- aria/modeling/service_template.py | 4 +- aria/modeling/utils.py | 2 +- aria/orchestrator/context/common.py | 6 +- aria/orchestrator/context/operation.py | 12 +- 
aria/orchestrator/context/workflow.py | 4 +- aria/orchestrator/decorators.py | 2 +- aria/orchestrator/execution_plugin/common.py | 12 +- .../execution_plugin/ctx_proxy/client.py | 4 +- .../execution_plugin/ctx_proxy/server.py | 10 +- .../execution_plugin/instantiation.py | 20 +- aria/orchestrator/execution_plugin/local.py | 4 +- .../execution_plugin/ssh/operations.py | 46 +- .../orchestrator/execution_plugin/ssh/tunnel.py | 4 +- aria/orchestrator/plugin.py | 12 +- aria/orchestrator/topology/instance_handler.py | 106 +-- aria/orchestrator/topology/template_handler.py | 91 +- aria/orchestrator/topology/topology.py | 10 +- aria/orchestrator/workflows/api/task.py | 10 +- aria/orchestrator/workflows/api/task_graph.py | 8 +- .../workflows/builtin/execute_operation.py | 2 +- aria/orchestrator/workflows/core/engine.py | 2 +- .../workflows/core/events_handler.py | 4 +- .../workflows/core/graph_compiler.py | 4 +- aria/orchestrator/workflows/events_logging.py | 18 +- aria/orchestrator/workflows/exceptions.py | 8 +- aria/orchestrator/workflows/executor/celery.py | 2 +- aria/orchestrator/workflows/executor/dry.py | 6 +- aria/orchestrator/workflows/executor/process.py | 8 +- aria/orchestrator/workflows/executor/thread.py | 2 +- aria/parser/consumption/context.py | 6 +- aria/parser/consumption/inputs.py | 2 +- aria/parser/consumption/presentation.py | 252 +++-- aria/parser/consumption/validation.py | 2 +- aria/parser/loading/file.py | 15 +- aria/parser/loading/literal.py | 3 + aria/parser/loading/loader.py | 3 + aria/parser/loading/location.py | 27 +- aria/parser/loading/request.py | 14 +- aria/parser/loading/uri.py | 47 +- aria/parser/presentation/__init__.py | 14 +- aria/parser/presentation/context.py | 23 +- aria/parser/presentation/field_validators.py | 23 +- aria/parser/presentation/fields.py | 125 +-- aria/parser/presentation/presentation.py | 14 +- aria/parser/presentation/presenter.py | 8 +- aria/parser/presentation/source.py | 2 +- aria/parser/presentation/utils.py | 28 
+- aria/parser/reading/__init__.py | 5 +- aria/parser/reading/context.py | 3 - aria/parser/reading/exceptions.py | 6 - aria/parser/reading/jinja.py | 4 +- aria/parser/reading/json.py | 2 +- aria/parser/reading/locator.py | 20 +- aria/parser/reading/reader.py | 11 +- aria/parser/reading/source.py | 4 +- aria/parser/reading/yaml.py | 42 +- aria/parser/specification.py | 2 +- aria/parser/validation/issue.py | 22 +- aria/storage/filesystem_rapi.py | 4 +- aria/utils/caching.py | 2 +- aria/utils/collections.py | 31 +- aria/utils/formatting.py | 2 +- aria/utils/threading.py | 153 +++- aria/utils/uris.py | 2 +- aria/utils/versions.py | 4 +- .../clearwater/clearwater-single-existing.yaml | 12 +- examples/hello-world/hello-world.yaml | 34 +- examples/hello-world/scripts/start.sh | 2 +- .../use-cases/non-normative-types.yaml | 2 + .../profiles/aria-1.0/aria-1.0.yaml | 3 + .../profiles/tosca-simple-1.0/artifacts.yaml | 8 + .../profiles/tosca-simple-1.0/capabilities.yaml | 12 + .../profiles/tosca-simple-1.0/data.yaml | 14 + .../profiles/tosca-simple-1.0/groups.yaml | 1 + .../profiles/tosca-simple-1.0/interfaces.yaml | 3 + .../profiles/tosca-simple-1.0/nodes.yaml | 14 + .../profiles/tosca-simple-1.0/policies.yaml | 5 + .../tosca-simple-1.0/relationships.yaml | 8 + .../tosca-simple-nfv-1.0/artifacts.yaml | 1 + .../tosca-simple-nfv-1.0/capabilities.yaml | 3 + .../profiles/tosca-simple-nfv-1.0/data.yaml | 10 + .../profiles/tosca-simple-nfv-1.0/nodes.yaml | 25 +- .../tosca-simple-nfv-1.0/relationships.yaml | 2 + .../simple_nfv_v1_0/presenter.py | 4 +- .../simple_v1_0/__init__.py | 12 +- .../simple_v1_0/assignments.py | 27 +- .../simple_v1_0/data_types.py | 111 ++- .../simple_v1_0/definitions.py | 58 +- .../aria_extension_tosca/simple_v1_0/misc.py | 18 +- .../simple_v1_0/modeling/__init__.py | 56 +- .../simple_v1_0/modeling/capabilities.py | 56 +- .../simple_v1_0/modeling/copy.py | 2 +- .../simple_v1_0/modeling/data_types.py | 65 +- .../simple_v1_0/modeling/functions.py | 135 +-- 
.../simple_v1_0/modeling/groups.py | 43 + .../simple_v1_0/modeling/interfaces.py | 160 ++-- .../simple_v1_0/modeling/parameters.py | 45 +- .../simple_v1_0/modeling/requirements.py | 105 ++- .../modeling/substitution_mappings.py | 51 +- .../simple_v1_0/presentation/extensible.py | 2 +- .../simple_v1_0/presentation/field_getters.py | 22 +- .../presentation/field_validators.py | 142 ++- .../simple_v1_0/presentation/types.py | 2 +- .../simple_v1_0/presenter.py | 6 +- .../simple_v1_0/templates.py | 23 +- .../aria_extension_tosca/simple_v1_0/types.py | 52 +- requirements.in | 2 +- requirements.txt | 14 +- setup.py | 4 +- tests/end2end/test_hello_world.py | 2 +- tests/extensions/__init__.py | 14 + .../extensions/aria_extension_tosca/__init__.py | 14 + .../aria_extension_tosca/aria_v1_0/__init__.py | 14 + .../aria_v1_0/test_profile.py | 22 + .../extensions/aria_extension_tosca/conftest.py | 45 + .../simple_nfv_v1_0/__init__.py | 14 + .../simple_nfv_v1_0/test_profile.py | 20 + .../simple_v1_0/__init__.py | 14 + .../aria_extension_tosca/simple_v1_0/data.py | 82 ++ .../simple_v1_0/functions/__init__.py | 14 + .../functions/test_function_concat.py | 102 +++ .../functions/test_function_get_artifact.py | 156 ++++ .../functions/test_function_get_input.py | 94 ++ .../test_function_get_nodes_of_type.py | 70 ++ .../test_function_get_operation_output.py | 84 ++ .../functions/test_function_token.py | 119 +++ .../test_functions_modelable_entity.py | 247 +++++ .../simple_v1_0/templates/__init__.py | 14 + .../simple_v1_0/templates/common/__init__.py | 14 + .../simple_v1_0/templates/common/test_copy.py | 68 ++ .../templates/common/test_template_interface.py | 914 +++++++++++++++++++ .../common/test_template_parameters.py | 781 ++++++++++++++++ .../test_template_parameters_properties.py | 132 +++ .../templates/common/test_templates.py | 128 +++ .../templates/node_template/__init__.py | 14 + .../test_node_template_artifacts.py | 307 +++++++ .../test_node_template_directives.py | 77 ++ 
...est_node_template_node_filter_constraints.py | 346 +++++++ .../test_node_template_node_filters.py | 313 +++++++ .../test_node_template_requirements.py | 853 +++++++++++++++++ .../simple_v1_0/templates/test_group.py | 159 ++++ .../simple_v1_0/templates/test_policy.py | 272 ++++++ .../templates/test_substitution_mappings.py | 449 +++++++++ .../templates/test_topology_template.py | 61 ++ .../simple_v1_0/test_dsl_definitions.py | 47 + .../simple_v1_0/test_imports.py | 200 ++++ .../simple_v1_0/test_metadata.py | 98 ++ .../simple_v1_0/test_names.py | 57 ++ .../simple_v1_0/test_profile.py | 20 + .../simple_v1_0/test_repositories.py | 179 ++++ .../simple_v1_0/test_service_template.py | 22 + .../simple_v1_0/types/__init__.py | 14 + .../simple_v1_0/types/common/__init__.py | 14 + .../types/common/test_type_interfaces.py | 469 ++++++++++ .../types/common/test_type_parameters.py | 418 +++++++++ .../common/test_type_parameters_inheritance.py | 114 +++ .../common/test_type_parameters_properties.py | 312 +++++++ .../simple_v1_0/types/common/test_types.py | 185 ++++ .../simple_v1_0/types/node_type/__init__.py | 14 + .../node_type/test_node_type_capabilities.py | 302 ++++++ .../test_node_type_relationship_interfaces.py | 54 ++ .../node_type/test_node_type_requirements.py | 361 ++++++++ .../simple_v1_0/types/test_artifact_type.py | 74 ++ .../simple_v1_0/types/test_capability_type.py | 85 ++ .../simple_v1_0/types/test_data_type.py | 68 ++ .../simple_v1_0/types/test_group_type.py | 85 ++ .../simple_v1_0/types/test_interface_type.py | 149 +++ .../simple_v1_0/types/test_policy_type.py | 123 +++ .../simple_v1_0/types/test_relationship_type.py | 85 ++ tests/instantiation/__init__.py | 14 - tests/instantiation/test_configuration.py | 172 ---- tests/mechanisms/__init__.py | 14 + tests/mechanisms/parsing/__init__.py | 75 ++ tests/mechanisms/parsing/aria.py | 78 ++ tests/mechanisms/utils.py | 71 ++ tests/mechanisms/web_server.py | 84 ++ tests/parser/__init__.py | 14 - 
tests/parser/service_templates.py | 86 -- tests/parser/test_reqs_caps.py | 29 - tests/parser/test_tosca_simple_v1_0/__init__.py | 14 - .../presentation/__init__.py | 0 .../presentation/test_types.py | 23 - .../test_tosca_simple_v1_0/test_end2end.py | 112 --- tests/parser/utils.py | 67 -- tests/requirements.txt | 3 +- .../node-cellar/node-cellar.yaml | 8 +- .../types/shorthand-1/shorthand-1.yaml | 23 - .../types/typequalified-1/typequalified-1.yaml | 23 - tests/topology/__init__.py | 14 + tests/topology/service_templates.py | 70 ++ tests/topology/test_configuration.py | 173 ++++ tests/topology/test_end2end.py | 112 +++ tests/topology/test_reqs_caps.py | 29 + tests/topology/utils.py | 69 ++ tests/utils/test_versions.py | 8 +- tox.ini | 41 +- 208 files changed, 12177 insertions(+), 1674 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/.travis.yml ---------------------------------------------------------------------- diff --git a/.travis.yml b/.travis.yml index b264ab3..c6c63fa 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,7 +13,6 @@ # We need to set "sudo: true" in order to use a virtual machine instead of a container, because # SSH tests fail in the container. See: # https://docs.travis-ci.com/user/reference/overview/#Virtualization-environments - dist: trusty sudo: true @@ -23,16 +22,17 @@ python: - '2.7' env: - # The PYTEST_PROCESSES environment var is used in tox.ini to override the --numprocesses argument - # for PyTest's xdist plugin. The reason this is necessary is that conventional Travis environments - # may report a large amount of available CPUs, but they they are greatly restricted. Through trial - # and error we found that more than 1 process may result in failures. 
- - PYTEST_PROCESSES=1 TOX_ENV=pylint_core - - PYTEST_PROCESSES=1 TOX_ENV=pylint_tests - - PYTEST_PROCESSES=1 TOX_ENV=core - - PYTEST_PROCESSES=1 TOX_ENV=e2e - - PYTEST_PROCESSES=1 TOX_ENV=ssh - - PYTEST_PROCESSES=1 TOX_ENV=docs + # The CONCURRENCY environment var is used in tox.ini to override the --numprocesses argument + # for PyTest's xdist plugin and the --jobs argument for PyLint. The reason this is necessary is + # that in automatic concurrency mode the Travis environments may report a large amount of + # available cores, but concurrent tests may fail. + - CONCURRENCY=1 TOX_ENV=pylint_core + - CONCURRENCY=1 TOX_ENV=pylint_tests + - CONCURRENCY=1 TOX_ENV=core + - CONCURRENCY=1 TOX_ENV=extensions + - CONCURRENCY=1 TOX_ENV=e2e + - CONCURRENCY=1 TOX_ENV=ssh + - CONCURRENCY=1 TOX_ENV=docs before_install: # Create SSH keys for SSH tests http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/Makefile ---------------------------------------------------------------------- diff --git a/Makefile b/Makefile index e68538e..584ca45 100644 --- a/Makefile +++ b/Makefile @@ -31,6 +31,7 @@ clean: -find . -maxdepth 1 -type f -name '.coverage' -delete -find . -type f -name '*.pyc' -delete -find . -type d -name '__pycache__' -prune -exec rm -rf {} \; 2>/dev/null + -find . 
-type d -name '*.egg-info' -exec rm -rf {} \; 2>/dev/null install: pip install .[ssh] @@ -54,10 +55,12 @@ test: tox -e pylint_core \ -e pylint_tests \ -e core \ + -e extensions \ -e e2e \ -e ssh \ -e docs ./requirements.txt: ./requirements.in pip install --upgrade "pip-tools>=1.9.0" + rm ./requirements.txt pip-compile --output-file ./requirements.txt ./requirements.in http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/README.rst ---------------------------------------------------------------------- diff --git a/README.rst b/README.rst index c905277..42b5fb3 100644 --- a/README.rst +++ b/README.rst @@ -63,7 +63,7 @@ and run:: yum install -y python-devel gcc libffi-devel openssl-devel pip install apache-ariatosca[ssh] -**Archlinux**:: +**Arch Linux**:: pacman -Syu --noconfirm python2 gcc libffi openssl pip2 install apache-ariatosca[ssh] @@ -73,7 +73,7 @@ and run:: # no additional system requirements are needed pip install apache-ariatosca[ssh] -**MacOS**:: +**macOS**:: # TODO @@ -112,26 +112,14 @@ To uninstall and clean your environment, follow these steps:: aria service-templates delete my-service-template -Contribution ------------- - -You are welcome and encouraged to participate and contribute to the ARIA project. - -Please see our guide to -`Contributing to ARIA -<https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA>`__. - -Feel free to also provide feedback on the mailing lists (see `Resources <#user-content-resources>`__ -section). 
- - Resources --------- -- `ARIA homepage <http://ariatosca.incubator.apache.org/>`__ -- `ARIA wiki <https://cwiki.apache.org/confluence/display/AriaTosca>`__ -- `Issue tracker <https://issues.apache.org/jira/browse/ARIA>`__ -- `ARIA revisions released <https://dist.apache.org/repos/dist/dev/incubator/ariatosca//>`__ +- `Main site <http://ariatosca.incubator.apache.org/>`__ +- `API and CLI documentation <http://ariatosca.incubator.apache.org/docs/html/>`__ +- `Wiki <https://cwiki.apache.org/confluence/display/AriaTosca>`__ +- `Releases <https://dist.apache.org/repos/dist/dev/incubator/ariatosca//>`__ +- `Issue tracker <https://issues.apache.org/jira/browse/ARIA>`__ - Dev mailing list: d...@ariatosca.incubator.apache.org - User mailing list: u...@ariatosca.incubator.apache.org @@ -150,6 +138,27 @@ License ARIA is licensed under the `Apache License 2.0 <https://github.com/apache/incubator-ariatosca/blob/master/LICENSE>`__. + +Contribution +------------ + +You are welcome and encouraged to participate and contribute to the ARIA project. + +Please see our guide to +`Contributing to ARIA +<https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA>`__. + +Feel free to also provide feedback on the mailing lists (see `Resources <#user-content-resources>`__ +section). + + +Code of Conduct +--------------- + +The ARIA TOSCA Project follows +`the Apache Code of Conduct <https://www.apache.org/foundation/policies/conduct.html>`__. + + .. |Build Status| image:: https://img.shields.io/travis/apache/incubator-ariatosca/master.svg :target: https://travis-ci.org/apache/incubator-ariatosca .. |Appveyor Build Status| image:: https://img.shields.io/appveyor/ci/ApacheSoftwareFoundation/incubator-ariatosca/master.svg @@ -165,9 +174,3 @@ ARIA is licensed under the :target: https://github.com/apache/incubator-ariatosca/pulls .. 
|Closed Pull Requests| image:: https://img.shields.io/github/issues-pr-closed-raw/apache/incubator-ariatosca.svg :target: https://github.com/apache/incubator-ariatosca/pulls?q=is%3Apr+is%3Aclosed - - -Code of Conduct ---------------- - -The ARIA TOSCA Project follows `The Apache Code of Conduct <https://www.apache.org/foundation/policies/conduct.html>`__. http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/appveyor.yml ---------------------------------------------------------------------- diff --git a/appveyor.yml b/appveyor.yml index f7d70e6..3e0385a 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -20,6 +20,7 @@ environment: - PYTHON: "C:\\Python27" PYTHON_VERSION: 2.7.8 PYTHON_ARCH: 32 + CONCURRENCY: 1 build: false http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/__init__.py ---------------------------------------------------------------------- diff --git a/aria/__init__.py b/aria/__init__.py index 9bd6f8f..acaf81b 100644 --- a/aria/__init__.py +++ b/aria/__init__.py @@ -23,9 +23,8 @@ import pkg_resources aria_package_name = 'apache-ariatosca' __version__ = pkg_resources.get_distribution(aria_package_name).version - -from .orchestrator.decorators import workflow, operation # pylint: disable=wrong-import-position -from . import ( # pylint: disable=wrong-import-position +from .orchestrator.decorators import (workflow, operation) # pylint: disable=wrong-import-position +from . import ( # pylint: disable=wrong-import-position extension, utils, parser, @@ -47,14 +46,11 @@ __all__ = ( def install_aria_extensions(strict=True): """ - Iterates all Python packages with names beginning with ``aria_extension_`` and all - ``aria_extension`` entry points and loads them. - - It then invokes all registered extension functions. + Loads all Python packages with names beginning with ``aria_extension_`` and calls their + ``aria_extension`` initialization entry points if they have them. 
- :param strict: if set to ``True``, Tries to load extensions with - dependency versions under consideration. Otherwise tries to load the - required package without version consideration. Defaults to True. + :param strict: if ``True`` tries to load extensions while taking into account the versions + of their dependencies, otherwise ignores versions :type strict: bool """ for loader, module_name, _ in iter_modules(): @@ -62,7 +58,7 @@ def install_aria_extensions(strict=True): loader.find_module(module_name).load_module(module_name) for entry_point in pkg_resources.iter_entry_points(group='aria_extension'): # It should be possible to enable non strict loading - use the package - # that is already installed inside the environment, and forgo the + # that is already installed inside the environment, and forego the # version demand if strict: entry_point.load() http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/cli/commands/services.py ---------------------------------------------------------------------- diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py index 6752899..32622a9 100644 --- a/aria/cli/commands/services.py +++ b/aria/cli/commands/services.py @@ -137,7 +137,7 @@ def list(service_template_name, @aria.pass_logger def create(service_template_name, service_name, - inputs, # pylint: disable=redefined-outer-name + inputs, # pylint: disable=redefined-outer-name model_storage, resource_storage, plugin_manager, http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/cli/utils.py ---------------------------------------------------------------------- diff --git a/aria/cli/utils.py b/aria/cli/utils.py index 697ff37..1b5d666 100644 --- a/aria/cli/utils.py +++ b/aria/cli/utils.py @@ -58,7 +58,7 @@ def check_overriding_storage_exceptions(e, model_class, name): 'There already a exists a {model_class} with the same name' \ .format(model_class=model_class, name=name, linesep=os.linesep) trace = 
sys.exc_info()[2] - raise type(e), type(e)(new_message), trace # pylint: disable=raising-non-exception + raise type(e), type(e)(new_message), trace # pylint: disable=raising-non-exception def download_file(url): @@ -107,7 +107,7 @@ def generate_progress_handler(file_path, action='', max_bar_length=80): filled_length = min(bar_length, int(round(bar_length * read_bytes / float(total_bytes)))) percents = min(100.00, round(100.00 * (read_bytes / float(total_bytes)), 2)) - bar = '#' * filled_length + '-' * (bar_length - filled_length) # pylint: disable=blacklisted-name + bar = '#' * filled_length + '-' * (bar_length - filled_length) # pylint: disable=blacklisted-name # The \r caret makes sure the cursor moves back to the beginning of the line sys.stdout.write('\r{0} {1} |{2}| {3}%'.format(action, file_name, bar, percents)) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/modeling/functions.py ---------------------------------------------------------------------- diff --git a/aria/modeling/functions.py b/aria/modeling/functions.py index 554bbfb..f3f0f22 100644 --- a/aria/modeling/functions.py +++ b/aria/modeling/functions.py @@ -66,7 +66,7 @@ class Evaluation(object): self.final = final -def evaluate(value, container_holder, report_issues=False): # pylint: disable=too-many-branches +def evaluate(value, container_holder, report_issues=False): # pylint: disable=too-many-branches """ Recursively attempts to call ``__evaluate__``. If an evaluation occurred will return an :class:`Evaluation`, otherwise it will be ``None``. 
If any evaluation is non-final, then the http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/modeling/mixins.py ---------------------------------------------------------------------- diff --git a/aria/modeling/mixins.py b/aria/modeling/mixins.py index d58c25a..eb1ac83 100644 --- a/aria/modeling/mixins.py +++ b/aria/modeling/mixins.py @@ -201,7 +201,7 @@ class ParameterMixin(TemplateModelMixin, caching.HasCachedMethods): @property @caching.cachedmethod - def container(self): # pylint: disable=too-many-return-statements,too-many-branches + def container(self): # pylint: disable=too-many-return-statements,too-many-branches """ The logical container for this parameter, which would be another model: service, node, group, or policy (or their templates). @@ -319,7 +319,7 @@ class ParameterMixin(TemplateModelMixin, caching.HasCachedMethods): type_name = canonical_type_name(value) if type_name is None: type_name = full_type_name(value) - return cls(name=name, # pylint: disable=unexpected-keyword-arg + return cls(name=name, # pylint: disable=unexpected-keyword-arg type_name=type_name, value=value, description=description) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/modeling/orchestration.py ---------------------------------------------------------------------- diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py index 4d4f0fe..da91295 100644 --- a/aria/modeling/orchestration.py +++ b/aria/modeling/orchestration.py @@ -436,7 +436,7 @@ class TaskBase(mixins.ModelMixin): return self.node or self.relationship @orm.validates('max_attempts') - def validate_max_attempts(self, _, value): # pylint: disable=no-self-use + def validate_max_attempts(self, _, value): # pylint: disable=no-self-use """ Validates that max attempts is either -1 or a positive number. 
""" http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/modeling/service_common.py ---------------------------------------------------------------------- diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py index d1f6b00..6ca80ee 100644 --- a/aria/modeling/service_common.py +++ b/aria/modeling/service_common.py @@ -22,7 +22,8 @@ ARIA modeling service common module from sqlalchemy import ( Column, Text, - Boolean + Boolean, + PickleType ) from sqlalchemy.ext.declarative import declared_attr @@ -90,7 +91,7 @@ class InputBase(ParameterMixin): """) @classmethod - def wrap(cls, name, value, description=None, required=True): # pylint: disable=arguments-differ + def wrap(cls, name, value, description=None, required=True): # pylint: disable=arguments-differ input = super(InputBase, cls).wrap(name, value, description) input.required = required return input @@ -587,12 +588,11 @@ class MetadataBase(TemplateModelMixin): :ivar name: name :vartype name: basestring :ivar value: value - :vartype value: basestring """ __tablename__ = 'metadata' - value = Column(Text) + value = Column(PickleType) @property def as_raw(self): http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/modeling/service_instance.py ---------------------------------------------------------------------- diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py index 01c4da9..b0e426c 100644 --- a/aria/modeling/service_instance.py +++ b/aria/modeling/service_instance.py @@ -319,7 +319,7 @@ class NodeBase(InstanceModelMixin): # region one_to_one relationships @declared_attr - def host(cls): # pylint: disable=method-hidden + def host(cls): # pylint: disable=method-hidden """ Node in which we are hosted (can be ``None``). 
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/modeling/service_template.py ---------------------------------------------------------------------- diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py index cd0adb4..0933407 100644 --- a/aria/modeling/service_template.py +++ b/aria/modeling/service_template.py @@ -1415,7 +1415,7 @@ class InterfaceTemplateBase(TemplateModelMixin): ('name', self.name), ('description', self.description), ('type_name', self.type.name), - ('inputs', formatting.as_raw_dict(self.inputs)), # pylint: disable=no-member + ('inputs', formatting.as_raw_dict(self.inputs)), # pylint: disable=no-member # TODO fix self.properties reference ('operation_templates', formatting.as_raw_list(self.operation_templates)))) @@ -1714,7 +1714,7 @@ class PluginSpecificationBase(TemplateModelMixin): return relationship.many_to_one(cls, 'service_template') @declared_attr - def plugin(cls): # pylint: disable=method-hidden + def plugin(cls): # pylint: disable=method-hidden """ Matched plugin. 
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/modeling/utils.py ---------------------------------------------------------------------- diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py index 6e851f2..1b6b375 100644 --- a/aria/modeling/utils.py +++ b/aria/modeling/utils.py @@ -35,7 +35,7 @@ class ModelJSONEncoder(JSONEncoder): # Just here to make sure Sphinx doesn't grab the base constructor's docstring super(ModelJSONEncoder, self).__init__(*args, **kwargs) - def default(self, o): # pylint: disable=method-hidden + def default(self, o): # pylint: disable=method-hidden from .mixins import ModelMixin if isinstance(o, ModelMixin): if hasattr(o, 'value'): http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/context/common.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py index 3c5f618..90205fd 100644 --- a/aria/orchestrator/context/common.py +++ b/aria/orchestrator/context/common.py @@ -108,9 +108,9 @@ class BaseContext(object): execution_id=self._execution_id) def __repr__(self): - return ( - '{name}(name={self.name}, ' - 'deployment_id={self._service_id}, ' + return ( # pylint: disable=redundant-keyword-arg + u'{name}(name={self.name}, ' + u'deployment_id={self._service_id}, ' .format(name=self.__class__.__name__, self=self)) @contextmanager http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/context/operation.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py index 8613ec3..7f6612e 100644 --- a/aria/orchestrator/context/operation.py +++ b/aria/orchestrator/context/operation.py @@ -40,10 +40,10 @@ class BaseOperationContext(common.BaseContext): self._register_logger(task_id=self.task.id, level=logger_level) def __repr__(self): 
- details = 'function={task.function}; ' \ - 'operation_arguments={task.arguments}'\ + details = u'function={task.function}; ' \ + u'operation_arguments={task.arguments}'\ .format(task=self.task) - return '{name}({0})'.format(details, name=self.name) + return u'{name}({0})'.format(details, name=self.name) @property def task(self): @@ -65,9 +65,9 @@ class BaseOperationContext(common.BaseContext): """ if self.task.plugin is None: return None - plugin_workdir = '{0}/plugins/{1}/{2}'.format(self._workdir, - self.service.id, - self.task.plugin.name) + plugin_workdir = u'{0}/plugins/{1}/{2}'.format(self._workdir, + self.service.id, + self.task.plugin.name) file.makedirs(plugin_workdir) return plugin_workdir http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/context/workflow.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py index 738d2fd..5a323a6 100644 --- a/aria/orchestrator/context/workflow.py +++ b/aria/orchestrator/context/workflow.py @@ -73,7 +73,7 @@ class WorkflowContext(BaseContext): """ Iterates over nodes templates. """ - key = 'service_{0}'.format(self.model.node_template.model_cls.name_column_name()) + key = u'service_{0}'.format(self.model.node_template.model_cls.name_column_name()) return self.model.node_template.iter( filters={ @@ -86,7 +86,7 @@ class WorkflowContext(BaseContext): """ Iterates over nodes. 
""" - key = 'service_{0}'.format(self.model.node.model_cls.name_column_name()) + key = u'service_{0}'.format(self.model.node.model_cls.name_column_name()) return self.model.node.iter( filters={ key: getattr(self.service, self.service.name_column_name()) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/decorators.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/decorators.py b/aria/orchestrator/decorators.py index 4b163d6..4de0397 100644 --- a/aria/orchestrator/decorators.py +++ b/aria/orchestrator/decorators.py @@ -80,6 +80,6 @@ def operation(func=None, toolbelt=False, suffix_template='', logging_handlers=No def _generate_name(func_name, ctx, suffix_template, **custom_kwargs): - return '{func_name}.{suffix}'.format( + return u'{func_name}.{suffix}'.format( func_name=func_name, suffix=suffix_template.format(ctx=ctx, **custom_kwargs) or generate_uuid(variant='uuid')) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/execution_plugin/common.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/execution_plugin/common.py b/aria/orchestrator/execution_plugin/common.py index ce6746c..1c279d3 100644 --- a/aria/orchestrator/execution_plugin/common.py +++ b/aria/orchestrator/execution_plugin/common.py @@ -35,13 +35,13 @@ def download_script(ctx, script_path): split = script_path.split('://') schema = split[0] suffix = script_path.split('/')[-1] - file_descriptor, dest_script_path = tempfile.mkstemp(suffix='-{0}'.format(suffix)) + file_descriptor, dest_script_path = tempfile.mkstemp(suffix=u'-{0}'.format(suffix)) os.close(file_descriptor) try: if schema in ('http', 'https'): response = requests.get(script_path) if response.status_code == 404: - ctx.task.abort('Failed to download script: {0} (status code: {1})' + ctx.task.abort(u'Failed to download script: {0} (status code: {1})' 
.format(script_path, response.status_code)) content = response.text with open(dest_script_path, 'wb') as f: @@ -84,7 +84,7 @@ def create_process_config(script_path, process, operation_kwargs, quote_json_env if isinstance(v, (dict, list, tuple, bool, int, float)): v = json.dumps(v) if quote_json_env_vars: - v = "'{0}'".format(v) + v = u"'{0}'".format(v) if is_windows(): # These <k,v> environment variables will subsequently # be used in a subprocess.Popen() call, as the `env` parameter. @@ -102,9 +102,9 @@ def create_process_config(script_path, process, operation_kwargs, quote_json_env command = script_path command_prefix = process.get('command_prefix') if command_prefix: - command = '{0} {1}'.format(command_prefix, command) + command = u'{0} {1}'.format(command_prefix, command) if args: - command = ' '.join([command] + [str(a) for a in args]) + command = u' '.join([command] + [str(a) for a in args]) process['command'] = command return process @@ -150,5 +150,5 @@ def check_error(ctx, error_check_func=None, reraise=False): error_check_func() # if this function is called from within an ``except`` clause, a re-raise maybe required if reraise: - raise # pylint: disable=misplaced-bare-raise + raise # pylint: disable=misplaced-bare-raise return _error http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/execution_plugin/ctx_proxy/client.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/client.py b/aria/orchestrator/execution_plugin/ctx_proxy/client.py index 84d66f1..a569c78 100644 --- a/aria/orchestrator/execution_plugin/ctx_proxy/client.py +++ b/aria/orchestrator/execution_plugin/ctx_proxy/client.py @@ -32,7 +32,7 @@ CTX_SOCKET_URL = 'CTX_SOCKET_URL' class _RequestError(RuntimeError): def __init__(self, ex_message, ex_type, ex_traceback): - super(_RequestError, self).__init__(self, '{0}: {1}'.format(ex_type, ex_message)) + super(_RequestError, 
self).__init__(self, u'{0}: {1}'.format(ex_type, ex_message)) self.ex_type = ex_type self.ex_message = ex_message self.ex_traceback = ex_traceback @@ -45,7 +45,7 @@ def _http_request(socket_url, request, method, timeout): response = opener.open(request, timeout=timeout) if response.code != 200: - raise RuntimeError('Request failed: {0}'.format(response)) + raise RuntimeError(u'Request failed: {0}'.format(response)) return json.loads(response.read()) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/execution_plugin/ctx_proxy/server.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/aria/orchestrator/execution_plugin/ctx_proxy/server.py index 91b95d9..d8aa8fb 100644 --- a/aria/orchestrator/execution_plugin/ctx_proxy/server.py +++ b/aria/orchestrator/execution_plugin/ctx_proxy/server.py @@ -37,7 +37,7 @@ class CtxProxy(object): self.ctx = ctx self._ctx_patcher = ctx_patcher self.port = _get_unused_port() - self.socket_url = 'http://localhost:{0}'.format(self.port) + self.socket_url = 'http://localhost:{0:d}'.format(self.port) self.server = None self._started = Queue.Queue(1) self.thread = self._start_server() @@ -73,7 +73,7 @@ class CtxProxy(object): def address_string(self): return self.client_address[0] - def log_request(*args, **kwargs): # pylint: disable=no-method-argument + def log_request(*args, **kwargs): # pylint: disable=no-method-argument if not self.quiet: return wsgiref.simple_server.WSGIRequestHandler.log_request(*args, **kwargs) @@ -110,7 +110,7 @@ class CtxProxy(object): self.server.server_close() def _request_handler(self): - request = bottle.request.body.read() # pylint: disable=no-member + request = bottle.request.body.read() # pylint: disable=no-member response = self._process(request) return bottle.LocalResponse( body=json.dumps(response, cls=modeling.utils.ModelJSONEncoder), @@ -195,7 +195,7 @@ def 
_process_arguments(obj, args): # Modify object attribute setattr(obj, modifying_key, modifying_value) else: - raise CtxError('Cannot modify `{0}` of `{1!r}`'.format(modifying_key, obj)) + raise CtxError(u'Cannot modify `{0}` of `{1!r}`'.format(modifying_key, obj)) return obj @@ -233,7 +233,7 @@ def _process_next_operation(obj, args, modifying): obj[arg] = {} return obj[arg], args - raise CtxParsingError('Cannot parse argument: `{0!r}`'.format(arg)) + raise CtxParsingError(u'Cannot parse argument: `{0!r}`'.format(arg)) def _get_unused_port(): http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/execution_plugin/instantiation.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/execution_plugin/instantiation.py b/aria/orchestrator/execution_plugin/instantiation.py index 8b52015..d859043 100644 --- a/aria/orchestrator/execution_plugin/instantiation.py +++ b/aria/orchestrator/execution_plugin/instantiation.py @@ -64,8 +64,8 @@ def _configure_local(operation): """ from . 
import operations - operation.function = '{0}.{1}'.format(operations.__name__, - operations.run_script_locally.__name__) + operation.function = u'{0}.{1}'.format(operations.__name__, + operations.run_script_locally.__name__) def _configure_remote(operation, reporter): @@ -105,7 +105,7 @@ def _configure_remote(operation, reporter): # Make sure we have a user if fabric_env.get('user') is None: - reporter.report('must configure "ssh.user" for "{0}"'.format(operation.implementation), + reporter.report(u'must configure "ssh.user" for "{0}"'.format(operation.implementation), level=reporter.Issue.BETWEEN_TYPES) # Make sure we have an authentication value @@ -120,8 +120,8 @@ def _configure_remote(operation, reporter): operation.arguments['fabric_env'] = Argument.wrap('fabric_env', fabric_env, 'Fabric configuration.') - operation.function = '{0}.{1}'.format(operations.__name__, - operations.run_script_with_ssh.__name__) + operation.function = u'{0}.{1}'.format(operations.__name__, + operations.run_script_with_ssh.__name__) def _get_process(operation, reporter): @@ -144,7 +144,7 @@ def _get_process(operation, reporter): elif k == 'env': _validate_type(v, dict, 'process.env', reporter) else: - reporter.report('unsupported configuration parameter: "process.{0}"'.format(k), + reporter.report(u'unsupported configuration parameter: "process.{0}"'.format(k), level=reporter.Issue.BETWEEN_TYPES) return value @@ -175,7 +175,7 @@ def _get_ssh(operation, reporter): elif k == 'address': _validate_type(v, basestring, 'ssh.address', reporter) else: - reporter.report('unsupported configuration parameter: "ssh.{0}"'.format(k), + reporter.report(u'unsupported configuration parameter: "ssh.{0}"'.format(k), level=reporter.Issue.BETWEEN_TYPES) return value @@ -185,7 +185,7 @@ def _validate_type(value, the_type, name, reporter): return if not isinstance(value, the_type): reporter.report( - '"{0}" configuration is not a {1}: {2}'.format( + u'"{0}" configuration is not a {1}: {2}'.format( name, 
utils.type.full_type_name(the_type), utils.formatting.safe_repr(value)), level=reporter.Issue.BETWEEN_TYPES) @@ -202,7 +202,7 @@ def _coerce_bool(value, name, reporter): return False else: reporter.report( - '"{0}" configuration is not "true" or "false": {1}'.format( + u'"{0}" configuration is not "true" or "false": {1}'.format( name, utils.formatting.safe_repr(value)), level=reporter.Issue.BETWEEN_TYPES) @@ -212,6 +212,6 @@ def _dict_to_list_of_strings(the_dict, name, reporter): value = [] for k in sorted(the_dict): v = the_dict[k] - _validate_type(v, basestring, '{0}.{1}'.format(name, k), reporter) + _validate_type(v, basestring, u'{0}.{1}'.format(name, k), reporter) value.append(v) return value http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/execution_plugin/local.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/execution_plugin/local.py b/aria/orchestrator/execution_plugin/local.py index 04b9ecd..abb5b52 100644 --- a/aria/orchestrator/execution_plugin/local.py +++ b/aria/orchestrator/execution_plugin/local.py @@ -78,7 +78,7 @@ def _execute_func(script_path, ctx, process, operation_kwargs): command = process['command'] env = os.environ.copy() env.update(process['env']) - ctx.logger.info('Executing: {0}'.format(command)) + ctx.logger.info(u'Executing: {0}'.format(command)) with ctx_proxy.server.CtxProxy(ctx, common.patch_ctx) as proxy: env[ctx_proxy.client.CTX_SOCKET_URL] = proxy.socket_url running_process = subprocess.Popen( @@ -95,7 +95,7 @@ def _execute_func(script_path, ctx, process, operation_kwargs): exit_code = running_process.wait() stdout_consumer.join() stderr_consumer.join() - ctx.logger.info('Execution done (exit_code={0}): {1}'.format(exit_code, command)) + ctx.logger.info(u'Execution done (exit_code={0}): {1}'.format(exit_code, command)) def error_check_func(): if exit_code: 
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/execution_plugin/ssh/operations.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/execution_plugin/ssh/operations.py b/aria/orchestrator/execution_plugin/ssh/operations.py index c40e783..759f1d2 100644 --- a/aria/orchestrator/execution_plugin/ssh/operations.py +++ b/aria/orchestrator/execution_plugin/ssh/operations.py @@ -48,7 +48,7 @@ def run_commands(ctx, commands, fabric_env, use_sudo, hide_output, **_): with fabric.api.settings(_hide_output(ctx, groups=hide_output), **_fabric_env(ctx, fabric_env, warn_only=True)): for command in commands: - ctx.logger.info('Running command: {0}'.format(command)) + ctx.logger.info(u'Running command: {0}'.format(command)) run = fabric.api.sudo if use_sudo else fabric.api.run result = run(command) if result.failed: @@ -70,8 +70,8 @@ def run_script(ctx, script_path, fabric_env, process, use_sudo, hide_output, **k # there may be race conditions with other operations that # may be running in parallel, so we pass -p to make sure # we get 0 exit code if the directory already exists - fabric.api.run('mkdir -p {0} && mkdir -p {1}'.format(paths.remote_scripts_dir, - paths.remote_work_dir)) + fabric.api.run(u'mkdir -p {0} && mkdir -p {1}'.format(paths.remote_scripts_dir, + paths.remote_work_dir)) # this file has to be present before using ctx fabric.api.put(_PROXY_CLIENT_PATH, paths.remote_ctx_path) process = common.create_process_config( @@ -82,7 +82,7 @@ def run_script(ctx, script_path, fabric_env, process, use_sudo, hide_output, **k fabric.api.put(paths.local_script_path, paths.remote_script_path) with ctx_proxy.server.CtxProxy(ctx, _patch_ctx) as proxy: local_port = proxy.port - with fabric.context_managers.cd(process.get('cwd', paths.remote_work_dir)): # pylint: disable=not-context-manager + with fabric.context_managers.cd(process.get('cwd', paths.remote_work_dir)): # pylint: 
disable=not-context-manager with tunnel.remote(ctx, local_port=local_port) as remote_port: local_socket_url = proxy.socket_url remote_socket_url = local_socket_url.replace(str(local_port), str(remote_port)) @@ -93,8 +93,8 @@ def run_script(ctx, script_path, fabric_env, process, use_sudo, hide_output, **k remote_socket_url=remote_socket_url) fabric.api.put(env_script, paths.remote_env_script_path) try: - command = 'source {0} && {1}'.format(paths.remote_env_script_path, - process['command']) + command = u'source {0} && {1}'.format(paths.remote_env_script_path, + process['command']) run = fabric.api.sudo if use_sudo else fabric.api.run run(command) except exceptions.TaskException: @@ -136,8 +136,8 @@ def _hide_output(ctx, groups): """ Hides Fabric's output for every 'entity' in `groups` """ groups = set(groups or []) if not groups.issubset(constants.VALID_FABRIC_GROUPS): - ctx.task.abort('`hide_output` must be a subset of {0} (Provided: {1})' - .format(', '.join(constants.VALID_FABRIC_GROUPS), ', '.join(groups))) + ctx.task.abort(u'`hide_output` must be a subset of {0} (Provided: {1})' + .format(u', '.join(constants.VALID_FABRIC_GROUPS), u', '.join(groups))) return fabric.api.hide(*groups) @@ -165,16 +165,16 @@ def _fabric_env(ctx, fabric_env, warn_only): def _write_environment_script_file(process, paths, local_socket_url, remote_socket_url): env_script = StringIO.StringIO() env = process['env'] - env['PATH'] = '{0}:$PATH'.format(paths.remote_ctx_dir) - env['PYTHONPATH'] = '{0}:$PYTHONPATH'.format(paths.remote_ctx_dir) - env_script.write('chmod +x {0}\n'.format(paths.remote_script_path)) - env_script.write('chmod +x {0}\n'.format(paths.remote_ctx_path)) + env['PATH'] = u'{0}:$PATH'.format(paths.remote_ctx_dir) + env['PYTHONPATH'] = u'{0}:$PYTHONPATH'.format(paths.remote_ctx_dir) + env_script.write(u'chmod +x {0}\n'.format(paths.remote_script_path)) + env_script.write(u'chmod +x {0}\n'.format(paths.remote_ctx_path)) env.update({ ctx_proxy.client.CTX_SOCKET_URL: 
remote_socket_url, - 'LOCAL_{0}'.format(ctx_proxy.client.CTX_SOCKET_URL): local_socket_url + u'LOCAL_{0}'.format(ctx_proxy.client.CTX_SOCKET_URL): local_socket_url }) for key, value in env.iteritems(): - env_script.write('export {0}={1}\n'.format(key, value)) + env_script.write(u'export {0}={1}\n'.format(key, value)) return env_script @@ -184,12 +184,12 @@ class _Paths(object): self.local_script_path = local_script_path self.remote_ctx_dir = base_dir self.base_script_path = os.path.basename(self.local_script_path) - self.remote_ctx_path = '{0}/ctx'.format(self.remote_ctx_dir) - self.remote_scripts_dir = '{0}/scripts'.format(self.remote_ctx_dir) - self.remote_work_dir = '{0}/work'.format(self.remote_ctx_dir) - random_suffix = ''.join(random.choice(string.ascii_lowercase + string.digits) - for _ in range(8)) - remote_path_suffix = '{0}-{1}'.format(self.base_script_path, random_suffix) - self.remote_env_script_path = '{0}/env-{1}'.format(self.remote_scripts_dir, - remote_path_suffix) - self.remote_script_path = '{0}/{1}'.format(self.remote_scripts_dir, remote_path_suffix) + self.remote_ctx_path = u'{0}/ctx'.format(self.remote_ctx_dir) + self.remote_scripts_dir = u'{0}/scripts'.format(self.remote_ctx_dir) + self.remote_work_dir = u'{0}/work'.format(self.remote_ctx_dir) + random_suffix = u''.join(random.choice(string.ascii_lowercase + string.digits) + for _ in range(8)) + remote_path_suffix = u'{0}-{1}'.format(self.base_script_path, random_suffix) + self.remote_env_script_path = u'{0}/env-{1}'.format(self.remote_scripts_dir, + remote_path_suffix) + self.remote_script_path = u'{0}/{1}'.format(self.remote_scripts_dir, remote_path_suffix) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/execution_plugin/ssh/tunnel.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/execution_plugin/ssh/tunnel.py b/aria/orchestrator/execution_plugin/ssh/tunnel.py index e76d525..05ea4ed 100644 --- 
a/aria/orchestrator/execution_plugin/ssh/tunnel.py +++ b/aria/orchestrator/execution_plugin/ssh/tunnel.py @@ -64,10 +64,10 @@ def remote(ctx, local_port, remote_port=0, local_host='localhost', remote_bind_a try: channel.close() except Exception as ex2: - close_error = ' (While trying to close channel: {0})'.format(ex2) + close_error = u' (While trying to close channel: {0})'.format(ex2) else: close_error = '' - ctx.task.abort('[{0}] rtunnel: cannot connect to {1}:{2} ({3}){4}' + ctx.task.abort(u'[{0}] rtunnel: cannot connect to {1}:{2} ({3}){4}' .format(fabric.api.env.host_string, local_host, local_port, e, close_error)) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/plugin.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/plugin.py b/aria/orchestrator/plugin.py index 756a28e..4f29e4f 100644 --- a/aria/orchestrator/plugin.py +++ b/aria/orchestrator/plugin.py @@ -67,8 +67,8 @@ class PluginManager(object): if len(self._model.plugin.list(filters={'package_name': plugin.package_name, 'package_version': plugin.package_version})): raise exceptions.PluginAlreadyExistsError( - 'Plugin {0}, version {1} already exists'.format(plugin.package_name, - plugin.package_version)) + u'Plugin {0}, version {1} already exists'.format(plugin.package_name, + plugin.package_version)) self._install_wagon(source=source, prefix=self.get_plugin_dir(plugin)) self._model.plugin.put(plugin) return plugin @@ -120,8 +120,8 @@ class PluginManager(object): """ if not zipfile.is_zipfile(source): raise exceptions.InvalidPluginError( - 'Archive {0} is of an unsupported type. Only ' - 'zip/wgn is allowed'.format(source)) + u'Archive {0} is of an unsupported type. 
Only ' + u'zip/wgn is allowed'.format(source)) with zipfile.ZipFile(source, 'r') as zip_file: infos = zip_file.infolist() try: @@ -130,8 +130,8 @@ class PluginManager(object): zip_file.getinfo(package_json_path) except (KeyError, ValueError, IndexError): raise exceptions.InvalidPluginError( - 'Failed to validate plugin {0} ' - '(package.json was not found in archive)'.format(source)) + u'Failed to validate plugin {0} ' + u'(package.json was not found in archive)'.format(source)) def _install_wagon(self, source, prefix): pip_freeze_output = self._pip_freeze() http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/topology/instance_handler.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/topology/instance_handler.py b/aria/orchestrator/topology/instance_handler.py index 51f26c6..fad00b9 100644 --- a/aria/orchestrator/topology/instance_handler.py +++ b/aria/orchestrator/topology/instance_handler.py @@ -34,18 +34,18 @@ class Artifact(common.InstanceHandlerBase): out_stream.write(out_stream.node_style(self._model.name)) out_stream.write(out_stream.meta_style(self._model.description)) with out_stream.indent(): - out_stream.write('Artifact type: {0}'.format(out_stream.type_style( + out_stream.write(u'Artifact type: {0}'.format(out_stream.type_style( self._model.type.name))) - out_stream.write('Source path: {0}'.format( + out_stream.write(u'Source path: {0}'.format( out_stream.literal_style(self._model.source_path))) if self._model.target_path is not None: - out_stream.write('Target path: {0}'.format( + out_stream.write(u'Target path: {0}'.format( out_stream.literal_style(self._model.target_path))) if self._model.repository_url is not None: - out_stream.write('Repository URL: {0}'.format( + out_stream.write(u'Repository URL: {0}'.format( out_stream.literal_style(self._model.repository_url))) if self._model.repository_credential: - out_stream.write('Repository credential: 
{0}'.format( + out_stream.write(u'Repository credential: {0}'.format( out_stream.literal_style(self._model.repository_credential))) self._topology.dump(self._model.properties, out_stream, title='Properties') @@ -60,11 +60,11 @@ class Capability(common.InstanceHandlerBase): def dump(self, out_stream): out_stream.write(out_stream.node_style(self._model.name)) with out_stream.indent(): - out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name))) - out_stream.write('Occurrences: {0:d} ({1:d}{2})'.format( + out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name))) + out_stream.write(u'Occurrences: {0:d} ({1:d}{2})'.format( self._model.occurrences, self._model.min_occurrences or 0, - ' to {0:d}'.format(self._model.max_occurrences) + u' to {0:d}'.format(self._model.max_occurrences) if self._model.max_occurrences is not None else ' or more')) self._topology.dump(self._model.properties, out_stream, title='Properties') @@ -81,9 +81,9 @@ class Group(common.ActorHandlerBase): **kwargs) def dump(self, out_stream): - out_stream.write('Group: {0}'.format(out_stream.node_style(self._model.name))) + out_stream.write(u'Group: {0}'.format(out_stream.node_style(self._model.name))) with out_stream.indent(): - out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name))) + out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name))) self._topology.dump(self._model.properties, out_stream, title='Properties') self._topology.dump(self._model.interfaces, out_stream, title='Interfaces') if self._model.nodes: @@ -111,7 +111,7 @@ class Interface(common.ActorHandlerBase): if self._model.description: out_stream.write(out_stream.meta_style(self._model.description)) with out_stream.indent(): - out_stream.write('Interface type: {0}'.format( + out_stream.write(u'Interface type: {0}'.format( out_stream.type_style(self._model.type.name))) self._topology.dump(self._model.inputs, out_stream, title='Inputs') 
self._topology.dump(self._model.operations, out_stream, title='Operations') @@ -134,7 +134,7 @@ class Node(common.ActorHandlerBase): def validate(self, **kwargs): if len(self._model.name) > context.ID_MAX_LENGTH: self._topology.report( - '"{0}" has an ID longer than the limit of {1:d} characters: {2:d}'.format( + u'"{0}" has an ID longer than the limit of {1:d} characters: {2:d}'.format( self._model.name, context.ID_MAX_LENGTH, len(self._model.name)), level=self._topology.Issue.BETWEEN_INSTANCES) @@ -146,10 +146,10 @@ class Node(common.ActorHandlerBase): self._model.outbound_relationships) def dump(self, out_stream): - out_stream.write('Node: {0}'.format(out_stream.node_style(self._model.name))) + out_stream.write(u'Node: {0}'.format(out_stream.node_style(self._model.name))) with out_stream.indent(): - out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name))) - out_stream.write('Template: {0}'.format( + out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name))) + out_stream.write(u'Template: {0}'.format( out_stream.node_style(self._model.node_template.name))) self._topology.dump(self._model.properties, out_stream, title='Properties') self._topology.dump(self._model.attributes, out_stream, title='Attributes') @@ -170,11 +170,11 @@ class Node(common.ActorHandlerBase): for capability in self._model.capabilities.itervalues(): if not capability.has_enough_relationships: self._topology.report( - 'capability "{0}" of node "{1}" requires at least {2:d} ' - 'relationships but has {3:d}'.format(capability.name, - self._model.name, - capability.min_occurrences, - capability.occurrences), + u'capability "{0}" of node "{1}" requires at least {2:d} ' + u'relationships but has {3:d}'.format(capability.name, + self._model.name, + capability.min_occurrences, + capability.occurrences), level=self._topology.Issue.BETWEEN_INSTANCES) satisfied = False return satisfied @@ -197,8 +197,8 @@ class Node(common.ActorHandlerBase): satisfied = 
self._satisfy_capability( target_node_capability, target_node_template, requirement_template) else: - self._topology.report('requirement "{0}" of node "{1}" has no target node template'. - format(requirement_template.name, self._model.name), + self._topology.report(u'requirement "{0}" of node "{1}" has no target node template' + .format(requirement_template.name, self._model.name), level=self._topology.Issue.BETWEEN_INSTANCES) satisfied = False return satisfied @@ -237,16 +237,16 @@ class Node(common.ActorHandlerBase): return True else: self._topology.report( - 'requirement "{0}" of node "{1}" targets node ' - 'template "{2}" but its instantiated nodes do not ' - 'have enough capacity'.format( + u'requirement "{0}" of node "{1}" targets node ' + u'template "{2}" but its instantiated nodes do not ' + u'have enough capacity'.format( requirement_template.name, self._model.name, target_node_template.name), level=self._topology.Issue.BETWEEN_INSTANCES) return False else: self._topology.report( - 'requirement "{0}" of node "{1}" targets node template ' - '"{2}" but it has no instantiated nodes'.format( + u'requirement "{0}" of node "{1}" targets node template ' + u'"{2}" but it has no instantiated nodes'.format( requirement_template.name, self._model.name, target_node_template.name), level=self._topology.Issue.BETWEEN_INSTANCES) return False @@ -258,8 +258,8 @@ class Node(common.ActorHandlerBase): if not self._model.node_template.is_target_node_template_valid( requirement_template.target_node_template): self._topology.report( - 'requirement "{0}" of node template "{1}" is for node ' - 'template "{2}" but it does not match constraints'.format( + u'requirement "{0}" of node template "{1}" is for node ' + u'template "{2}" but it does not match constraints'.format( requirement_template.name, requirement_template.target_node_template.name, self._model.node_template.name), @@ -359,28 +359,28 @@ class Operation(common.ActorHandlerBase): 
out_stream.write(out_stream.meta_style(self._model.description)) with out_stream.indent(): if self._model.implementation is not None: - out_stream.write('Implementation: {0}'.format( + out_stream.write(u'Implementation: {0}'.format( out_stream.literal_style(self._model.implementation))) if self._model.dependencies: out_stream.write( - 'Dependencies: {0}'.format(', '.join((str(out_stream.literal_style(v)) - for v in self._model.dependencies)))) + u'Dependencies: {0}'.format(u', '.join((str(out_stream.literal_style(v)) + for v in self._model.dependencies)))) self._topology.dump(self._model.inputs, out_stream, title='Inputs') if self._model.executor is not None: - out_stream.write('Executor: {0}'.format(out_stream.literal_style( + out_stream.write(u'Executor: {0}'.format(out_stream.literal_style( self._model.executor))) if self._model.max_attempts is not None: - out_stream.write('Max attempts: {0}'.format(out_stream.literal_style( + out_stream.write(u'Max attempts: {0}'.format(out_stream.literal_style( self._model.max_attempts))) if self._model.retry_interval is not None: - out_stream.write('Retry interval: {0}'.format( + out_stream.write(u'Retry interval: {0}'.format( out_stream.literal_style(self._model.retry_interval))) if self._model.plugin is not None: - out_stream.write('Plugin: {0}'.format( + out_stream.write(u'Plugin: {0}'.format( out_stream.literal_style(self._model.plugin.name))) self._topology.dump(self._model.configurations, out_stream, title='Configuration') if self._model.function is not None: - out_stream.write('Function: {0}'.format(out_stream.literal_style( + out_stream.write(u'Function: {0}'.format(out_stream.literal_style( self._model.function))) self._topology.dump(self._model.arguments, out_stream, title='Arguments') @@ -418,7 +418,7 @@ class Operation(common.ActorHandlerBase): self._model.arguments.keys()) if used_reserved_names: self._topology.report( - 'using reserved arguments in operation "{0}": {1}'.format( + u'using reserved arguments in 
operation "{0}": {1}'.format( self._model.name, formatting.string_list_as_string(used_reserved_names)), level=self._topology.Issue.EXTERNAL) @@ -431,9 +431,9 @@ class Policy(common.InstanceHandlerBase): self._topology.validate(self._model.properties, **kwargs) def dump(self, out_stream): - out_stream.write('Policy: {0}'.format(out_stream.node_style(self._model.name))) + out_stream.write(u'Policy: {0}'.format(out_stream.node_style(self._model.name))) with out_stream.indent(): - out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name))) + out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name))) self._topology.dump(self._model.properties, out_stream, title='Properties') if self._model.nodes: out_stream.write('Target nodes:') @@ -460,21 +460,21 @@ class Relationship(common.ActorHandlerBase): def dump(self, out_stream): if self._model.name: - out_stream.write('{0} ->'.format(out_stream.node_style(self._model.name))) + out_stream.write(u'{0} ->'.format(out_stream.node_style(self._model.name))) else: out_stream.write('->') with out_stream.indent(): - out_stream.write('Node: {0}'.format(out_stream.node_style( + out_stream.write(u'Node: {0}'.format(out_stream.node_style( self._model.target_node.name))) if self._model.target_capability: - out_stream.write('Capability: {0}'.format(out_stream.node_style( + out_stream.write(u'Capability: {0}'.format(out_stream.node_style( self._model.target_capability.name))) if self._model.type is not None: - out_stream.write('Relationship type: {0}'.format( + out_stream.write(u'Relationship type: {0}'.format( out_stream.type_style(self._model.type.name))) if (self._model.relationship_template is not None and self._model.relationship_template.name): - out_stream.write('Relationship template: {0}'.format( + out_stream.write(u'Relationship template: {0}'.format( out_stream.node_style(self._model.relationship_template.name))) self._topology.dump(self._model.properties, out_stream, 
title='Properties') self._topology.dump(self._model.interfaces, out_stream, title='Interfaces') @@ -549,7 +549,7 @@ class Substitution(common.InstanceHandlerBase): def dump(self, out_stream): out_stream.write('Substitution:') with out_stream.indent(): - out_stream.write('Node type: {0}'.format(out_stream.type_style( + out_stream.write(u'Node type: {0}'.format(out_stream.type_style( self._model.node_type.name))) self._topology.dump(self._model.mappings, out_stream, title='Mappings') @@ -562,19 +562,19 @@ class SubstitutionMapping(common.InstanceHandlerBase): def validate(self, **_): if (self._model.capability is None) and (self._model.requirement_template is None): self._topology.report( - 'mapping "{0}" refers to neither capability nor a requirement' - ' in node: {1}'.format( + u'mapping "{0}" refers to neither capability nor a requirement' + u' in node: {1}'.format( self._model.name, formatting.safe_repr(self._model.node_style.name)), level=self._topology.Issue.BETWEEN_TYPES) def dump(self, out_stream): if self._model.capability is not None: - out_stream.write('{0} -> {1}.{2}'.format( + out_stream.write(u'{0} -> {1}.{2}'.format( out_stream.node_style(self._model.name), out_stream.node_style(self._model.capability.node.name), out_stream.node_style(self._model.capability.name))) else: - out_stream.write('{0} -> {1}.{2}'.format( + out_stream.write(u'{0} -> {1}.{2}'.format( out_stream.node_style(self._model.name), out_stream.node_style(self._model.node.name), out_stream.node_style(self._model.requirement_template.name))) @@ -583,7 +583,7 @@ class SubstitutionMapping(common.InstanceHandlerBase): class Metadata(common.InstanceHandlerBase): def dump(self, out_stream): - out_stream.write('{0}: {1}'.format( + out_stream.write(u'{0}: {1}'.format( out_stream.property_style(self._model.name), out_stream.literal_style(self._model.value))) @@ -601,12 +601,12 @@ class _Parameter(common.InstanceHandlerBase): def dump(self, out_stream): if self._model.type_name is not None: - 
out_stream.write('{0}: {1} ({2})'.format( + out_stream.write(u'{0}: {1} ({2})'.format( out_stream.property_style(self._model.name), out_stream.literal_style(formatting.as_raw(self._model.value)), out_stream.type_style(self._model.type_name))) else: - out_stream.write('{0}: {1}'.format( + out_stream.write(u'{0}: {1}'.format( out_stream.property_style(self._model.name), out_stream.literal_style(formatting.as_raw(self._model.value)))) if self._model.description: