Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-1-parser-test-suite ce4a18371 -> 0cf1deafc
Fixes Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/0cf1deaf Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/0cf1deaf Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/0cf1deaf Branch: refs/heads/ARIA-1-parser-test-suite Commit: 0cf1deafc25bb21377be5f0153ae639f877af8a3 Parents: ce4a183 Author: Tal Liron <tal.li...@gmail.com> Authored: Fri Nov 24 12:22:52 2017 -0600 Committer: Tal Liron <tal.li...@gmail.com> Committed: Fri Nov 24 12:22:52 2017 -0600 ---------------------------------------------------------------------- aria/__init__.py | 5 +- aria/modeling/service_instance.py | 2 +- aria/parser/consumption/presentation.py | 50 +- aria/parser/presentation/presentation.py | 1 + aria/parser/reading/yaml.py | 11 +- aria/utils/collections.py | 25 +- .../simple_v1_0/assignments.py | 19 +- .../simple_v1_0/modeling/__init__.py | 14 +- .../simple_v1_0/modeling/data_types.py | 6 +- test_ssh.py | 528 ------------------- .../extensions/aria_extension_tosca/conftest.py | 3 + tests/mechanisms/parsing/__init__.py | 10 +- tests/mechanisms/utils.py | 15 +- 13 files changed, 110 insertions(+), 579 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/aria/__init__.py ---------------------------------------------------------------------- diff --git a/aria/__init__.py b/aria/__init__.py index acaf81b..980a2bb 100644 --- a/aria/__init__.py +++ b/aria/__init__.py @@ -57,9 +57,8 @@ def install_aria_extensions(strict=True): if module_name.startswith('aria_extension_'): loader.find_module(module_name).load_module(module_name) for entry_point in pkg_resources.iter_entry_points(group='aria_extension'): - # It should be possible to enable non strict loading - use the package - # that is already installed inside the environment, and forego the - # version demand + # It should be possible to enable non strict loading - use the package that is already + # installed inside the environment, and forgo the version demand if strict: entry_point.load() else: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/aria/modeling/service_instance.py ---------------------------------------------------------------------- diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py index b0e426c..21c1029 100644 --- a/aria/modeling/service_instance.py +++ b/aria/modeling/service_instance.py @@ -510,7 +510,7 @@ class NodeBase(InstanceModelMixin): @classmethod def determine_state(cls, op_name, is_transitional): """ - :returns the state the node should be in as a result of running the operation on this node. + :return: the state the node should be in as a result of running the operation on this node. E.g. 
if we are running tosca.interfaces.node.lifecycle.Standard.create, then the resulting state should either 'creating' (if the task just started) or 'created' http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/aria/parser/consumption/presentation.py ---------------------------------------------------------------------- diff --git a/aria/parser/consumption/presentation.py b/aria/parser/consumption/presentation.py index b1f943d..0f0b380 100644 --- a/aria/parser/consumption/presentation.py +++ b/aria/parser/consumption/presentation.py @@ -46,19 +46,19 @@ class Read(Consumer): def consume(self): # Present the main location and all imports recursively - main, results = self._present_all() + main_result, all_results = self._present_all() # Merge presentations - main.merge(results, self.context) + main_result.merge(all_results, self.context) # Cache merged presentations if self.context.presentation.cache: - for result in results: + for result in all_results: result.cache() - self.context.presentation.presenter = main.presentation - if main.canonical_location is not None: - self.context.presentation.location = main.canonical_location + self.context.presentation.presenter = main_result.presentation + if main_result.canonical_location is not None: + self.context.presentation.location = main_result.canonical_location def dump(self): if self.context.has_arg_switch('yaml'): @@ -73,11 +73,18 @@ class Read(Consumer): self.context.presentation.presenter._dump(self.context) def _handle_exception(self, e): - if isinstance(e, _Skip): + if isinstance(e, _CancelPresentation): return super(Read, self)._handle_exception(e) def _present_all(self): + """ + Presents all locations, including all nested imports, from the main location. Uses a thread + pool executor for best performance. + + The main presentation is returned separately for easier access. + """ + location = self.context.presentation.location if location is None: @@ -87,7 +94,7 @@ class Read(Consumer): executor = self.context.presentation.create_executor() try: # This call may recursively submit tasks to the executor if there are imports - main = self._present(location, None, None, executor) + main_result = self._present(location, None, None, executor) # Wait for all tasks to complete executor.drain() @@ -96,15 +103,22 @@ class Read(Consumer): for e in executor.exceptions: self._handle_exception(e) - results = executor.returns or [] + all_results = executor.returns or [] finally: executor.close() - results.insert(0, main) + all_results.insert(0, main_result) - return main, results + return main_result, all_results def _present(self, location, origin_canonical_location, origin_presenter_class, executor): + """ + Presents a single location. If the location has imports, those are submitted to the thread + pool executor. + + Supports a presentation cache based on the canonical location as cache key. + """ + # Link the context to this thread self.context.set_thread_local() @@ -118,7 +132,7 @@ class Read(Consumer): # Skip self imports if canonical_location == origin_canonical_location: - raise _Skip() + raise _CancelPresentation() if self.context.presentation.cache: # Is the presentation in the global cache? 
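As a rough illustration of the import-reading scheme the docstrings above describe (read the main location, submit nested imports to a thread pool, drain the pool, and cache presentations by canonical location), here is a minimal, self-contained sketch. It deliberately uses Python's standard concurrent.futures pool rather than ARIA's own executor, and the IMPORTS table and the present()/present_all() helpers are made up for illustration only:

import concurrent.futures

# Hypothetical import graph standing in for real templates and their nested imports.
IMPORTS = {
    'main.yaml': ['types.yaml', 'nodes.yaml'],
    'types.yaml': ['base.yaml'],
    'nodes.yaml': [],
    'base.yaml': [],
}

PRESENTATION_CACHE = {}  # canonical location -> presentation (simplified global cache)

def present(location, executor, futures):
    # Reuse an already-read presentation when this canonical location was seen before.
    if location in PRESENTATION_CACHE:
        return PRESENTATION_CACHE[location]
    presentation = {'location': location, 'imports': IMPORTS[location]}
    PRESENTATION_CACHE[location] = presentation
    # Submit nested imports to the pool; each import may in turn submit its own imports.
    for import_location in presentation['imports']:
        futures.append(executor.submit(present, import_location, executor, futures))
    return presentation

def present_all(main_location):
    futures = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
        main = present(main_location, executor, futures)
        # "Drain": each result() blocks until that import is done and re-raises any
        # exception from its worker thread; newly appended futures are picked up
        # because the loop re-checks len(futures) every iteration.
        i = 0
        while i < len(futures):
            futures[i].result()
            i += 1
    return main, [main] + [f.result() for f in futures]

if __name__ == '__main__':
    main_result, all_results = present_all('main.yaml')
    print([p['location'] for p in all_results])
    # ['main.yaml', 'types.yaml', 'nodes.yaml', 'base.yaml']

The drain loop is safe because each result() call blocks until that import has finished, and an import appends any nested imports it spawns to the futures list before returning, so the loop's length check always sees them.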
@@ -154,9 +168,10 @@ class Read(Consumer): loader = self.context.loading.loader_source.get_loader(self.context.loading, location, origin_canonical_location) - canonical_location = None - if origin_canonical_location is not None: + # The cache key is a combination of the canonical location of the origin, which is + # globally absolute and never changes, and our location, which might be relative to + # the origin's location cache_key = (origin_canonical_location, location) try: canonical_location = CANONICAL_LOCATION_CACHE[cache_key] @@ -210,6 +225,11 @@ class Read(Consumer): class _Result(object): + """ + The result of a :meth:`Read._present` call. Contains the read presentation itself, as well as + extra fields to help caching and keep track of merging. + """ + def __init__(self, presentation, canonical_location, origin_canonical_location): self.presentation = presentation self.canonical_location = canonical_location @@ -261,5 +281,5 @@ class _Result(object): PRESENTATION_CACHE[self.canonical_location] = self.presentation -class _Skip(Exception): +class _CancelPresentation(Exception): pass http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/aria/parser/presentation/presentation.py ---------------------------------------------------------------------- diff --git a/aria/parser/presentation/presentation.py b/aria/parser/presentation/presentation.py index e1104e5..762ecba 100644 --- a/aria/parser/presentation/presentation.py +++ b/aria/parser/presentation/presentation.py @@ -199,6 +199,7 @@ class Presentation(PresentationBase): """ def _validate(self, context): + # Allow the skipping of normative type validation (for improved performance) if (not context.presentation.configuration.get('validate_normative', True)) \ and self._get_extension('normative'): return http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/aria/parser/reading/yaml.py ---------------------------------------------------------------------- diff --git a/aria/parser/reading/yaml.py b/aria/parser/reading/yaml.py index 77f8144..d843859 100644 --- a/aria/parser/reading/yaml.py +++ b/aria/parser/reading/yaml.py @@ -10,8 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ...utils.yaml import yaml # @UnresolvedImport - +from ...utils.yaml import yaml from ...utils.collections import OrderedDict from .reader import Reader from .locator import Locator @@ -19,9 +18,12 @@ from .exceptions import ReaderSyntaxError from .locator import (LocatableString, LocatableInt, LocatableFloat) -MERGE_TAG = u'tag:yaml.org,2002:merge' +# YAML mapping tag MAP_TAG = u'tag:yaml.org,2002:map' +# This is an internal tag used by ruamel.yaml for merging nodes +MERGE_TAG = u'tag:yaml.org,2002:merge' + # Add our types to RoundTripRepresenter yaml.representer.RoundTripRepresenter.add_representer( @@ -33,6 +35,9 @@ yaml.representer.RoundTripRepresenter.add_representer( def construct_yaml_map(self, node): + """ + Replacement for ruamel.yaml's constructor that uses OrderedDict instead of dict.
+ """ data = OrderedDict() yield data value = self.construct_mapping(node) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/aria/utils/collections.py ---------------------------------------------------------------------- diff --git a/aria/utils/collections.py b/aria/utils/collections.py index cfd8fda..2126d59 100644 --- a/aria/utils/collections.py +++ b/aria/utils/collections.py @@ -34,7 +34,7 @@ except ImportError: def cls_name(cls): module = str(cls.__module__) name = str(cls.__name__) - return name if module == '__builtin__' else '%s.%s' % (module, name) + return name if module == '__builtin__' else '{0}.{1}'.format(module, name) class FrozenList(list): @@ -145,7 +145,8 @@ class StrictList(list): def _wrap(self, value): if (self.value_class is not None) and (not isinstance(value, self.value_class)): - raise TypeError('value must be a "%s": %s' % (cls_name(self.value_class), repr(value))) + raise TypeError('value must be a "{0}": {1}' + .format(cls_name(self.value_class), repr(value))) if self.wrapper_function is not None: value = self.wrapper_function(value) return value @@ -209,7 +210,8 @@ class StrictDict(OrderedDict): def __getitem__(self, key): if (self.key_class is not None) and (not isinstance(key, self.key_class)): - raise TypeError('key must be a "%s": %s' % (cls_name(self.key_class), repr(key))) + raise TypeError('key must be a "{0}": {1}' + .format(cls_name(self.key_class), repr(key))) value = super(StrictDict, self).__getitem__(key) if self.unwrapper_function is not None: value = self.unwrapper_function(value) @@ -217,9 +219,11 @@ class StrictDict(OrderedDict): def __setitem__(self, key, value, **_): if (self.key_class is not None) and (not isinstance(key, self.key_class)): - raise TypeError('key must be a "%s": %s' % (cls_name(self.key_class), repr(key))) + raise TypeError('key must be a "{0}": {1}' + .format(cls_name(self.key_class), repr(key))) if (self.value_class is not None) and (not isinstance(value, self.value_class)): - raise TypeError('value must be a "%s": %s' % (cls_name(self.value_class), repr(value))) + raise TypeError('value must be a "{0}": {1}' + .format(cls_name(self.value_class), repr(value))) if self.wrapper_function is not None: value = self.wrapper_function(value) return super(StrictDict, self).__setitem__(key, value) @@ -228,6 +232,14 @@ class StrictDict(OrderedDict): def merge(dict_a, dict_b, copy=True, strict=False, path=None): """ Merges dicts, recursively. 
+ + :param dict_a: target dict (will be modified) + :param dict_b: source dict (will not be modified) + :param copy: if True, will use :func:`deepcopy_fast` on each merged element + :param strict: if True, will raise a ValueError if there are key conflicts, otherwise will + override exiting values + :param path: for internal use in strict mode + :return: dict_a, after the merge """ # TODO: a.add_yaml_merge(b), @@ -244,7 +256,8 @@ def merge(dict_a, dict_b, copy=True, strict=False, path=None): merge(value_a, value_b, copy, strict, path) elif value_a != value_b: if strict: - raise ValueError('dict merge conflict at %s' % '.'.join(path + [str(key)])) + raise ValueError('dict merge conflict at {0}' + .format('.'.join(path + [str(key)]))) else: dict_a[key] = deepcopy_fast(value_b) if copy else value_b else: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/extensions/aria_extension_tosca/simple_v1_0/assignments.py ---------------------------------------------------------------------- diff --git a/extensions/aria_extension_tosca/simple_v1_0/assignments.py b/extensions/aria_extension_tosca/simple_v1_0/assignments.py index 55b7e8d..6248483 100644 --- a/extensions/aria_extension_tosca/simple_v1_0/assignments.py +++ b/extensions/aria_extension_tosca/simple_v1_0/assignments.py @@ -146,14 +146,12 @@ class InterfaceAssignment(ExtensiblePresentation): if isinstance(self._container._container, RequirementAssignment): # In RequirementAssignment - requirement_definition = self._container._container._get_definition(context) - if requirement_definition is not None: - relationship_definition = requirement_definition.relationship - if relationship_definition is not None: - interface_definitions = relationship_definition.interfaces - if interface_definitions is not None: - if self._name in interface_definitions: - return interface_definitions[self._name]._get_type(context) + relationship_definition = \ + self._container._container._get_relationship_definition(context) + interface_definitions = relationship_definition.interfaces \ + if relationship_definition is not None else None + if (interface_definitions is not None) and (self._name in interface_definitions): + return interface_definitions[self._name]._get_type(context) interface_definitions = the_type._get_interfaces(context) \ if the_type is not None else None @@ -312,6 +310,11 @@ class RequirementAssignment(ExtensiblePresentation): return None @cachedmethod + def _get_relationship_definition(self, context): + requirement_definition = self._get_definition(context) + return requirement_definition.relationship if requirement_definition is not None else None + + @cachedmethod def _get_capability(self, context): capability = self.capability http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py ---------------------------------------------------------------------- diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py index 17b94fc..6c305c3 100644 --- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py +++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py @@ -664,18 +664,18 @@ def create_constraint(context, node_filter, constraint_clause, property_name, ca constraint_key = constraint_clause._raw.keys()[0] - the_type = constraint_clause._get_type(context) + value_type = constraint_clause._get_type(context) - def 
coerce_constraint(constraint, the_type=the_type): - if the_type is not None: - return coerce_value(context, node_filter, the_type, None, None, constraint, + def coerce_constraint(constraint, value_type=value_type): + if value_type is not None: + return coerce_value(context, node_filter, value_type, None, None, constraint, constraint_key) else: return constraint - def coerce_constraints(constraints, the_type=the_type): - if the_type is not None: - return tuple(coerce_constraint(constraint, the_type) for constraint in constraints) + def coerce_constraints(constraints, value_type=value_type): + if value_type is not None: + return tuple(coerce_constraint(constraint, value_type) for constraint in constraints) else: return constraints http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py ---------------------------------------------------------------------- diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py index 25e53c6..31865b9 100644 --- a/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py +++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py @@ -320,9 +320,9 @@ def apply_constraint_to_value(context, presentation, constraint_clause, value): def get_data_type_value(context, presentation, field_name, type_name): value = getattr(presentation, field_name) if value is not None: - the_type = get_type_by_name(context, type_name, 'data_types') - if the_type is not None: - return coerce_data_type_value(context, presentation, the_type, None, None, value, None) + data_type = get_type_by_name(context, type_name, 'data_types') + if data_type is not None: + return coerce_data_type_value(context, presentation, data_type, None, None, value, None) else: context.validation.report(u'field "{0}" in "{1}" refers to unknown data type "{2}"' .format(field_name, presentation._fullname, type_name), http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/test_ssh.py ---------------------------------------------------------------------- diff --git a/test_ssh.py b/test_ssh.py deleted file mode 100644 index 5256cf8..0000000 --- a/test_ssh.py +++ /dev/null @@ -1,528 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import contextlib -import json -import logging -import os - -import pytest - -import fabric.api -from fabric.contrib import files -from fabric import context_managers - -from aria.modeling import models -from aria.orchestrator import events -from aria.orchestrator import workflow -from aria.orchestrator.workflows import api -from aria.orchestrator.workflows.executor import process -from aria.orchestrator.workflows.core import engine, graph_compiler -from aria.orchestrator.workflows.exceptions import ExecutorException -from aria.orchestrator.exceptions import TaskAbortException, TaskRetryException -from aria.orchestrator.execution_plugin import operations -from aria.orchestrator.execution_plugin import constants -from aria.orchestrator.execution_plugin.exceptions import ProcessException, TaskException -from aria.orchestrator.execution_plugin.ssh import operations as ssh_operations - -from tests import mock, storage, resources -from tests.orchestrator.workflows.helpers import events_collector - -_CUSTOM_BASE_DIR = '/tmp/new-aria-ctx' - -import tests -KEY_FILENAME = os.path.join(tests.ROOT_DIR, 'tests/resources/keys/test') - -_FABRIC_ENV = { - 'disable_known_hosts': True, - 'user': 'test', - 'key_filename': KEY_FILENAME -} - - -import mockssh -@pytest.fixture(scope='session') -def server(): - with mockssh.Server({'test': KEY_FILENAME}) as s: - yield s - - -#@pytest.mark.skipif(not os.environ.get('TRAVIS'), reason='actual ssh server required') -class TestWithActualSSHServer(object): - - def test_run_script_basic(self): - expected_attribute_value = 'some_value' - props = self._execute(env={'test_value': expected_attribute_value}) - assert props['test_value'].value == expected_attribute_value - - @pytest.mark.skip(reason='sudo privileges are required') - def test_run_script_as_sudo(self): - self._execute(use_sudo=True) - with self._ssh_env(): - assert files.exists('/opt/test_dir') - fabric.api.sudo('rm -rf /opt/test_dir') - - def test_run_script_default_base_dir(self): - props = self._execute() - assert props['work_dir'].value == '{0}/work'.format(constants.DEFAULT_BASE_DIR) - - @pytest.mark.skip(reason='Re-enable once output from process executor can be captured') - @pytest.mark.parametrize('hide_groups', [[], ['everything']]) - def test_run_script_with_hide(self, hide_groups): - self._execute(hide_output=hide_groups) - output = 'TODO' - expected_log_message = ('[localhost] run: source {0}/scripts/' - .format(constants.DEFAULT_BASE_DIR)) - if hide_groups: - assert expected_log_message not in output - else: - assert expected_log_message in output - - def test_run_script_process_config(self): - expected_env_value = 'test_value_env' - expected_arg1_value = 'test_value_arg1' - expected_arg2_value = 'test_value_arg2' - expected_cwd = '/tmp' - expected_base_dir = _CUSTOM_BASE_DIR - props = self._execute( - env={'test_value_env': expected_env_value}, - process={ - 'args': [expected_arg1_value, expected_arg2_value], - 'cwd': expected_cwd, - 'base_dir': expected_base_dir - }) - assert props['env_value'].value == expected_env_value - assert len(props['bash_version'].value) > 0 - assert props['arg1_value'].value == expected_arg1_value - assert props['arg2_value'].value == expected_arg2_value - assert props['cwd'].value == expected_cwd - assert props['ctx_path'].value == '{0}/ctx'.format(expected_base_dir) - - def test_run_script_command_prefix(self): - props = self._execute(process={'command_prefix': 'bash -i'}) - assert 'i' in props['dollar_dash'].value - - def 
test_run_script_reuse_existing_ctx(self): - expected_test_value_1 = 'test_value_1' - expected_test_value_2 = 'test_value_2' - props = self._execute( - test_operations=['{0}_1'.format(self.test_name), - '{0}_2'.format(self.test_name)], - env={'test_value1': expected_test_value_1, - 'test_value2': expected_test_value_2}) - assert props['test_value1'].value == expected_test_value_1 - assert props['test_value2'].value == expected_test_value_2 - - def test_run_script_download_resource_plain(self, tmpdir): - resource = tmpdir.join('resource') - resource.write('content') - self._upload(str(resource), 'test_resource') - props = self._execute() - assert props['test_value'].value == 'content' - - def test_run_script_download_resource_and_render(self, tmpdir): - resource = tmpdir.join('resource') - resource.write('{{ctx.service.name}}') - self._upload(str(resource), 'test_resource') - props = self._execute() - assert props['test_value'].value == self._workflow_context.service.name - - @pytest.mark.parametrize('value', ['string-value', [1, 2, 3], {'key': 'value'}]) - def test_run_script_inputs_as_env_variables_no_override(self, value): - props = self._execute(custom_input=value) - return_value = props['test_value'].value - expected = return_value if isinstance(value, basestring) else json.loads(return_value) - assert value == expected - - @pytest.mark.parametrize('value', ['string-value', [1, 2, 3], {'key': 'value'}]) - def test_run_script_inputs_as_env_variables_process_env_override(self, value): - props = self._execute(custom_input='custom-input-value', - env={'custom_env_var': value}) - return_value = props['test_value'].value - expected = return_value if isinstance(value, basestring) else json.loads(return_value) - assert value == expected - - def test_run_script_error_in_script(self): - exception = self._execute_and_get_task_exception() - assert isinstance(exception, TaskException) - - def test_run_script_abort_immediate(self): - exception = self._execute_and_get_task_exception() - assert isinstance(exception, TaskAbortException) - assert exception.message == 'abort-message' - - def test_run_script_retry(self): - exception = self._execute_and_get_task_exception() - assert isinstance(exception, TaskRetryException) - assert exception.message == 'retry-message' - - def test_run_script_abort_error_ignored_by_script(self): - exception = self._execute_and_get_task_exception() - assert isinstance(exception, TaskAbortException) - assert exception.message == 'abort-message' - - def test_run_commands(self): - temp_file_path = '/tmp/very_temporary_file' - with self._ssh_env(): - if files.exists(temp_file_path): - fabric.api.run('rm {0}'.format(temp_file_path)) - self._execute(commands=['touch {0}'.format(temp_file_path)]) - with self._ssh_env(): - assert files.exists(temp_file_path) - fabric.api.run('rm {0}'.format(temp_file_path)) - - @pytest.fixture(autouse=True) - def _setup(self, request, workflow_context, executor, capfd, server): - print 'HI!!!!!!!!!!', server.port - self._workflow_context = workflow_context - self._executor = executor - self._capfd = capfd - self.test_name = request.node.originalname or request.node.name - with self._ssh_env(server): - for directory in [constants.DEFAULT_BASE_DIR, _CUSTOM_BASE_DIR]: - if files.exists(directory): - fabric.api.run('rm -rf {0}'.format(directory)) - - @contextlib.contextmanager - def _ssh_env(self, server): - with self._capfd.disabled(): - with context_managers.settings(fabric.api.hide('everything'), - host_string='localhost:{0}'.format(server.port), - 
**_FABRIC_ENV): - yield - - def _execute(self, - env=None, - use_sudo=False, - hide_output=None, - process=None, - custom_input='', - test_operations=None, - commands=None): - process = process or {} - if env: - process.setdefault('env', {}).update(env) - - test_operations = test_operations or [self.test_name] - - local_script_path = os.path.join(resources.DIR, 'scripts', 'test_ssh.sh') - script_path = os.path.basename(local_script_path) - self._upload(local_script_path, script_path) - - if commands: - operation = operations.run_commands_with_ssh - else: - operation = operations.run_script_with_ssh - - node = self._workflow_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) - arguments = { - 'script_path': script_path, - 'fabric_env': _FABRIC_ENV, - 'process': process, - 'use_sudo': use_sudo, - 'custom_env_var': custom_input, - 'test_operation': '', - } - if hide_output: - arguments['hide_output'] = hide_output - if commands: - arguments['commands'] = commands - interface = mock.models.create_interface( - node.service, - 'test', - 'op', - operation_kwargs=dict( - function='{0}.{1}'.format( - operations.__name__, - operation.__name__), - arguments=arguments) - ) - node.interfaces[interface.name] = interface - - @workflow - def mock_workflow(ctx, graph): - ops = [] - for test_operation in test_operations: - op_arguments = arguments.copy() - op_arguments['test_operation'] = test_operation - ops.append(api.task.OperationTask( - node, - interface_name='test', - operation_name='op', - arguments=op_arguments)) - - graph.sequence(*ops) - return graph - tasks_graph = mock_workflow(ctx=self._workflow_context) # pylint: disable=no-value-for-parameter - graph_compiler.GraphCompiler( - self._workflow_context, self._executor.__class__).compile(tasks_graph) - eng = engine.Engine({self._executor.__class__: self._executor}) - eng.execute(self._workflow_context) - return self._workflow_context.model.node.get_by_name( - mock.models.DEPENDENCY_NODE_NAME).attributes - - def _execute_and_get_task_exception(self, *args, **kwargs): - signal = events.on_failure_task_signal - with events_collector(signal) as collected: - with pytest.raises(ExecutorException): - self._execute(*args, **kwargs) - return collected[signal][0]['kwargs']['exception'] - - def _upload(self, source, path): - self._workflow_context.resource.service.upload( - entry_id=str(self._workflow_context.service.id), - source=source, - path=path) - - @pytest.fixture - def executor(self): - result = process.ProcessExecutor() - try: - yield result - finally: - result.close() - - @pytest.fixture - def workflow_context(self, tmpdir): - workflow_context = mock.context.simple(str(tmpdir)) - workflow_context.states = [] - workflow_context.exception = None - yield workflow_context - storage.release_sqlite_storage(workflow_context.model) - - -class TestFabricEnvHideGroupsAndRunCommands(object): - - def test_fabric_env_default_override(self): - # first sanity for no override - self._run() - assert self.mock.settings_merged['timeout'] == constants.FABRIC_ENV_DEFAULTS['timeout'] - # now override - invocation_fabric_env = self.default_fabric_env.copy() - timeout = 1000000 - invocation_fabric_env['timeout'] = timeout - self._run(fabric_env=invocation_fabric_env) - assert self.mock.settings_merged['timeout'] == timeout - - def test_implicit_host_string(self, mocker): - expected_host_address = '1.1.1.1' - mocker.patch.object(self._Ctx.task.actor, 'host') - mocker.patch.object(self._Ctx.task.actor.host, 'host_address', expected_host_address) - fabric_env = 
self.default_fabric_env.copy() - del fabric_env['host_string'] - self._run(fabric_env=fabric_env) - assert self.mock.settings_merged['host_string'] == expected_host_address - - def test_explicit_host_string(self): - fabric_env = self.default_fabric_env.copy() - host_string = 'explicit_host_string' - fabric_env['host_string'] = host_string - self._run(fabric_env=fabric_env) - assert self.mock.settings_merged['host_string'] == host_string - - def test_override_warn_only(self): - fabric_env = self.default_fabric_env.copy() - self._run(fabric_env=fabric_env) - assert self.mock.settings_merged['warn_only'] is True - fabric_env = self.default_fabric_env.copy() - fabric_env['warn_only'] = False - self._run(fabric_env=fabric_env) - assert self.mock.settings_merged['warn_only'] is False - - def test_missing_host_string(self): - with pytest.raises(TaskAbortException) as exc_ctx: - fabric_env = self.default_fabric_env.copy() - del fabric_env['host_string'] - self._run(fabric_env=fabric_env) - assert '`host_string` not supplied' in str(exc_ctx.value) - - def test_missing_user(self): - with pytest.raises(TaskAbortException) as exc_ctx: - fabric_env = self.default_fabric_env.copy() - del fabric_env['user'] - self._run(fabric_env=fabric_env) - assert '`user` not supplied' in str(exc_ctx.value) - - def test_missing_key_or_password(self): - with pytest.raises(TaskAbortException) as exc_ctx: - fabric_env = self.default_fabric_env.copy() - del fabric_env['key_filename'] - self._run(fabric_env=fabric_env) - assert 'Access credentials not supplied' in str(exc_ctx.value) - - def test_hide_in_settings_and_non_viable_groups(self): - groups = ('running', 'stdout') - self._run(hide_output=groups) - assert set(self.mock.settings_merged['hide_output']) == set(groups) - with pytest.raises(TaskAbortException) as exc_ctx: - self._run(hide_output=('running', 'bla')) - assert '`hide_output` must be a subset of' in str(exc_ctx.value) - - def test_run_commands(self): - def test(use_sudo): - commands = ['command1', 'command2'] - self._run( - commands=commands, - use_sudo=use_sudo) - assert all(item in self.mock.settings_merged.items() for - item in self.default_fabric_env.items()) - assert self.mock.settings_merged['warn_only'] is True - assert self.mock.settings_merged['use_sudo'] == use_sudo - assert self.mock.commands == commands - self.mock.settings_merged = {} - self.mock.commands = [] - test(use_sudo=False) - test(use_sudo=True) - - def test_failed_command(self): - with pytest.raises(ProcessException) as exc_ctx: - self._run(commands=['fail']) - exception = exc_ctx.value - assert exception.stdout == self.MockCommandResult.stdout - assert exception.stderr == self.MockCommandResult.stderr - assert exception.command == self.MockCommandResult.command - assert exception.exit_code == self.MockCommandResult.return_code - - class MockCommandResult(object): - stdout = 'mock_stdout' - stderr = 'mock_stderr' - command = 'mock_command' - return_code = 1 - - def __init__(self, failed): - self.failed = failed - - class MockFabricApi(object): - - def __init__(self): - self.commands = [] - self.settings_merged = {} - - @contextlib.contextmanager - def settings(self, *args, **kwargs): - self.settings_merged.update(kwargs) - if args: - groups = args[0] - self.settings_merged.update({'hide_output': groups}) - yield - - def run(self, command): - self.commands.append(command) - self.settings_merged['use_sudo'] = False - return TestFabricEnvHideGroupsAndRunCommands.MockCommandResult(command == 'fail') - - def sudo(self, command): - 
self.commands.append(command) - self.settings_merged['use_sudo'] = True - return TestFabricEnvHideGroupsAndRunCommands.MockCommandResult(command == 'fail') - - def hide(self, *groups): - return groups - - def exists(self, *args, **kwargs): - raise RuntimeError - - class _Ctx(object): - INSTRUMENTATION_FIELDS = () - - class Task(object): - @staticmethod - def abort(message=None): - models.Task.abort(message) - actor = None - - class Actor(object): - host = None - - class Model(object): - @contextlib.contextmanager - def instrument(self, *args, **kwargs): - yield - task = Task - task.actor = Actor - model = Model() - logger = logging.getLogger() - - @staticmethod - @contextlib.contextmanager - def _mock_self_logging(*args, **kwargs): - yield - _Ctx.logging_handlers = _mock_self_logging - - @pytest.fixture(autouse=True) - def _setup(self, mocker): - self.default_fabric_env = { - 'host_string': 'test', - 'user': 'test', - 'key_filename': 'test', - } - self.mock = self.MockFabricApi() - mocker.patch('fabric.api', self.mock) - - def _run(self, - commands=(), - fabric_env=None, - process=None, - use_sudo=False, - hide_output=None): - operations.run_commands_with_ssh( - ctx=self._Ctx, - commands=commands, - process=process, - fabric_env=fabric_env or self.default_fabric_env, - use_sudo=use_sudo, - hide_output=hide_output) - - -class TestUtilityFunctions(object): - - def test_paths(self): - base_dir = '/path' - local_script_path = '/local/script/path.py' - paths = ssh_operations._Paths(base_dir=base_dir, - local_script_path=local_script_path) - assert paths.local_script_path == local_script_path - assert paths.remote_ctx_dir == base_dir - assert paths.base_script_path == 'path.py' - assert paths.remote_ctx_path == '/path/ctx' - assert paths.remote_scripts_dir == '/path/scripts' - assert paths.remote_work_dir == '/path/work' - assert paths.remote_env_script_path.startswith('/path/scripts/env-path.py-') - assert paths.remote_script_path.startswith('/path/scripts/path.py-') - - def test_write_environment_script_file(self): - base_dir = '/path' - local_script_path = '/local/script/path.py' - paths = ssh_operations._Paths(base_dir=base_dir, - local_script_path=local_script_path) - env = {'one': "'1'"} - local_socket_url = 'local_socket_url' - remote_socket_url = 'remote_socket_url' - env_script_lines = set([l for l in ssh_operations._write_environment_script_file( - process={'env': env}, - paths=paths, - local_socket_url=local_socket_url, - remote_socket_url=remote_socket_url - ).getvalue().split('\n') if l]) - expected_env_script_lines = set([ - 'export PATH=/path:$PATH', - 'export PYTHONPATH=/path:$PYTHONPATH', - 'chmod +x /path/ctx', - 'chmod +x {0}'.format(paths.remote_script_path), - 'export CTX_SOCKET_URL={0}'.format(remote_socket_url), - 'export LOCAL_CTX_SOCKET_URL={0}'.format(local_socket_url), - 'export one=\'1\'' - ]) - assert env_script_lines == expected_env_script_lines http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/tests/extensions/aria_extension_tosca/conftest.py ---------------------------------------------------------------------- diff --git a/tests/extensions/aria_extension_tosca/conftest.py b/tests/extensions/aria_extension_tosca/conftest.py index a2020b7..fdb2d4c 100644 --- a/tests/extensions/aria_extension_tosca/conftest.py +++ b/tests/extensions/aria_extension_tosca/conftest.py @@ -17,6 +17,9 @@ PyTest configuration module. Add support for a "--tosca-parser" CLI option. 
+ +For more information on PyTest hooks, see the `PyTest documentation +<https://docs.pytest.org/en/latest/writing_plugins.html#pytest-hook-reference>`__. """ import pytest http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/tests/mechanisms/parsing/__init__.py ---------------------------------------------------------------------- diff --git a/tests/mechanisms/parsing/__init__.py b/tests/mechanisms/parsing/__init__.py index 2a860dc..a50abc4 100644 --- a/tests/mechanisms/parsing/__init__.py +++ b/tests/mechanisms/parsing/__init__.py @@ -27,7 +27,10 @@ class Parsed(object): self.verbose = False def assert_success(self): - __tracebackhide__ = True # pylint: disable=unused-variable + # See: https://docs.pytest.org/en/latest/example/simple.html + # #writing-well-integrated-assertion-helpers + __tracebackhide__ = True # pylint: disable=unused-variable + if len(self.issues) > 0: pytest.fail(u'did not expect parsing errors\n\n{0}\n\n{1}' .format(self.text.strip(), u'\n'.join(self.issues))) @@ -37,7 +40,10 @@ class Parsed(object): print self.text.strip() def assert_failure(self): - __tracebackhide__ = True # pylint: disable=unused-variable + # See: https://docs.pytest.org/en/latest/example/simple.html + # #writing-well-integrated-assertion-helpers + __tracebackhide__ = True # pylint: disable=unused-variable + if len(self.issues) > 0: if self.verbose: print LINE_BREAK http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/tests/mechanisms/utils.py ---------------------------------------------------------------------- diff --git a/tests/mechanisms/utils.py b/tests/mechanisms/utils.py index 3475206..45a442c 100644 --- a/tests/mechanisms/utils.py +++ b/tests/mechanisms/utils.py @@ -24,10 +24,13 @@ def matrix(*iterables, **kwargs): with the added ability to "flatten" each value by breaking up tuples and recombining them into a final flat value. - To do such recombination, use the ``counts`` argument (tuple) to specify the number of elements - per value in order. Any count greater than 1 (the default) enables recombination of that value. + To do such recombination, use the ``counts`` argument (tuple of integers) to specify the number + of elements per value in each iterable in order. Any count greater than 1 (the default) enables + recombination of the iterable's values. So, if you are combining three different iterables, then + you want ``counts`` to be a tuple of three integers. The first integer in the ``counts`` tuple + will be the number of elements in the values of the first iterable, etc. - Example:: + Detailed example:: x = ('hello', 'goodbye') y = ('Linus', 'Richard') @@ -38,12 +41,18 @@ def matrix(*iterables, **kwargs): ('goodbye', 'Richard') y = (('Linus', 'Torvalds'), ('Richard', 'Stallman')) + + # Without flattening: + matrix(x, y) -> ('hello', ('Linus', 'Torvalds')), ('hello', ('Richard', 'Stallman')), ('goodbye', ('Linus', 'Torvalds')), ('goodbye', ('Richard', 'Stallman')) + # The values in our second iterable, y, have two elements that we want to flatten, so we will + # set the second "count" value to 2: + matrix(x, y, counts=(1, 2)) -> ('hello', 'Linus', 'Torvalds'), ('hello', 'Richard', 'Stallman'),
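To make the counts-based flattening described above concrete, here is a small, self-contained sketch built on itertools.product. It is an illustrative re-implementation rather than the actual code in tests/mechanisms/utils.py, and the name flat_matrix is made up:

import itertools

def flat_matrix(*iterables, **kwargs):
    # counts[i] is the number of elements a value from iterables[i] contributes to the
    # final flat value; a count greater than 1 means the value is a tuple that gets
    # broken up in place rather than nested.
    counts = kwargs.get('counts', (1,) * len(iterables))
    for combination in itertools.product(*iterables):
        flat = []
        for value, count in zip(combination, counts):
            if count > 1:
                flat.extend(value)
            else:
                flat.append(value)
        yield tuple(flat)

x = ('hello', 'goodbye')
y = (('Linus', 'Torvalds'), ('Richard', 'Stallman'))
print(list(flat_matrix(x, y, counts=(1, 2))))
# [('hello', 'Linus', 'Torvalds'), ('hello', 'Richard', 'Stallman'),
#  ('goodbye', 'Linus', 'Torvalds'), ('goodbye', 'Richard', 'Stallman')]

When counts is omitted it defaults to 1 per iterable, which reduces flat_matrix() to a plain Cartesian product, matching the "without flattening" example in the docstring.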