http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
index f271b4d..9bafeec 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
@@ -18,7 +18,8 @@ from aria.utils.formatting import pluralize
 from aria.parser.presentation import Value
 from aria.parser.validation import Issue
 
-from .data_types import coerce_value
+from .data_types import (coerce_value, get_primitive_data_type)
+from ..presentation.types import get_type_by_name
 
 
 #
@@ -65,11 +66,11 @@ def get_inherited_parameter_definitions(context, presentation, field_name, for_p
 
 def get_assigned_and_defined_parameter_values(context, presentation, field_name):
     """
-    Returns the assigned property values while making sure they are defined in our type.
+    Returns the assigned parameter values while making sure they are defined in our type.
 
-    The property definition's default value, if available, will be used if we did not assign it.
+    The parameter definition's default value, if available, will be used if we did not assign it.
 
-    Makes sure that required properties indeed end up with a value.
+    Makes sure that required parameters indeed end up with a value.
     """
 
     values = OrderedDict()
@@ -94,12 +95,20 @@ def get_assigned_and_defined_parameter_values(context, presentation, field_name)
     # Fill in defaults from the definitions
     if definitions:
         for name, definition in definitions.iteritems():
-            if values.get(name) is None:
+            # Note: attributes will always have a default value, even if it's None
+            if (name not in values) and \
+                (('default' in definition._raw) or (field_name == 'attribute')):
                 values[name] = coerce_parameter_value(context, presentation, definition,
                                                       definition.default)
 
     validate_required_values(context, presentation, values, definitions)
 
+    # Fill in nulls for missing values that are *not* required
+    if definitions:
+        for name, definition in definitions.iteritems():
+            if (name not in values) and not getattr(definition, 'required', False):
+                values[name] = coerce_parameter_value(context, presentation, definition, None)
+
     return values
 
 
@@ -139,8 +148,8 @@ def validate_required_values(context, presentation, values, definitions):
     if not definitions:
         return
     for name, definition in definitions.iteritems():
-        if getattr(definition, 'required', False) \
-            and ((values is None) or (values.get(name) is None)):
+        if getattr(definition, 'required', False) and \
+            ((values is None) or (values.get(name) is None)):
             context.validation.report('required property "%s" is not assigned a value in "%s"'
                                       % (name, presentation._fullname),
                                       locator=presentation._get_child_locator('properties'),
@@ -149,16 +158,22 @@ def validate_required_values(context, presentation, values, definitions):
 
 def merge_raw_parameter_definition(context, presentation, raw_property_definition,
                                    our_property_definition, field_name, property_name):
-    # Check if we changed the type
-    # TODO: allow a sub-type?
-    type1 = raw_property_definition.get('type')
-    type2 = our_property_definition.type
+    # Check if we changed the parameter type
+    type1_name = raw_property_definition.get('type')
+    type1 = get_type_by_name(context, type1_name, 'data_types')
+    if type1 is None:
+        type1 = get_primitive_data_type(type1_name)
+    our_property_definition._reset_method_cache()
+    type2 = our_property_definition._get_type(context)
+
     if type1 != type2:
-        context.validation.report(
-            'override changes type from "%s" to "%s" for property "%s" in "%s"'
-            % (type1, type2, property_name, presentation._fullname),
-            locator=presentation._get_child_locator(field_name, property_name),
-            level=Issue.BETWEEN_TYPES)
+        if not hasattr(type1, '_is_descendant') or not type1._is_descendant(context, type2):
+            context.validation.report(
+                'property definition type "{0}" is not a descendant of overridden '
+                'property definition type "{1}"' \
+                .format(type1_name, type2._name),
+                locator=presentation._get_child_locator(field_name, property_name),
+                level=Issue.BETWEEN_TYPES)
 
     merge(raw_property_definition, our_property_definition._raw)
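
The two fill passes above mean that a declared default (or an implicit None for
attributes) is applied first, and any still-missing non-required parameter is then
explicitly set to null. A minimal sketch of that behavior, using plain dicts in
place of the real definition presentations (fill_values and its inputs are
illustrative, not the actual API):

    def fill_values(values, definitions, field_name):
        # Pass 1: apply declared defaults; attributes always default, even to None
        for name, definition in definitions.iteritems():
            if (name not in values) and \
                (('default' in definition) or (field_name == 'attribute')):
                values[name] = definition.get('default')
        # Pass 2: explicitly null any remaining non-required parameter
        for name, definition in definitions.iteritems():
            if (name not in values) and not definition.get('required', False):
                values[name] = None
        return values

    # Prints: {'a': 1, 'b': 'x', 'c': None}
    print fill_values({'a': 1},
                      {'a': {'required': True}, 'b': {'default': 'x'}, 'c': {}},
                      'property')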
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/extensions/aria_extension_tosca/simple_v1_0/presentation/types.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/presentation/types.py b/extensions/aria_extension_tosca/simple_v1_0/presentation/types.py
index 920ebed..5f9750e 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/presentation/types.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/presentation/types.py
@@ -18,7 +18,8 @@ def convert_name_to_full_type_name(context, name, types_dict): # pylint: disable
     """
     Converts a type name to its full type name, or else returns it unchanged.
 
-    Works by checking for ``shorthand_name`` in the types' ``_extensions`` field. See also
+    Works by checking for ``shorthand_name`` and ``type_qualified_name`` in the types'
+    ``_extensions`` field. See also
     :class:`aria_extension_tosca.v1_0.presentation.extensible.ExtensiblePresentation`.
 
     Can be used as the conversion function argument in ``type_validator`` and
@@ -36,9 +37,10 @@ def convert_name_to_full_type_name(context, name, types_dict): # pylint: disable
 
 def get_type_by_name(context, name, *types_dict_names):
     """
-    Gets a type either by its full name or its shorthand name or typequalified name.
+    Gets a type either by its full name or its shorthand name or type-qualified name.
 
-    Works by checking for ``shorthand_name`` in the types' ``_extensions`` field. See also
+    Works by checking for ``shorthand_name`` and ``type_qualified_name`` in the types'
+    ``_extensions`` field. See also
     :class:`~aria_extension_tosca.v1_0.presentation.extensible.ExtensiblePresentation`.
 
     The arguments from the third onwards are used to locate a nested field under
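
For context, the lookup these docstrings describe resolves a shorthand name such as
``Compute`` or a type-qualified name such as ``tosca:Compute`` to the full name
``tosca.nodes.Compute``. A rough sketch of the idea, with a plain dict standing in
for the types' ``_extensions`` field (illustrative only, not the real presentation
classes):

    def convert_name_to_full_type_name(name, types_dict):
        # types_dict: full type name -> its _extensions dict
        for full_name, extensions in types_dict.iteritems():
            if name in (extensions.get('shorthand_name'),
                        extensions.get('type_qualified_name')):
                return full_name
        return name  # unchanged if not a shorthand or type-qualified name

    TYPES = {'tosca.nodes.Compute': {'shorthand_name': 'Compute',
                                     'type_qualified_name': 'tosca:Compute'}}
    assert convert_name_to_full_type_name('Compute', TYPES) == 'tosca.nodes.Compute'
    assert convert_name_to_full_type_name('tosca:Compute', TYPES) == 'tosca.nodes.Compute'
    assert convert_name_to_full_type_name('other', TYPES) == 'other'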

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/extensions/aria_extension_tosca/simple_v1_0/types.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/types.py b/extensions/aria_extension_tosca/simple_v1_0/types.py
index 0241917..43af44b 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/types.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/types.py
@@ -23,7 +23,7 @@ from aria.parser.presentation import (has_fields, allow_unknown_fields, primitiv
                                       list_type_validator, derived_from_validator,
                                       get_parent_presentation)
 
-from .assignments import ArtifactAssignment
+from .assignments import ArtifactAssignmentForType
 from .data_types import Version
 from .definitions import (PropertyDefinition, AttributeDefinition, InterfaceDefinition,
                           RequirementDefinition, CapabilityDefinition, OperationDefinition)
@@ -193,6 +193,17 @@ class DataType(ExtensiblePresentation):
         return get_data_type(context, self, 'derived_from', allow_none=True)
 
     @cachedmethod
+    def _is_descendant(self, context, the_type):
+        if the_type is None:
+            return False
+        if not hasattr(the_type, '_name'):
+            # Must be a primitive type
+            return self._get_primitive_ancestor(context) == the_type
+        if the_type._name == self._name:
+            return True
+        return self._is_descendant(context, the_type._get_parent(context))
+
+    @cachedmethod
     def _get_primitive_ancestor(self, context):
         parent = self._get_parent(context)
         if parent is not None:
@@ -388,6 +399,14 @@ class InterfaceType(ExtensiblePresentation):
                                        'interface_types')
 
     @cachedmethod
+    def _is_descendant(self, context, the_type):
+        if the_type is None:
+            return False
+        elif the_type._name == self._name:
+            return True
+        return self._is_descendant(context, the_type._get_parent(context))
+
+    @cachedmethod
     def _get_inputs(self, context):
         return FrozenDict(get_inherited_parameter_definitions(context, self, 'inputs'))
 
@@ -606,12 +625,12 @@ class NodeType(ExtensiblePresentation):
         :type: {:obj:`basestring`: :class:`InterfaceDefinition`}
         """
 
-    @object_dict_field(ArtifactAssignment)
+    @object_dict_field(ArtifactAssignmentForType)
     def artifacts(self):
         """
         An optional list of named artifact definitions for the Node Type.
 
-        :type: {:obj:`basestring`: :class:`ArtifactAssignment`}
+        :type: {:obj:`basestring`: :class:`ArtifactAssignmentForType`}
         """
 
     @cachedmethod
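
The new ``_is_descendant`` methods walk up the candidate type's ``derived_from``
chain until they reach the current type's name or run out of parents, which is what
lets an override narrow a type to one of its descendants. A toy illustration of the
recursion outside the presentation framework (the Type class here is hypothetical):

    class Type(object):
        def __init__(self, name, parent=None):
            self._name = name
            self._parent = parent

        def _get_parent(self, context):
            return self._parent

        def _is_descendant(self, context, the_type):
            if the_type is None:
                return False
            if the_type._name == self._name:
                return True
            return self._is_descendant(context, the_type._get_parent(context))

    root = Type('tosca.datatypes.Root')
    credential = Type('tosca.datatypes.Credential', parent=root)
    assert root._is_descendant(None, credential)      # Credential derives from Root
    assert not credential._is_descendant(None, root)  # but not the reverse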

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/test_ssh.py
----------------------------------------------------------------------
diff --git a/test_ssh.py b/test_ssh.py
new file mode 100644
index 0000000..5256cf8
--- /dev/null
+++ b/test_ssh.py
@@ -0,0 +1,528 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import contextlib
+import json
+import logging
+import os
+
+import pytest
+
+import fabric.api
+from fabric.contrib import files
+from fabric import context_managers
+
+from aria.modeling import models
+from aria.orchestrator import events
+from aria.orchestrator import workflow
+from aria.orchestrator.workflows import api
+from aria.orchestrator.workflows.executor import process
+from aria.orchestrator.workflows.core import engine, graph_compiler
+from aria.orchestrator.workflows.exceptions import ExecutorException
+from aria.orchestrator.exceptions import TaskAbortException, TaskRetryException
+from aria.orchestrator.execution_plugin import operations
+from aria.orchestrator.execution_plugin import constants
+from aria.orchestrator.execution_plugin.exceptions import ProcessException, TaskException
+from aria.orchestrator.execution_plugin.ssh import operations as ssh_operations
+
+from tests import mock, storage, resources
+from tests.orchestrator.workflows.helpers import events_collector
+
+_CUSTOM_BASE_DIR = '/tmp/new-aria-ctx'
+
+import tests
+KEY_FILENAME = os.path.join(tests.ROOT_DIR, 'tests/resources/keys/test')
+
+_FABRIC_ENV = {
+    'disable_known_hosts': True,
+    'user': 'test',
+    'key_filename': KEY_FILENAME
+}
+
+
+import mockssh
+@pytest.fixture(scope='session')
+def server():
+    with mockssh.Server({'test': KEY_FILENAME}) as s:
+        yield s
+
+
+#@pytest.mark.skipif(not os.environ.get('TRAVIS'), reason='actual ssh server required')
+class TestWithActualSSHServer(object):
+
+    def test_run_script_basic(self):
+        expected_attribute_value = 'some_value'
+        props = self._execute(env={'test_value': expected_attribute_value})
+        assert props['test_value'].value == expected_attribute_value
+
+    @pytest.mark.skip(reason='sudo privileges are required')
+    def test_run_script_as_sudo(self):
+        self._execute(use_sudo=True)
+        with self._ssh_env():
+            assert files.exists('/opt/test_dir')
+            fabric.api.sudo('rm -rf /opt/test_dir')
+
+    def test_run_script_default_base_dir(self):
+        props = self._execute()
+        assert props['work_dir'].value == '{0}/work'.format(constants.DEFAULT_BASE_DIR)
+
+    @pytest.mark.skip(reason='Re-enable once output from process executor can be captured')
+    @pytest.mark.parametrize('hide_groups', [[], ['everything']])
+    def test_run_script_with_hide(self, hide_groups):
+        self._execute(hide_output=hide_groups)
+        output = 'TODO'
+        expected_log_message = ('[localhost] run: source {0}/scripts/'
+                                .format(constants.DEFAULT_BASE_DIR))
+        if hide_groups:
+            assert expected_log_message not in output
+        else:
+            assert expected_log_message in output
+
+    def test_run_script_process_config(self):
+        expected_env_value = 'test_value_env'
+        expected_arg1_value = 'test_value_arg1'
+        expected_arg2_value = 'test_value_arg2'
+        expected_cwd = '/tmp'
+        expected_base_dir = _CUSTOM_BASE_DIR
+        props = self._execute(
+            env={'test_value_env': expected_env_value},
+            process={
+                'args': [expected_arg1_value, expected_arg2_value],
+                'cwd': expected_cwd,
+                'base_dir': expected_base_dir
+            })
+        assert props['env_value'].value == expected_env_value
+        assert len(props['bash_version'].value) > 0
+        assert props['arg1_value'].value == expected_arg1_value
+        assert props['arg2_value'].value == expected_arg2_value
+        assert props['cwd'].value == expected_cwd
+        assert props['ctx_path'].value == '{0}/ctx'.format(expected_base_dir)
+
+    def test_run_script_command_prefix(self):
+        props = self._execute(process={'command_prefix': 'bash -i'})
+        assert 'i' in props['dollar_dash'].value
+
+    def test_run_script_reuse_existing_ctx(self):
+        expected_test_value_1 = 'test_value_1'
+        expected_test_value_2 = 'test_value_2'
+        props = self._execute(
+            test_operations=['{0}_1'.format(self.test_name),
+                             '{0}_2'.format(self.test_name)],
+            env={'test_value1': expected_test_value_1,
+                 'test_value2': expected_test_value_2})
+        assert props['test_value1'].value == expected_test_value_1
+        assert props['test_value2'].value == expected_test_value_2
+
+    def test_run_script_download_resource_plain(self, tmpdir):
+        resource = tmpdir.join('resource')
+        resource.write('content')
+        self._upload(str(resource), 'test_resource')
+        props = self._execute()
+        assert props['test_value'].value == 'content'
+
+    def test_run_script_download_resource_and_render(self, tmpdir):
+        resource = tmpdir.join('resource')
+        resource.write('{{ctx.service.name}}')
+        self._upload(str(resource), 'test_resource')
+        props = self._execute()
+        assert props['test_value'].value == self._workflow_context.service.name
+
+    @pytest.mark.parametrize('value', ['string-value', [1, 2, 3], {'key': 'value'}])
+    def test_run_script_inputs_as_env_variables_no_override(self, value):
+        props = self._execute(custom_input=value)
+        return_value = props['test_value'].value
+        expected = return_value if isinstance(value, basestring) else json.loads(return_value)
+        assert value == expected
+
+    @pytest.mark.parametrize('value', ['string-value', [1, 2, 3], {'key': 'value'}])
+    def test_run_script_inputs_as_env_variables_process_env_override(self, value):
+        props = self._execute(custom_input='custom-input-value',
+                              env={'custom_env_var': value})
+        return_value = props['test_value'].value
+        expected = return_value if isinstance(value, basestring) else json.loads(return_value)
+        assert value == expected
+
+    def test_run_script_error_in_script(self):
+        exception = self._execute_and_get_task_exception()
+        assert isinstance(exception, TaskException)
+
+    def test_run_script_abort_immediate(self):
+        exception = self._execute_and_get_task_exception()
+        assert isinstance(exception, TaskAbortException)
+        assert exception.message == 'abort-message'
+
+    def test_run_script_retry(self):
+        exception = self._execute_and_get_task_exception()
+        assert isinstance(exception, TaskRetryException)
+        assert exception.message == 'retry-message'
+
+    def test_run_script_abort_error_ignored_by_script(self):
+        exception = self._execute_and_get_task_exception()
+        assert isinstance(exception, TaskAbortException)
+        assert exception.message == 'abort-message'
+
+    def test_run_commands(self):
+        temp_file_path = '/tmp/very_temporary_file'
+        with self._ssh_env():
+            if files.exists(temp_file_path):
+                fabric.api.run('rm {0}'.format(temp_file_path))
+        self._execute(commands=['touch {0}'.format(temp_file_path)])
+        with self._ssh_env():
+            assert files.exists(temp_file_path)
+            fabric.api.run('rm {0}'.format(temp_file_path))
+
+    @pytest.fixture(autouse=True)
+    def _setup(self, request, workflow_context, executor, capfd, server):
+        self._workflow_context = workflow_context
+        self._executor = executor
+        self._capfd = capfd
+        self._server = server  # kept so _ssh_env() can be used without arguments
+        self.test_name = request.node.originalname or request.node.name
+        with self._ssh_env():
+            for directory in [constants.DEFAULT_BASE_DIR, _CUSTOM_BASE_DIR]:
+                if files.exists(directory):
+                    fabric.api.run('rm -rf {0}'.format(directory))
+
+    @contextlib.contextmanager
+    def _ssh_env(self):
+        with self._capfd.disabled():
+            with context_managers.settings(fabric.api.hide('everything'),
+                                           host_string='localhost:{0}'.format(self._server.port),
+                                           **_FABRIC_ENV):
+                yield
+
+    def _execute(self,
+                 env=None,
+                 use_sudo=False,
+                 hide_output=None,
+                 process=None,
+                 custom_input='',
+                 test_operations=None,
+                 commands=None):
+        process = process or {}
+        if env:
+            process.setdefault('env', {}).update(env)
+
+        test_operations = test_operations or [self.test_name]
+
+        local_script_path = os.path.join(resources.DIR, 'scripts', 'test_ssh.sh')
+        script_path = os.path.basename(local_script_path)
+        self._upload(local_script_path, script_path)
+
+        if commands:
+            operation = operations.run_commands_with_ssh
+        else:
+            operation = operations.run_script_with_ssh
+
+        node = self._workflow_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+        arguments = {
+            'script_path': script_path,
+            'fabric_env': _FABRIC_ENV,
+            'process': process,
+            'use_sudo': use_sudo,
+            'custom_env_var': custom_input,
+            'test_operation': '',
+        }
+        if hide_output:
+            arguments['hide_output'] = hide_output
+        if commands:
+            arguments['commands'] = commands
+        interface = mock.models.create_interface(
+            node.service,
+            'test',
+            'op',
+            operation_kwargs=dict(
+                function='{0}.{1}'.format(
+                    operations.__name__,
+                    operation.__name__),
+                arguments=arguments)
+        )
+        node.interfaces[interface.name] = interface
+
+        @workflow
+        def mock_workflow(ctx, graph):
+            ops = []
+            for test_operation in test_operations:
+                op_arguments = arguments.copy()
+                op_arguments['test_operation'] = test_operation
+                ops.append(api.task.OperationTask(
+                    node,
+                    interface_name='test',
+                    operation_name='op',
+                    arguments=op_arguments))
+
+            graph.sequence(*ops)
+            return graph
+        tasks_graph = mock_workflow(ctx=self._workflow_context)  # pylint: disable=no-value-for-parameter
+        graph_compiler.GraphCompiler(
+            self._workflow_context, self._executor.__class__).compile(tasks_graph)
+        eng = engine.Engine({self._executor.__class__: self._executor})
+        eng.execute(self._workflow_context)
+        return self._workflow_context.model.node.get_by_name(
+            mock.models.DEPENDENCY_NODE_NAME).attributes
+
+    def _execute_and_get_task_exception(self, *args, **kwargs):
+        signal = events.on_failure_task_signal
+        with events_collector(signal) as collected:
+            with pytest.raises(ExecutorException):
+                self._execute(*args, **kwargs)
+        return collected[signal][0]['kwargs']['exception']
+
+    def _upload(self, source, path):
+        self._workflow_context.resource.service.upload(
+            entry_id=str(self._workflow_context.service.id),
+            source=source,
+            path=path)
+
+    @pytest.fixture
+    def executor(self):
+        result = process.ProcessExecutor()
+        try:
+            yield result
+        finally:
+            result.close()
+
+    @pytest.fixture
+    def workflow_context(self, tmpdir):
+        workflow_context = mock.context.simple(str(tmpdir))
+        workflow_context.states = []
+        workflow_context.exception = None
+        yield workflow_context
+        storage.release_sqlite_storage(workflow_context.model)
+
+
+class TestFabricEnvHideGroupsAndRunCommands(object):
+
+    def test_fabric_env_default_override(self):
+        # first sanity for no override
+        self._run()
+        assert self.mock.settings_merged['timeout'] == constants.FABRIC_ENV_DEFAULTS['timeout']
+        # now override
+        invocation_fabric_env = self.default_fabric_env.copy()
+        timeout = 1000000
+        invocation_fabric_env['timeout'] = timeout
+        self._run(fabric_env=invocation_fabric_env)
+        assert self.mock.settings_merged['timeout'] == timeout
+
+    def test_implicit_host_string(self, mocker):
+        expected_host_address = '1.1.1.1'
+        mocker.patch.object(self._Ctx.task.actor, 'host')
+        mocker.patch.object(self._Ctx.task.actor.host, 'host_address', expected_host_address)
+        fabric_env = self.default_fabric_env.copy()
+        del fabric_env['host_string']
+        self._run(fabric_env=fabric_env)
+        assert self.mock.settings_merged['host_string'] == expected_host_address
+
+    def test_explicit_host_string(self):
+        fabric_env = self.default_fabric_env.copy()
+        host_string = 'explicit_host_string'
+        fabric_env['host_string'] = host_string
+        self._run(fabric_env=fabric_env)
+        assert self.mock.settings_merged['host_string'] == host_string
+
+    def test_override_warn_only(self):
+        fabric_env = self.default_fabric_env.copy()
+        self._run(fabric_env=fabric_env)
+        assert self.mock.settings_merged['warn_only'] is True
+        fabric_env = self.default_fabric_env.copy()
+        fabric_env['warn_only'] = False
+        self._run(fabric_env=fabric_env)
+        assert self.mock.settings_merged['warn_only'] is False
+
+    def test_missing_host_string(self):
+        with pytest.raises(TaskAbortException) as exc_ctx:
+            fabric_env = self.default_fabric_env.copy()
+            del fabric_env['host_string']
+            self._run(fabric_env=fabric_env)
+        assert '`host_string` not supplied' in str(exc_ctx.value)
+
+    def test_missing_user(self):
+        with pytest.raises(TaskAbortException) as exc_ctx:
+            fabric_env = self.default_fabric_env.copy()
+            del fabric_env['user']
+            self._run(fabric_env=fabric_env)
+        assert '`user` not supplied' in str(exc_ctx.value)
+
+    def test_missing_key_or_password(self):
+        with pytest.raises(TaskAbortException) as exc_ctx:
+            fabric_env = self.default_fabric_env.copy()
+            del fabric_env['key_filename']
+            self._run(fabric_env=fabric_env)
+        assert 'Access credentials not supplied' in str(exc_ctx.value)
+
+    def test_hide_in_settings_and_non_viable_groups(self):
+        groups = ('running', 'stdout')
+        self._run(hide_output=groups)
+        assert set(self.mock.settings_merged['hide_output']) == set(groups)
+        with pytest.raises(TaskAbortException) as exc_ctx:
+            self._run(hide_output=('running', 'bla'))
+        assert '`hide_output` must be a subset of' in str(exc_ctx.value)
+
+    def test_run_commands(self):
+        def test(use_sudo):
+            commands = ['command1', 'command2']
+            self._run(
+                commands=commands,
+                use_sudo=use_sudo)
+            assert all(item in self.mock.settings_merged.items() for
+                       item in self.default_fabric_env.items())
+            assert self.mock.settings_merged['warn_only'] is True
+            assert self.mock.settings_merged['use_sudo'] == use_sudo
+            assert self.mock.commands == commands
+            self.mock.settings_merged = {}
+            self.mock.commands = []
+        test(use_sudo=False)
+        test(use_sudo=True)
+
+    def test_failed_command(self):
+        with pytest.raises(ProcessException) as exc_ctx:
+            self._run(commands=['fail'])
+        exception = exc_ctx.value
+        assert exception.stdout == self.MockCommandResult.stdout
+        assert exception.stderr == self.MockCommandResult.stderr
+        assert exception.command == self.MockCommandResult.command
+        assert exception.exit_code == self.MockCommandResult.return_code
+
+    class MockCommandResult(object):
+        stdout = 'mock_stdout'
+        stderr = 'mock_stderr'
+        command = 'mock_command'
+        return_code = 1
+
+        def __init__(self, failed):
+            self.failed = failed
+
+    class MockFabricApi(object):
+
+        def __init__(self):
+            self.commands = []
+            self.settings_merged = {}
+
+        @contextlib.contextmanager
+        def settings(self, *args, **kwargs):
+            self.settings_merged.update(kwargs)
+            if args:
+                groups = args[0]
+                self.settings_merged.update({'hide_output': groups})
+            yield
+
+        def run(self, command):
+            self.commands.append(command)
+            self.settings_merged['use_sudo'] = False
+            return TestFabricEnvHideGroupsAndRunCommands.MockCommandResult(command == 'fail')
+
+        def sudo(self, command):
+            self.commands.append(command)
+            self.settings_merged['use_sudo'] = True
+            return TestFabricEnvHideGroupsAndRunCommands.MockCommandResult(command == 'fail')
+
+        def hide(self, *groups):
+            return groups
+
+        def exists(self, *args, **kwargs):
+            raise RuntimeError
+
+    class _Ctx(object):
+        INSTRUMENTATION_FIELDS = ()
+
+        class Task(object):
+            @staticmethod
+            def abort(message=None):
+                models.Task.abort(message)
+            actor = None
+
+        class Actor(object):
+            host = None
+
+        class Model(object):
+            @contextlib.contextmanager
+            def instrument(self, *args, **kwargs):
+                yield
+        task = Task
+        task.actor = Actor
+        model = Model()
+        logger = logging.getLogger()
+
+    @staticmethod
+    @contextlib.contextmanager
+    def _mock_self_logging(*args, **kwargs):
+        yield
+    _Ctx.logging_handlers = _mock_self_logging
+
+    @pytest.fixture(autouse=True)
+    def _setup(self, mocker):
+        self.default_fabric_env = {
+            'host_string': 'test',
+            'user': 'test',
+            'key_filename': 'test',
+        }
+        self.mock = self.MockFabricApi()
+        mocker.patch('fabric.api', self.mock)
+
+    def _run(self,
+             commands=(),
+             fabric_env=None,
+             process=None,
+             use_sudo=False,
+             hide_output=None):
+        operations.run_commands_with_ssh(
+            ctx=self._Ctx,
+            commands=commands,
+            process=process,
+            fabric_env=fabric_env or self.default_fabric_env,
+            use_sudo=use_sudo,
+            hide_output=hide_output)
+
+
+class TestUtilityFunctions(object):
+
+    def test_paths(self):
+        base_dir = '/path'
+        local_script_path = '/local/script/path.py'
+        paths = ssh_operations._Paths(base_dir=base_dir,
+                                      local_script_path=local_script_path)
+        assert paths.local_script_path == local_script_path
+        assert paths.remote_ctx_dir == base_dir
+        assert paths.base_script_path == 'path.py'
+        assert paths.remote_ctx_path == '/path/ctx'
+        assert paths.remote_scripts_dir == '/path/scripts'
+        assert paths.remote_work_dir == '/path/work'
+        assert paths.remote_env_script_path.startswith('/path/scripts/env-path.py-')
+        assert paths.remote_script_path.startswith('/path/scripts/path.py-')
+
+    def test_write_environment_script_file(self):
+        base_dir = '/path'
+        local_script_path = '/local/script/path.py'
+        paths = ssh_operations._Paths(base_dir=base_dir,
+                                      local_script_path=local_script_path)
+        env = {'one': "'1'"}
+        local_socket_url = 'local_socket_url'
+        remote_socket_url = 'remote_socket_url'
+        env_script_lines = set([l for l in ssh_operations._write_environment_script_file(
+            process={'env': env},
+            paths=paths,
+            local_socket_url=local_socket_url,
+            remote_socket_url=remote_socket_url
+        ).getvalue().split('\n') if l])
+        expected_env_script_lines = set([
+            'export PATH=/path:$PATH',
+            'export PYTHONPATH=/path:$PYTHONPATH',
+            'chmod +x /path/ctx',
+            'chmod +x {0}'.format(paths.remote_script_path),
+            'export CTX_SOCKET_URL={0}'.format(remote_socket_url),
+            'export LOCAL_CTX_SOCKET_URL={0}'.format(local_socket_url),
+            'export one=\'1\''
+        ])
+        assert env_script_lines == expected_env_script_lines
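
The session-scoped ``server`` fixture above comes from the ``mock-ssh-server``
package (imported as ``mockssh``), which starts a throwaway local SSH daemon whose
user names map to private key paths. A minimal standalone sketch of the same idea
(the key path and command are placeholders):

    import mockssh
    import paramiko

    with mockssh.Server({'test': '/path/to/test_key'}) as server:
        client = paramiko.SSHClient()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        client.connect('localhost', port=server.port, username='test',
                       key_filename='/path/to/test_key')
        _, stdout, _ = client.exec_command('echo hello')
        print stdout.read()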

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/tests/end2end/test_hello_world.py
----------------------------------------------------------------------
diff --git a/tests/end2end/test_hello_world.py b/tests/end2end/test_hello_world.py
index 5e4d7fd..094ffc3 100644
--- a/tests/end2end/test_hello_world.py
+++ b/tests/end2end/test_hello_world.py
@@ -20,7 +20,7 @@ from .. import helpers
 
 
 def test_hello_world(testenv):
-    hello_world_template_uri = helpers.get_example_uri('hello-world', 'helloworld.yaml')
+    hello_world_template_uri = helpers.get_example_uri('hello-world', 'hello-world.yaml')
     service_name = testenv.install_service(hello_world_template_uri)
 
     try:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index a96c91d..b5df939 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -29,26 +29,33 @@ from aria.orchestrator import events
 from aria.orchestrator import workflow
 from aria.orchestrator.workflows import api
 from aria.orchestrator.workflows.executor import process
-from aria.orchestrator.workflows.core import engine, graph_compiler
+from aria.orchestrator.workflows.core import (engine, graph_compiler)
 from aria.orchestrator.workflows.exceptions import ExecutorException
-from aria.orchestrator.exceptions import TaskAbortException, TaskRetryException
+from aria.orchestrator.exceptions import (TaskAbortException, TaskRetryException)
 from aria.orchestrator.execution_plugin import operations
 from aria.orchestrator.execution_plugin import constants
-from aria.orchestrator.execution_plugin.exceptions import ProcessException, TaskException
+from aria.orchestrator.execution_plugin.exceptions import (ProcessException, TaskException)
 from aria.orchestrator.execution_plugin.ssh import operations as ssh_operations
 
 from tests import mock, storage, resources
 from tests.orchestrator.workflows.helpers import events_collector
 
+
 _CUSTOM_BASE_DIR = '/tmp/new-aria-ctx'
 
 _FABRIC_ENV = {
     'host_string': 'localhost',
     'user': 'travis',
-    'password': 'travis'
+    # 'password': 'travis',
+    'key_filename': '/home/travis/.ssh/id_rsa'
 }
 
 
+# To help debug in case of connection failures
+logging.getLogger('paramiko.transport').addHandler(logging.StreamHandler())
+logging.getLogger('paramiko.transport').setLevel(logging.DEBUG)
+
+
 @pytest.mark.skipif(not os.environ.get('TRAVIS'), reason='actual ssh server required')
 class TestWithActualSSHServer(object):
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
index 5a46532..260f0bf 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
@@ -89,7 +89,7 @@ topology_template:
       value: *DEFAULT_OPENSTACK_CREDENTIAL
 
   node_templates:
-  
+
     # Application
 
     node_cellar:
@@ -135,7 +135,7 @@ topology_template:
           properties:
             protocol: udp
             url_path: /nodecellar
-    
+
     node_cellar_database:
       description: >-
         Node Cellar MongoDB database.
@@ -151,7 +151,7 @@ topology_template:
           repository: node_cellar
 
     # Server software
-    
+
     nodejs:
       description: >-
         Node.js instance.
@@ -202,7 +202,7 @@ topology_template:
     loadbalancer:
       type: nginx.LoadBalancer
       properties:
-        algorithm: round-robin   
+        algorithm: round-robin
 
     # Hosts
 
@@ -273,7 +273,7 @@ topology_template:
           create: create_data_volume.sh
 
   groups:
-  
+
     node_cellar_group:
       type: openstack.Secured
       members:
@@ -286,7 +286,7 @@ topology_template:
             openstack_credential: { get_input: openstack_credential }
 
   policies:
-  
+
     app_scaling:
       type: aria.Scaling
       properties:
@@ -295,7 +295,7 @@ topology_template:
       targets:
         - node_cellar
         - nodejs
-  
+
     host_scaling:
       type: openstack.Scaling
       properties:
@@ -304,7 +304,7 @@ topology_template:
         default_instances: 2
       targets: # node templates or groups
         - node_cellar_group
-    
+
     juju:
       description: >-
         Juju plugin executes charms.
@@ -345,7 +345,7 @@ policy_types:
     description: >-
       Workflow to put all nodes in/out of maintenance mode. For web servers, this will show a "this
       site is under maintenance and we'll be back soon" web page. Database nodes will then close all
-      client connections cleanly and shut down services. 
+      client connections cleanly and shut down services.
     derived_from: aria.Workflow
     properties:
       implementation:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/mongodb.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/mongodb.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/mongodb.yaml
index 34d0a9d..7031252 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/mongodb.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/mongodb.yaml
@@ -44,7 +44,7 @@ node_types:
   mongodb.Database:
     description: >-
       MongoDB database.
-      
+
       Supports importing database data if a mongodb.DatabaseDump is provided.
     derived_from: tosca.nodes.Database
     interfaces:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nodejs.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nodejs.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nodejs.yaml
index 19cc7b9..2b4d451 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nodejs.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nodejs.yaml
@@ -30,7 +30,7 @@ node_types:
         file: https://nodejs.org/dist/v4.4.7/node-v4.4.7-linux-x64.tar.xz
         deploy_path: /opt/nodejs
     capabilities:
-      data_endpoint: # @override 
+      data_endpoint: # @override
         type: tosca.capabilities.Endpoint
         properties:
           port:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/openstack.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/openstack.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/openstack.yaml
index 99ee902..6941c1a 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/openstack.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/openstack.yaml
@@ -31,7 +31,7 @@ node_types:
 
       You may assign an image_id or attach an openstack.Image artifact (the artifact
       will take precedence).
-    
+
       You may assign either flavor_id or flavor_name (flavor_id will take precedence).
       If neither are assigned, flavor_name has a default value.
     derived_from: tosca.nodes.Compute
@@ -87,7 +87,7 @@ node_types:
   openstack.Volume:
     description: >-
       OpenStack volume.
-      
+
       See: http://developer.openstack.org/api-ref-blockstorage-v2.html
     derived_from: tosca.nodes.BlockStorage
     properties:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/tests/resources/service-templates/tosca-simple-1.0/types/shorthand-1/shorthand-1.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/types/shorthand-1/shorthand-1.yaml b/tests/resources/service-templates/tosca-simple-1.0/types/shorthand-1/shorthand-1.yaml
index b295f95..bb5a84e 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/types/shorthand-1/shorthand-1.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/types/shorthand-1/shorthand-1.yaml
@@ -6,17 +6,17 @@ description: >-
 topology_template:
 
   node_templates:
-  
+
     my_server:
       type: Compute
       requirements:
         - local_storage:
-            node: my_block_storage           
+            node: my_block_storage
             relationship:
               type: AttachesTo
               properties:
                 location: /path1/path2
-  
+
     my_block_storage:
       type: BlockStorage
       properties:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/tests/resources/service-templates/tosca-simple-1.0/types/typequalified-1/typequalified-1.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/types/typequalified-1/typequalified-1.yaml b/tests/resources/service-templates/tosca-simple-1.0/types/typequalified-1/typequalified-1.yaml
index 5f11fd4..b54604f 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/types/typequalified-1/typequalified-1.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/types/typequalified-1/typequalified-1.yaml
@@ -6,17 +6,17 @@ description: >-
 topology_template:
 
   node_templates:
-  
+
     my_server:
       type: tosca:Compute
       requirements:
         - local_storage:
-            node: my_block_storage           
+            node: my_block_storage
             relationship:
               type: AttachesTo
               properties:
                 location: /path1/path2
-  
+
     my_block_storage:
       type: tosca:BlockStorage
       properties:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/126d4e89/tox.ini
----------------------------------------------------------------------
diff --git a/tox.ini b/tox.ini
index ff71e05..765435a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -13,6 +13,7 @@
 [tox]
 envlist=py27,py26,py27e2e,py26e2e,pywin,py27ssh,pylint_code,pylint_tests,docs
 processes={env:PYTEST_PROCESSES:auto}
+py26={env:PY26:python2.6}
 
 [testenv]
 whitelist_externals=
@@ -28,11 +29,11 @@ deps=
   --requirement
     tests/requirements.txt
 basepython=
-  py26: python2.6
+  py26: {[tox]py26}
   py27: python2.7
-  py26e2e: python2.6
+  py26e2e: {[tox]py26}
   py27e2e: python2.7
-  py26ssh: python2.6
+  py26ssh: {[tox]py26}
   py27ssh: python2.7
   pywin: {env:PYTHON:}\python.exe
   pylint_code: python2.7
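
The new ``py26`` key makes the Python 2.6 interpreter overridable from the
environment rather than hard-coded in each ``basepython`` line: ``{env:PY26:python2.6}``
falls back to ``python2.6`` when the ``PY26`` variable is unset, so (with a
hypothetical path) ``PY26=/opt/python2.6/bin/python tox -e py26`` selects a custom
interpreter while plain ``tox`` behaves as before.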

