Repository: incubator-ariatosca

Updated Branches:
  refs/heads/Unified_coerce 2c2fe8688 -> 367b18bc0 (forced update)
All tests pass :) Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/aa01cd4e Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/aa01cd4e Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/aa01cd4e Branch: refs/heads/Unified_coerce Commit: aa01cd4e9a60507bafb98707c7793c3a89424db8 Parents: dd99f0f Author: Tal Liron <[email protected]> Authored: Tue Mar 7 12:13:28 2017 -0600 Committer: Tal Liron <[email protected]> Committed: Tue Mar 7 21:22:45 2017 -0600 ---------------------------------------------------------------------- aria/modeling/bases.py | 2 +- aria/modeling/misc.py | 6 +- aria/modeling/orchestration.py | 2 +- aria/modeling/service.py | 12 +- aria/modeling/service_template.py | 8 +- aria/orchestrator/workflows/api/task.py | 2 +- aria/orchestrator/workflows/api/task_graph.py | 2 +- .../workflows/builtin/execute_operation.py | 18 +- aria/orchestrator/workflows/builtin/heal.py | 188 +++++++++---------- aria/parser/modeling/context.py | 5 +- aria/utils/uuid.py | 6 +- docs/requirements.txt | 2 +- tests/mock/models.py | 6 +- tests/mock/topology.py | 6 +- tests/orchestrator/context/test_operation.py | 9 +- .../orchestrator/execution_plugin/test_local.py | 26 +-- tests/orchestrator/execution_plugin/test_ssh.py | 29 +-- .../orchestrator/workflows/builtin/test_heal.py | 20 +- tests/storage/test_models.py | 8 +- tests/storage/test_structures.py | 12 +- 20 files changed, 187 insertions(+), 182 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/modeling/bases.py ---------------------------------------------------------------------- diff --git a/aria/modeling/bases.py b/aria/modeling/bases.py index a4db320..efcb968 100644 --- a/aria/modeling/bases.py +++ b/aria/modeling/bases.py @@ -119,7 +119,7 @@ class ModelMixin(object): return cls._create_relationship(other_table, backref_kwargs, relationship_kwargs, backreference, key=key, foreign_key=foreign_key) - + @classmethod def one_to_many_relationship(cls, child_table, http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/modeling/misc.py ---------------------------------------------------------------------- diff --git a/aria/modeling/misc.py b/aria/modeling/misc.py index 0bb5cda..105876a 100644 --- a/aria/modeling/misc.py +++ b/aria/modeling/misc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+# pylint: disable=no-self-argument, no-member, abstract-method + import cPickle as pickle import logging @@ -113,7 +115,7 @@ class TypeBase(InstanceModelMixin): __tablename__ = 'type' - variant = Column(Text, nullable=False) + variant = Column(Text, nullable=False) description = Column(Text) _role = Column(Text, name='role') @@ -135,7 +137,7 @@ class TypeBase(InstanceModelMixin): return cls.foreign_key('type', nullable=True) # endregion - + @property def role(self): def get_role(the_type): http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/modeling/orchestration.py ---------------------------------------------------------------------- diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py index d9d9908..c842c07 100644 --- a/aria/modeling/orchestration.py +++ b/aria/modeling/orchestration.py @@ -152,7 +152,7 @@ class ServiceUpdateBase(ModelMixin): steps = None - __tablename__ = 'service_update' + __tablename__ = 'service_update' _private_fields = ['execution_fk', 'service_fk'] http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/modeling/service.py ---------------------------------------------------------------------- diff --git a/aria/modeling/service.py b/aria/modeling/service.py index eb8acb5..bf189f7 100644 --- a/aria/modeling/service.py +++ b/aria/modeling/service.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# pylint: disable=no-self-argument, no-member, abstract-method +# pylint: disable=too-many-lines, no-self-argument, no-member, abstract-method from sqlalchemy import ( Column, @@ -286,7 +286,7 @@ class ServiceBase(InstanceModelMixin): # pylint: disable=too-many-public-methods self._dump_graph_node(context, target_node) -class NodeBase(InstanceModelMixin): +class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods """ Usually an instance of a :class:`NodeTemplate`. @@ -844,7 +844,7 @@ class SubstitutionBase(InstanceModelMixin): class SubstitutionMappingBase(InstanceModelMixin): """ Used by :class:`Substitution` to map a capability or a requirement to a node. - + Only one of `capability_template` and `requirement_template` can be set. Usually an instance of a :class:`SubstitutionTemplate`. @@ -1186,7 +1186,7 @@ class CapabilityBase(InstanceModelMixin): class InterfaceBase(InstanceModelMixin): """ A typed set of :class:`Operation`. - + Usually an instance of :class:`InterfaceTemplate`. :ivar name: Name (unique for the node, group, or relationship) @@ -1296,7 +1296,7 @@ class InterfaceBase(InstanceModelMixin): class OperationBase(InstanceModelMixin): """ An operation in a :class:`Interface`. - + Might be an instance of :class:`OperationTemplate`. :ivar name: Name (unique for the interface or service) @@ -1423,7 +1423,7 @@ class OperationBase(InstanceModelMixin): class ArtifactBase(InstanceModelMixin): """ A file associated with a :class:`Node`. - + Usually an instance of :class:`ArtifactTemplate`. :ivar name: Name (unique for the node) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/modeling/service_template.py ---------------------------------------------------------------------- diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py index ebbe904..092de51 100644 --- a/aria/modeling/service_template.py +++ b/aria/modeling/service_template.py @@ -37,7 +37,7 @@ from . 
import ( ) -class ServiceTemplateBase(TemplateModelMixin): +class ServiceTemplateBase(TemplateModelMixin): # pylint: disable=too-many-public-methods """ A service template is a source for creating :class:`Service` instances. @@ -827,7 +827,7 @@ class SubstitutionTemplateBase(TemplateModelMixin): class SubstitutionTemplateMappingBase(TemplateModelMixin): """ Used by :class:`SubstitutionTemplate` to map a capability or a requirement to a node. - + Only one of `capability_template` and `requirement_template` can be set. :ivar name: Exposed capability or requirement name @@ -1120,7 +1120,7 @@ class RelationshipTemplateBase(TemplateModelMixin): """ Optional addition to a :class:`RequirementTemplate` in :class:`NodeTemplate` that can be applied when the requirement is matched with a capability. - + Note that a relationship template here is not equivalent to a relationship template entity in TOSCA. For example, a TOSCA requirement specifying a relationship type instead of a template would still be represented here as a relationship template. @@ -1276,7 +1276,7 @@ class CapabilityTemplateBase(TemplateModelMixin): # endregion def satisfies_requirement(self, - context, + context, # pylint: disable=unused-argument source_node_template, requirement, target_node_template): http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/orchestrator/workflows/api/task.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py index f1812b1..d434da8 100644 --- a/aria/orchestrator/workflows/api/task.py +++ b/aria/orchestrator/workflows/api/task.py @@ -61,7 +61,7 @@ class OperationTask(BaseTask): SOURCE_OPERATION = 'source' TARGET_OPERATION = 'target' - + NAME_FORMAT = '{type}:{id}->{interface}/{operation}' def __init__(self, http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/orchestrator/workflows/api/task_graph.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/workflows/api/task_graph.py b/aria/orchestrator/workflows/api/task_graph.py index 2ead4d0..92a39d2 100644 --- a/aria/orchestrator/workflows/api/task_graph.py +++ b/aria/orchestrator/workflows/api/task_graph.py @@ -17,11 +17,11 @@ Task graph. Used by users to build workflows """ -from ....utils.uuid import generate_uuid from collections import Iterable from networkx import DiGraph, topological_sort +from ....utils.uuid import generate_uuid from . 
import task as api_task http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/orchestrator/workflows/builtin/execute_operation.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py index e7c2085..ed4ada3 100644 --- a/aria/orchestrator/workflows/builtin/execute_operation.py +++ b/aria/orchestrator/workflows/builtin/execute_operation.py @@ -51,24 +51,24 @@ def execute_operation( """ subgraphs = {} # filtering node instances - filtered_nodes = list(_filter_node_instances( + filtered_nodes = list(_filter_nodes( context=ctx, node_template_ids=node_template_ids, node_ids=node_ids, type_names=type_names)) if run_by_dependency_order: - filtered_node_instances_ids = set(node_instance.id + filtered_node_ids = set(node_instance.id for node_instance in filtered_nodes) - for node in ctx.node_instances: - if node.id not in filtered_node_instances_ids: + for node in ctx.nodes: + if node.id not in filtered_node_ids: subgraphs[node.id] = ctx.task_graph( name='execute_operation_stub_{0}'.format(node.id)) # registering actual tasks to sequences for node in filtered_nodes: graph.add_tasks( - _create_node_instance_task( + _create_node_task( node=node, interface_name=interface_name, operation_name=operation_name, @@ -77,8 +77,8 @@ def execute_operation( ) ) - for _, node_instance_sub_workflow in subgraphs.items(): - graph.add_tasks(node_instance_sub_workflow) + for _, node_sub_workflow in subgraphs.items(): + graph.add_tasks(node_sub_workflow) # adding tasks dependencies if required if run_by_dependency_order: @@ -88,7 +88,7 @@ def execute_operation( source_task=subgraphs[node.id], after=[subgraphs[relationship.target_id]]) -def _filter_node_instances(context, node_template_ids=(), node_ids=(), type_names=()): +def _filter_nodes(context, node_template_ids=(), node_ids=(), type_names=()): def _is_node_template_by_id(node_template_id): return not node_template_ids or node_template_id in node_template_ids @@ -105,7 +105,7 @@ def _filter_node_instances(context, node_template_ids=(), node_ids=(), type_name yield node -def _create_node_instance_task( +def _create_node_task( node, interface_name, operation_name, http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/orchestrator/workflows/builtin/heal.py ---------------------------------------------------------------------- diff --git a/aria/orchestrator/workflows/builtin/heal.py b/aria/orchestrator/workflows/builtin/heal.py index 2592323..92b96ea 100644 --- a/aria/orchestrator/workflows/builtin/heal.py +++ b/aria/orchestrator/workflows/builtin/heal.py @@ -26,156 +26,156 @@ from ..api import task @workflow -def heal(ctx, graph, node_instance_id): +def heal(ctx, graph, node_id): """ The heal workflow :param WorkflowContext ctx: the workflow context :param TaskGraph graph: the graph which will describe the workflow. 
- :param node_instance_id: the id of the node instance to heal + :param node_id: the id of the node to heal :return: """ - failing_node = ctx.model.node.get(node_instance_id) + failing_node = ctx.model.node.get(node_id) host_node = ctx.model.node.get(failing_node.host.id) - failed_node_instance_subgraph = _get_contained_subgraph(ctx, host_node) - failed_node_instance_ids = list(n.id for n in failed_node_instance_subgraph) + failed_node_subgraph = _get_contained_subgraph(ctx, host_node) + failed_node_ids = list(n.id for n in failed_node_subgraph) - targeted_node_instances = [node_instance for node_instance in ctx.node_instances - if node_instance.id not in failed_node_instance_ids] + targeted_nodes = [node for node in ctx.nodes + if node.id not in failed_node_ids] uninstall_subgraph = task.WorkflowTask( heal_uninstall, - failing_node_instances=failed_node_instance_subgraph, - targeted_node_instances=targeted_node_instances + failing_nodes=failed_node_subgraph, + targeted_nodes=targeted_nodes ) install_subgraph = task.WorkflowTask( heal_install, - failing_node_instances=failed_node_instance_subgraph, - targeted_node_instances=targeted_node_instances) + failing_nodes=failed_node_subgraph, + targeted_nodes=targeted_nodes) graph.sequence(uninstall_subgraph, install_subgraph) -@workflow(suffix_template='{failing_node_instances}') -def heal_uninstall(ctx, graph, failing_node_instances, targeted_node_instances): +@workflow(suffix_template='{failing_nodes}') +def heal_uninstall(ctx, graph, failing_nodes, targeted_nodes): """ the uninstall part of the heal mechanism :param WorkflowContext ctx: the workflow context :param TaskGraph graph: the task graph to edit. - :param failing_node_instances: the failing nodes to heal. - :param targeted_node_instances: the targets of the relationships where the failing node are + :param failing_nodes: the failing nodes to heal. 
+ :param targeted_nodes: the targets of the relationships where the failing node are source :return: """ - node_instance_sub_workflows = {} - - # Create install stub workflow for each unaffected node instance - for node_instance in targeted_node_instances: - node_instance_stub = task.StubTask() - node_instance_sub_workflows[node_instance.id] = node_instance_stub - graph.add_tasks(node_instance_stub) - - # create install sub workflow for every node instance - for node_instance in failing_node_instances: - node_instance_sub_workflow = task.WorkflowTask(uninstall_node, - node_instance=node_instance) - node_instance_sub_workflows[node_instance.id] = node_instance_sub_workflow - graph.add_tasks(node_instance_sub_workflow) - - # create dependencies between the node instance sub workflow - for node_instance in failing_node_instances: - node_instance_sub_workflow = node_instance_sub_workflows[node_instance.id] - for relationship_instance in reversed(node_instance.outbound_relationship_instances): + node_sub_workflows = {} + + # Create install stub workflow for each unaffected node + for node in targeted_nodes: + node_stub = task.StubTask() + node_sub_workflows[node.id] = node_stub + graph.add_tasks(node_stub) + + # create install sub workflow for every node + for node in failing_nodes: + node_sub_workflow = task.WorkflowTask(uninstall_node, + node=node) + node_sub_workflows[node.id] = node_sub_workflow + graph.add_tasks(node_sub_workflow) + + # create dependencies between the node sub workflow + for node in failing_nodes: + node_sub_workflow = node_sub_workflows[node.id] + for relationship in reversed(node.outbound_relationships): graph.add_dependency( - node_instance_sub_workflows[relationship_instance.target_node_instance.id], - node_instance_sub_workflow) + node_sub_workflows[relationship.target_node.id], + node_sub_workflow) - # Add operations for intact nodes depending on a node instance belonging to node_instances - for node_instance in targeted_node_instances: - node_instance_sub_workflow = node_instance_sub_workflows[node_instance.id] + # Add operations for intact nodes depending on a node belonging to nodes + for node in targeted_nodes: + node_sub_workflow = node_sub_workflows[node.id] - for relationship_instance in reversed(node_instance.outbound_relationship_instances): + for relationship in reversed(node.outbound_relationships): - target_node_instance = \ - ctx.model.node.get(relationship_instance.target_node_instance.id) - target_node_instance_subgraph = node_instance_sub_workflows[target_node_instance.id] - graph.add_dependency(target_node_instance_subgraph, node_instance_sub_workflow) + target_node = \ + ctx.model.node.get(relationship.target_node.id) + target_node_subgraph = node_sub_workflows[target_node.id] + graph.add_dependency(target_node_subgraph, node_sub_workflow) - if target_node_instance in failing_node_instances: + if target_node in failing_nodes: dependency = relationship_tasks( - relationship_instance=relationship_instance, + relationship=relationship, operation_name='aria.interfaces.relationship_lifecycle.unlink') graph.add_tasks(*dependency) - graph.add_dependency(node_instance_sub_workflow, dependency) + graph.add_dependency(node_sub_workflow, dependency) -@workflow(suffix_template='{failing_node_instances}') -def heal_install(ctx, graph, failing_node_instances, targeted_node_instances): +@workflow(suffix_template='{failing_nodes}') +def heal_install(ctx, graph, failing_nodes, targeted_nodes): """ the install part of the heal mechanism :param WorkflowContext ctx: 
the workflow context :param TaskGraph graph: the task graph to edit. - :param failing_node_instances: the failing nodes to heal. - :param targeted_node_instances: the targets of the relationships where the failing node are + :param failing_nodes: the failing nodes to heal. + :param targeted_nodes: the targets of the relationships where the failing node are source :return: """ - node_instance_sub_workflows = {} + node_sub_workflows = {} # Create install sub workflow for each unaffected - for node_instance in targeted_node_instances: - node_instance_stub = task.StubTask() - node_instance_sub_workflows[node_instance.id] = node_instance_stub - graph.add_tasks(node_instance_stub) - - # create install sub workflow for every node instance - for node_instance in failing_node_instances: - node_instance_sub_workflow = task.WorkflowTask(install_node, - node_instance=node_instance) - node_instance_sub_workflows[node_instance.id] = node_instance_sub_workflow - graph.add_tasks(node_instance_sub_workflow) - - # create dependencies between the node instance sub workflow - for node_instance in failing_node_instances: - node_instance_sub_workflow = node_instance_sub_workflows[node_instance.id] - if node_instance.outbound_relationship_instances: + for node in targeted_nodes: + node_stub = task.StubTask() + node_sub_workflows[node.id] = node_stub + graph.add_tasks(node_stub) + + # create install sub workflow for every node + for node in failing_nodes: + node_sub_workflow = task.WorkflowTask(install_node, + node=node) + node_sub_workflows[node.id] = node_sub_workflow + graph.add_tasks(node_sub_workflow) + + # create dependencies between the node sub workflow + for node in failing_nodes: + node_sub_workflow = node_sub_workflows[node.id] + if node.outbound_relationships: dependencies = \ - [node_instance_sub_workflows[relationship_instance.target_node_instance.id] - for relationship_instance in node_instance.outbound_relationship_instances] - graph.add_dependency(node_instance_sub_workflow, dependencies) - - # Add operations for intact nodes depending on a node instance - # belonging to node_instances - for node_instance in targeted_node_instances: - node_instance_sub_workflow = node_instance_sub_workflows[node_instance.id] - - for relationship_instance in node_instance.outbound_relationship_instances: - target_node_instance = ctx.model.node.get( - relationship_instance.target_node_instance.id) - target_node_instance_subworkflow = node_instance_sub_workflows[target_node_instance.id] - graph.add_dependency(node_instance_sub_workflow, target_node_instance_subworkflow) - - if target_node_instance in failing_node_instances: + [node_sub_workflows[relationship.target_node.id] + for relationship in node.outbound_relationships] + graph.add_dependency(node_sub_workflow, dependencies) + + # Add operations for intact nodes depending on a node + # belonging to nodes + for node in targeted_nodes: + node_sub_workflow = node_sub_workflows[node.id] + + for relationship in node.outbound_relationships: + target_node = ctx.model.node.get( + relationship.target_node.id) + target_node_subworkflow = node_sub_workflows[target_node.id] + graph.add_dependency(node_sub_workflow, target_node_subworkflow) + + if target_node in failing_nodes: dependent = relationship_tasks( - relationship_instance=relationship_instance, + relationship=relationship, operation_name='aria.interfaces.relationship_lifecycle.establish') graph.add_tasks(*dependent) - graph.add_dependency(dependent, node_instance_sub_workflow) + graph.add_dependency(dependent, 
node_sub_workflow) -def _get_contained_subgraph(context, host_node_instance): - contained_instances = [node_instance - for node_instance in context.node_instances - if node_instance.host_fk == host_node_instance.id and - node_instance.host_fk != node_instance.id] - result = [host_node_instance] +def _get_contained_subgraph(context, host_node): + contained_instances = [node + for node in context.nodes + if node.host_fk == host_node.id and + node.host_fk != node.id] + result = [host_node] if not contained_instances: return result result.extend(contained_instances) - for node_instance in contained_instances: - result.extend(_get_contained_subgraph(context, node_instance)) + for node in contained_instances: + result.extend(_get_contained_subgraph(context, node)) return set(result) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/parser/modeling/context.py ---------------------------------------------------------------------- diff --git a/aria/parser/modeling/context.py b/aria/parser/modeling/context.py index 52e724a..dff5991 100644 --- a/aria/parser/modeling/context.py +++ b/aria/parser/modeling/context.py @@ -15,9 +15,7 @@ import itertools -from ...utils.collections import StrictDict, prune, OrderedDict -from ...utils.formatting import as_raw -from ...utils.console import puts +from ...utils.collections import StrictDict, prune from ...utils.uuid import generate_uuid @@ -51,7 +49,6 @@ class ModelingContext(object): """ def __init__(self): - from ...modeling.models import Type self.template = None self.instance = None self.node_id_format = '{template}_{id}' http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/aria/utils/uuid.py ---------------------------------------------------------------------- diff --git a/aria/utils/uuid.py b/aria/utils/uuid.py index b5f39f8..1f340c6 100644 --- a/aria/utils/uuid.py +++ b/aria/utils/uuid.py @@ -31,10 +31,10 @@ UUID_LOWERCASE_ALPHANUMERIC = ShortUUID(alphabet='abcdefghijklmnopqrstuvwxyz0123 def generate_uuid(length=None, variant='base57'): """ A random string with varying degrees of guarantee of universal uniqueness. - + :param variant: options are: - * 'base57' (the default) uses a mix of upper and lowercase alphanumerics ensuring - no visually ambiguous characters; default length 22 + * 'base57' (the default) uses a mix of upper and lowercase alphanumerics + ensuring no visually ambiguous characters; default length 22 * 'alphanumeric' uses lowercase alphanumeric; default length 25 * 'uuid' user lowercase hexadecimal in the classic UUID format, including dashes; length is always 36 http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/docs/requirements.txt ---------------------------------------------------------------------- diff --git a/docs/requirements.txt b/docs/requirements.txt index 7baba04..669522a 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -11,4 +11,4 @@ # limitations under the License. 
Sphinx==1.5.3 -sphinx_rtd_theme==0.2.0 +sphinx_rtd_theme==0.2.2 http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/tests/mock/models.py ---------------------------------------------------------------------- diff --git a/tests/mock/models.py b/tests/mock/models.py index 78e9373..716254e 100644 --- a/tests/mock/models.py +++ b/tests/mock/models.py @@ -65,12 +65,12 @@ def create_service(service_template): def create_dependency_node_template(service_template): node_type = service_template.node_types.get_descendant('test_node_type') capability_type = service_template.capability_types.get_descendant('test_capability_type') - + capability_template = models.CapabilityTemplate( name='capability', type=capability_type ) - + node_template = models.NodeTemplate( name=DEPENDENCY_NODE_TEMPLATE_NAME, type=node_type, @@ -90,7 +90,7 @@ def create_dependent_node_template(service_template, dependency_node_template): operation_templates = dict((op, models.OperationTemplate( name=op, implementation='test')) - for _, op in operations.NODE_OPERATIONS) + for _, op in operations.NODE_OPERATIONS) interface_template = models.InterfaceTemplate( type=service_template.interface_types.get_descendant('test_interface_type'), operation_templates=operation_templates) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/tests/mock/topology.py ---------------------------------------------------------------------- diff --git a/tests/mock/topology.py b/tests/mock/topology.py index c7d8087..5f31661 100644 --- a/tests/mock/topology.py +++ b/tests/mock/topology.py @@ -31,7 +31,7 @@ def create_simple_topology_single_node(model_storage, create_operation): inputs={'key': aria_models.Parameter(name='key', value='create', type_name='string'), 'value': aria_models.Parameter(name='value', value=True, type_name='boolean')}) ) - node_template.interface_templates[interface_template.name] = interface_template + node_template.interface_templates[interface_template.name] = interface_template # pylint: disable=unsubscriptable-object node = models.create_dependency_node(node_template, service) interface = models.create_interface( @@ -42,7 +42,7 @@ def create_simple_topology_single_node(model_storage, create_operation): inputs={'key': aria_models.Parameter(name='key', value='create', type_name='string'), 'value': aria_models.Parameter(name='value', value=True, type_name='boolean')}) ) - node.interfaces[interface.name] = interface + node.interfaces[interface.name] = interface # pylint: disable=unsubscriptable-object model_storage.service_template.put(service_template) model_storage.service.put(service) @@ -61,7 +61,7 @@ def create_simple_topology_two_nodes(model_storage): dependency_node = models.create_dependency_node(dependency_node_template, service) dependent_node = models.create_dependent_node(dependent_node_template, service) - dependent_node.outbound_relationships.append(models.create_relationship( + dependent_node.outbound_relationships.append(models.create_relationship( # pylint: disable=no-member source=dependent_node, target=dependency_node )) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/tests/orchestrator/context/test_operation.py ---------------------------------------------------------------------- diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py index ce3bd84..8ac8d49 100644 --- a/tests/orchestrator/context/test_operation.py +++ b/tests/orchestrator/context/test_operation.py @@ -101,7 +101,7 @@ def 
test_node_operation_task_execution(ctx, executor): ) operations = interface.operations assert len(operations) == 1 - assert operation_context.task.implementation == operations.values()[0].implementation + assert operation_context.task.implementation == operations.values()[0].implementation # pylint: disable=no-member assert operation_context.task.inputs['putput'].value is True # Context based attributes (sugaring) @@ -151,7 +151,7 @@ def test_relationship_operation_task_execution(ctx, executor): assert operation_context.task.actor == relationship assert interface_name in operation_context.task.name operations = interface.operations - assert operation_context.task.implementation == operations.values()[0].implementation + assert operation_context.task.implementation == operations.values()[0].implementation # pylint: disable=no-member assert operation_context.task.inputs['putput'].value is True # Context based attributes (sugaring) @@ -172,7 +172,7 @@ def test_relationship_operation_task_execution(ctx, executor): def test_invalid_task_operation_id(ctx, executor): """ Checks that the right id is used. The task created with id == 1, thus running the task on - node_instance with id == 2. will check that indeed the node_instance uses the correct id. + node with id == 2. will check that indeed the node uses the correct id. :param ctx: :param executor: :return: @@ -241,8 +241,7 @@ def test_plugin_workdir(ctx, executor, tmpdir): graph.add_tasks(api.task.OperationTask.for_node(node=node, interface_name=interface_name, operation_name=operation_name, - inputs=inputs) - ) + inputs=inputs)) execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor) expected_file = tmpdir.join('workdir', 'plugins', str(ctx.service.id), http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/tests/orchestrator/execution_plugin/test_local.py ---------------------------------------------------------------------- diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py index 9e9540f..6f146a6 100644 --- a/tests/orchestrator/execution_plugin/test_local.py +++ b/tests/orchestrator/execution_plugin/test_local.py @@ -462,7 +462,7 @@ if __name__ == '__main__': script_path = os.path.basename(local_script_path) if local_script_path else None if script_path: workflow_context.resource.deployment.upload( - entry_id=str(workflow_context.service_instance.id), + entry_id=str(workflow_context.service.id), source=local_script_path, path=script_path) @@ -475,20 +475,20 @@ if __name__ == '__main__': @workflow def mock_workflow(ctx, graph): - op = 'test.op' - node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME) - node.interfaces = [mock.models.get_interface( - op, + node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) + interface = mock.models.create_interface( + node.service, + 'test', + 'op', operation_kwargs=dict(implementation='{0}.{1}'.format( operations.__name__, operations.run_script_locally.__name__)) - )] - # node.operations[op] = { - # 'operation': '{0}.{1}'.format(operations.__name__, - # operations.run_script_locally.__name__)} - graph.add_tasks(api.task.OperationTask.node( - instance=node, - name=op, + ) + node.interfaces[interface.name] = interface + graph.add_tasks(api.task.OperationTask.for_node( + node=node, + interface_name='test', + operation_name='op', inputs=inputs)) return graph tasks_graph = mock_workflow(ctx=workflow_context) # pylint: disable=no-value-for-parameter @@ -498,7 +498,7 @@ if 
__name__ == '__main__': tasks_graph=tasks_graph) eng.execute() return workflow_context.model.node.get_by_name( - mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties + mock.models.DEPENDENCY_NODE_NAME).runtime_properties @pytest.fixture def executor(self): http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/tests/orchestrator/execution_plugin/test_ssh.py ---------------------------------------------------------------------- diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py index a65ee34..78341b8 100644 --- a/tests/orchestrator/execution_plugin/test_ssh.py +++ b/tests/orchestrator/execution_plugin/test_ssh.py @@ -124,10 +124,10 @@ class TestWithActualSSHServer(object): def test_run_script_download_resource_and_render(self, tmpdir): resource = tmpdir.join('resource') - resource.write('{{ctx.service_instance.name}}') + resource.write('{{ctx.service.name}}') self._upload(str(resource), 'test_resource') props = self._execute() - assert props['test_value'] == self._workflow_context.service_instance.name + assert props['test_value'] == self._workflow_context.service.name @pytest.mark.parametrize('value', ['string-value', [1, 2, 3], {'key': 'value'}]) def test_run_script_inputs_as_env_variables_no_override(self, value): @@ -216,15 +216,20 @@ class TestWithActualSSHServer(object): @workflow def mock_workflow(ctx, graph): - op = 'test.op' - node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME) - node.interfaces = [mock.models.get_interface( - op, - dict(implementation='{0}.{1}'.format(operations.__name__, operation.__name__)) - )] + node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) + interface = mock.models.create_interface( + node.service, + 'test', + 'op', + operation_kwargs=dict(implementation='{0}.{1}'.format( + operations.__name__, + operation.__name__)) + ) + node.interfaces[interface.name] = interface graph.sequence(*[api.task.OperationTask.for_node( node=node, - name=op, + interface_name='test', + operation_name='op', inputs={ 'script_path': script_path, 'fabric_env': _FABRIC_ENV, @@ -243,7 +248,7 @@ class TestWithActualSSHServer(object): tasks_graph=tasks_graph) eng.execute() return self._workflow_context.model.node.get_by_name( - mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties + mock.models.DEPENDENCY_NODE_NAME).runtime_properties def _execute_and_get_task_exception(self, *args, **kwargs): signal = events.on_failure_task_signal @@ -254,7 +259,7 @@ class TestWithActualSSHServer(object): def _upload(self, source, path): self._workflow_context.resource.deployment.upload( - entry_id=str(self._workflow_context.service_instance.id), + entry_id=str(self._workflow_context.service.id), source=source, path=path) @@ -407,7 +412,7 @@ class TestFabricEnvHideGroupsAndRunCommands(object): class Stub(object): @staticmethod def abort(message=None): - model.Task.abort(message) + models.Task.abort(message) ip = None task = Stub task.runs_on = Stub http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/tests/orchestrator/workflows/builtin/test_heal.py ---------------------------------------------------------------------- diff --git a/tests/orchestrator/workflows/builtin/test_heal.py b/tests/orchestrator/workflows/builtin/test_heal.py index b553049..92fa7ea 100644 --- a/tests/orchestrator/workflows/builtin/test_heal.py +++ b/tests/orchestrator/workflows/builtin/test_heal.py @@ -33,11 +33,11 @@ def ctx(tmpdir): @pytest.mark.skip(reason='heal is not 
implemented for now') def test_heal_dependent_node(ctx): - dependent_node_instance = \ - ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME) - dependent_node_instance.host_fk = dependent_node_instance.id - ctx.model.node.update(dependent_node_instance) - heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id=dependent_node_instance.id) + dependent_node = \ + ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME) + dependent_node.host_fk = dependent_node.id + ctx.model.node.update(dependent_node) + heal_graph = task.WorkflowTask(heal, ctx=ctx, node_id=dependent_node.id) assert len(list(heal_graph.tasks)) == 2 uninstall_subgraph, install_subgraph = list(heal_graph.topological_order(reverse=True)) @@ -63,11 +63,11 @@ def test_heal_dependent_node(ctx): @pytest.mark.skip(reason='heal is not implemented for now') def test_heal_dependency_node(ctx): - dependency_node_instance = \ - ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME) - dependency_node_instance.host_fk = dependency_node_instance.id - ctx.model.node.update(dependency_node_instance) - heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id=dependency_node_instance.id) + dependency_node = \ + ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) + dependency_node.host_fk = dependency_node.id + ctx.model.node.update(dependency_node) + heal_graph = task.WorkflowTask(heal, ctx=ctx, node_id=dependency_node.id) # both subgraphs should contain un\install for both the dependent and the dependency assert len(list(heal_graph.tasks)) == 2 uninstall_subgraph, install_subgraph = list(heal_graph.topological_order(reverse=True)) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/tests/storage/test_models.py ---------------------------------------------------------------------- diff --git a/tests/storage/test_models.py b/tests/storage/test_models.py index 0088314..c80659b 100644 --- a/tests/storage/test_models.py +++ b/tests/storage/test_models.py @@ -662,13 +662,15 @@ class TestNodeInstanceIP(object): @pytest.mark.skip('Should be reworked into relationship') class TestRelationship(object): def test_relationship_model_creation(self, nodes_storage): + nodes = nodes_storage.node + source_node = nodes.get_by_name(mock.models.DEPENDENT_NODE_NAME) + target_node = nodes.get_by_name(mock.models.DEPENDENCY_NODE_NAME) + relationship = mock.models.create_relationship( + source=source_node, target=nodes_storage.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) ) nodes_storage.relationship.put(relationship) - nodes = nodes_storage.node - source_node = nodes.get_by_name(mock.models.DEPENDENT_NODE_NAME) - target_node = nodes.get_by_name(mock.models.DEPENDENCY_NODE_NAME) relationship_instance = _test_model( is_valid=True, http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aa01cd4e/tests/storage/test_structures.py ---------------------------------------------------------------------- diff --git a/tests/storage/test_structures.py b/tests/storage/test_structures.py index ceaea11..cacec2e 100644 --- a/tests/storage/test_structures.py +++ b/tests/storage/test_structures.py @@ -47,7 +47,7 @@ def storage(): @pytest.fixture(scope='module', autouse=True) def module_cleanup(): - modeling.models.aria_declarative_base.metadata.remove(MockModel.__table__) #pylint: disable=no-member + modeling.models.aria_declarative_base.metadata.remove(MockModel.__table__) # pylint: disable=no-member @pytest.fixture @@ -137,7 +137,7 @@ def test_relationship_model_ordering(context): 
target_node=new_node, )) - new_node.outbound_relationships.append(modeling.models.Relationship( + new_node.outbound_relationships.append(modeling.models.Relationship( # pylint: disable=no-member source_node=new_node, target_node=target_node, )) @@ -159,12 +159,12 @@ def test_relationship_model_ordering(context): relationships = getattr(node, direction + '_relationships') assert len(relationships) == 2 - reversed_relationship_instances = list(reversed(relationships)) - assert relationships != reversed_relationship_instances + reversed_relationship = list(reversed(relationships)) + assert relationships != reversed_relationship - relationships[:] = reversed_relationship_instances + relationships[:] = reversed_relationship context.model.node.update(node) - assert relationships == reversed_relationship_instances + assert relationships == reversed_relationship flip_and_assert(source_node, 'outbound') flip_and_assert(target_node, 'inbound')
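----------------------------------------------------------------------

For readers skimming the rename: the hunks above consistently replace node_instance with node across the workflow, modeling, and test code, and switch operation tasks to OperationTask.for_node with explicit interface and operation names. Below is a minimal sketch of what call sites look like after this commit. Import paths follow the file paths shown in the diff, but the exact public import surface, the helper name build_heal_graph, and the 'test'/'op' interface and operation names are illustrative assumptions, not part of the commit.

    # Sketch only: names and imports inferred from the diff hunks above.
    from aria.orchestrator.workflows.api import task as api_task
    from aria.orchestrator.workflows.builtin.heal import heal

    def build_heal_graph(ctx, graph, node_name):
        # Nodes (formerly "node instances") are looked up through ctx.model.node,
        # as in the updated test_heal.py hunks.
        node = ctx.model.node.get_by_name(node_name)

        # heal() now takes node_id rather than node_instance_id.
        heal_subgraph = api_task.WorkflowTask(heal, ctx=ctx, node_id=node.id)
        graph.add_tasks(heal_subgraph)

        # Operation tasks are built with for_node() plus explicit interface and
        # operation names, as in the updated test_local.py / test_ssh.py hunks.
        graph.add_tasks(api_task.OperationTask.for_node(
            node=node,
            interface_name='test',
            operation_name='op',
            inputs={}))
        return graph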
