Repository: incubator-ariatosca Updated Branches: refs/heads/wf-executor a9120175e -> a0ac985a1
clean some Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/a0ac985a Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/a0ac985a Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/a0ac985a Branch: refs/heads/wf-executor Commit: a0ac985a1fe61f6e771fc285e8f6037088670ccb Parents: a912017 Author: Dan Kilman <dankil...@gmail.com> Authored: Tue Oct 18 16:43:49 2016 +0300 Committer: Dan Kilman <dankil...@gmail.com> Committed: Tue Oct 18 16:43:49 2016 +0300 ---------------------------------------------------------------------- aria/events/builtin_event_handlers.py | 40 ++++------------------- aria/events/workflow_engine_event_handler.py | 12 +++---- aria/storage/models.py | 7 ++-- 3 files changed, 16 insertions(+), 43 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a0ac985a/aria/events/builtin_event_handlers.py ---------------------------------------------------------------------- diff --git a/aria/events/builtin_event_handlers.py b/aria/events/builtin_event_handlers.py index 6be39c3..404cc01 100644 --- a/aria/events/builtin_event_handlers.py +++ b/aria/events/builtin_event_handlers.py @@ -15,7 +15,6 @@ from datetime import datetime -from ..storage.models import NodeInstance from . import ( start_workflow_signal, on_success_workflow_signal, @@ -26,36 +25,9 @@ from . 
import ( ) -class _OperationToNodeInstanceState(dict): - def __missing__(self, key): - for cached_key, value in self.items(): - if key.startswith(cached_key): - return value - raise KeyError(key) - -_operation_to_node_instance_state = _OperationToNodeInstanceState({ - 'cloudify.interfaces.lifecycle.create': NodeInstance.INITIALIZING, - 'cloudify.interfaces.lifecycle.configure': NodeInstance.CONFIGURING, - 'cloudify.interfaces.lifecycle.start': NodeInstance.STARTING, - 'cloudify.interfaces.lifecycle.stop': NodeInstance.STOPPING, - 'cloudify.interfaces.lifecycle.delete': NodeInstance.DELETING -}) - - -@start_task_signal.connect -def _update_node_instance_state(sender, **kwargs): - try: - next_state = _operation_to_node_instance_state[sender.task_name] - except KeyError: - return - node_instance = sender.context.node_instance - node_instance.state = next_state - sender.context.storage.node_instance.store(node_instance) - - @start_task_signal.connect def _task_started(task, *args, **kwargs): - operation_context = task.operation_context + operation_context = task.context operation = operation_context.operation operation.started_at = datetime.utcnow() operation.status = operation.STARTED @@ -64,7 +36,7 @@ def _task_started(task, *args, **kwargs): @on_failure_task_signal.connect def _task_failed(task, *args, **kwargs): - operation_context = task.operation_context + operation_context = task.context operation = operation_context.operation operation.ended_at = datetime.utcnow() operation.status = operation.FAILED @@ -73,7 +45,7 @@ def _task_failed(task, *args, **kwargs): @on_success_task_signal.connect def _task_succeeded(task, *args, **kwargs): - operation_context = task.operation_context + operation_context = task.context operation = operation_context.operation operation.ended_at = datetime.utcnow() operation.status = operation.SUCCESS @@ -89,10 +61,8 @@ def _workflow_started(workflow_context, *args, **kwargs): workflow_id=workflow_context.workflow_id, 
blueprint_id=workflow_context.blueprint_id, status=Execution.PENDING, - created_at=datetime.utcnow(), - error='', + started_at=datetime.utcnow(), parameters=workflow_context.parameters, - is_system_workflow=False ) workflow_context.execution = execution @@ -102,6 +72,7 @@ def _workflow_failed(workflow_context, exception, *args, **kwargs): execution = workflow_context.execution execution.error = str(exception) execution.status = execution.FAILED + execution.ended_at = datetime.utcnow() workflow_context.execution = execution @@ -109,4 +80,5 @@ def _workflow_failed(workflow_context, exception, *args, **kwargs): def _workflow_succeeded(workflow_context, *args, **kwargs): execution = workflow_context.execution execution.status = execution.TERMINATED + execution.ended_at = datetime.utcnow() workflow_context.execution = execution http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a0ac985a/aria/events/workflow_engine_event_handler.py ---------------------------------------------------------------------- diff --git a/aria/events/workflow_engine_event_handler.py b/aria/events/workflow_engine_event_handler.py index 44759c5..6916206 100644 --- a/aria/events/workflow_engine_event_handler.py +++ b/aria/events/workflow_engine_event_handler.py @@ -25,20 +25,20 @@ from .
import ( @start_task_signal.connect def start_task_handler(task, **kwargs): - task.context.logger.debug( - 'Event: Starting task: {task.context.name}'.format(task=task)) + task.logger.debug( + 'Event: Starting task: {task.name}'.format(task=task)) @on_success_task_signal.connect def success_task_handler(task, **kwargs): - task.context.logger.debug( - 'Event: Task success: {task.context.name}'.format(task=task)) + task.logger.debug( + 'Event: Task success: {task.name}'.format(task=task)) @on_failure_task_signal.connect def failure_operation_handler(task, **kwargs): - task.context.logger.error( - 'Event: Task failure: {task.context.id}'.format(task=task), + task.logger.error( + 'Event: Task failure: {task.name}'.format(task=task), exc_info=kwargs.get('exception', True)) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a0ac985a/aria/storage/models.py ---------------------------------------------------------------------- diff --git a/aria/storage/models.py b/aria/storage/models.py index d3cb3f7..d96c74a 100644 --- a/aria/storage/models.py +++ b/aria/storage/models.py @@ -191,10 +191,11 @@ class Execution(Model): deployment_id = Field(type=basestring) workflow_id = Field(type=basestring) blueprint_id = Field(type=basestring) - created_at = Field(type=datetime) - error = Field() + started_at = Field(type=datetime) + ended_at = Field(type=datetime, default=None) + error = Field(type=basestring, default=None) parameters = Field() - is_system_workflow = Field(type=bool) + is_system_workflow = Field(type=bool, default=False) class Operation(Model):