[06/12] incubator-ariatosca git commit: ARIA-1 Parser test suite

2017-11-24 Thread emblemparade
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_input.py
--
diff --git 
a/tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_input.py
 
b/tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_input.py
new file mode 100644
index 000..a4c97a4
--- /dev/null
+++ 
b/tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_input.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def test_functions_get_input_unknown(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  MyType:
+properties:
+  my_parameter:
+type: string
+topology_template:
+  node_templates:
+my_node:
+  type: MyType
+  properties:
+my_parameter: { get_input: unknown }
+""").assert_failure()
+
+
+def test_functions_get_input(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  MyType:
+properties:
+  my_parameter:
+type: string
+topology_template:
+  inputs:
+my_input:
+  type: string
+  node_templates:
+my_node:
+  type: MyType
+  properties:
+my_parameter: { get_input: my_input }
+""").assert_success()
+
+
+def test_functions_get_input_nested(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  MyType:
+properties:
+  my_parameter:
+type: string
+topology_template:
+  inputs:
+my_input:
+  type: string
+  node_templates:
+my_node:
+  type: MyType
+  properties:
+my_parameter: { get_input: { concat: [ my, _, input ] } }
+""").assert_success()
+
+
+# Unicode
+
+def test_functions_get_input_unicode(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  類型:
+properties:
+  參數:
+type: string
+topology_template:
+  inputs:
+输入:
+  type: string
+  node_templates:
+模板:
+  type: 類型
+  properties:
+參數: { get_input: 输入 }
+""").assert_success()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_nodes_of_type.py
--
diff --git 
a/tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_nodes_of_type.py
 
b/tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_nodes_of_type.py
new file mode 100644
index 000..ffa2f9c
--- /dev/null
+++ 
b/tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_get_nodes_of_type.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def test_functions_get_nodes_of_type_unknown(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  MyType:
+properties:
+  my_parameter:
+type: string
+topology_template:
+  node_templates:
+my_node:
+  type: MyType
+  properties:
+my_parameter: { get_nodes_of_type: unknown }
+""", import_profile=True).assert_failure()
+
+
+def test_functions_get_nodes_of_type(parser):
+

[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153016106
  
--- Diff: aria/modeling/service_common.py ---
@@ -587,12 +588,11 @@ class MetadataBase(TemplateModelMixin):
 :ivar name: name
 :vartype name: basestring
 :ivar value: value
-:vartype value: basestring
 """
 
 __tablename__ = 'metadata'
 
-value = Column(Text)
+value = Column(PickleType)
--- End diff --

Why not? :)


---


[02/12] incubator-ariatosca git commit: ARIA-408 remove execution creation from workflow runner

2017-11-24 Thread emblemparade
ARIA-408 remove execution creation from workflow runner

Separated the creation of the models and the execution of the workflow.
This enables creating the execution, and executing it from a different
process.


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/e71ddc9b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/e71ddc9b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/e71ddc9b

Branch: refs/heads/ARIA-1-parser-test-suite
Commit: e71ddc9b05ae32b675750e4d37d618f21c9e1304
Parents: 730750f
Author: max-orlov 
Authored: Sun Nov 19 10:09:54 2017 +0200
Committer: max-orlov 
Committed: Wed Nov 22 16:08:48 2017 +0200

--
 aria/cli/commands/executions.py | 101 ++-
 aria/orchestrator/execution_preparer.py | 170 +
 aria/orchestrator/workflow_runner.py| 188 -
 aria/orchestrator/workflows/core/engine.py  |   4 +-
 docs/aria.orchestrator.rst  |   6 +-
 test_ssh.py | 528 --
 tests/end2end/testenv.py|   1 +
 tests/orchestrator/context/__init__.py  |   2 +-
 tests/orchestrator/context/test_serialize.py|   2 +-
 tests/orchestrator/execution/__init__.py|  14 +
 .../execution/test_execution_compiler.py| 628 
 .../orchestrator/execution_plugin/test_local.py |   2 +-
 tests/orchestrator/execution_plugin/test_ssh.py |   2 +-
 tests/orchestrator/test_workflow_runner.py  | 726 ---
 .../orchestrator/workflows/core/test_engine.py  |   2 +-
 .../orchestrator/workflows/core/test_events.py  |   2 +-
 .../executor/test_process_executor_extension.py |   2 +-
 .../test_process_executor_tracked_changes.py|   2 +-
 18 files changed, 887 insertions(+), 1495 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e71ddc9b/aria/cli/commands/executions.py
--
diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py
index cecbbc5..2415e19 100644
--- a/aria/cli/commands/executions.py
+++ b/aria/cli/commands/executions.py
@@ -25,9 +25,11 @@ from .. import utils
 from .. import logger as cli_logger
 from .. import execution_logging
 from ..core import aria
+from ...orchestrator import execution_preparer
 from ...modeling.models import Execution
-from ...orchestrator.workflow_runner import WorkflowRunner
+from ...orchestrator.workflows.core.engine import Engine
 from ...orchestrator.workflows.executor.dry import DryExecutor
+from ...orchestrator.workflows.executor.process import ProcessExecutor
 from ...utils import formatting
 from ...utils import threading
 
@@ -141,17 +143,21 @@ def start(workflow_name,
 WORKFLOW_NAME is the unique name of the workflow within the service (e.g. 
"uninstall").
 """
 service = model_storage.service.get_by_name(service_name)
-executor = DryExecutor() if dry else None  # use WorkflowRunner's default 
executor
-
-workflow_runner = \
-WorkflowRunner(
-model_storage, resource_storage, plugin_manager,
-service_id=service.id, workflow_name=workflow_name, inputs=inputs, 
executor=executor,
-task_max_attempts=task_max_attempts, 
task_retry_interval=task_retry_interval
-)
+executor = DryExecutor() if dry else 
ProcessExecutor(plugin_manager=plugin_manager)
+
+compiler = execution_preparer.ExecutionPreparer(
+model_storage,
+resource_storage,
+plugin_manager,
+service,
+workflow_name
+)
+workflow_ctx = compiler.prepare(inputs, executor=executor)
+
+engine = Engine(executor)
 logger.info('Starting {0}execution. Press Ctrl+C cancel'.format('dry ' if 
dry else ''))
 
-_run_execution(workflow_runner, logger, model_storage, dry, mark_pattern)
+_run_execution(engine, workflow_ctx, logger, model_storage, dry, 
mark_pattern)
 
 
 @executions.command(name='resume',
@@ -178,45 +184,61 @@ def resume(execution_id,
 
 EXECUTION_ID is the unique ID of the execution.
 """
-executor = DryExecutor() if dry else None  # use WorkflowRunner's default 
executor
+executor = DryExecutor() if dry else 
ProcessExecutor(plugin_manager=plugin_manager)
 
-execution = model_storage.execution.get(execution_id)
-if execution.status != execution.CANCELLED:
+execution_to_resume = model_storage.execution.get(execution_id)
+if execution_to_resume.status != execution_to_resume.CANCELLED:
 logger.info("Can't resume execution {execution.id} - "
 "execution is in status {execution.status}. "
-"Can only resume 

[03/12] incubator-ariatosca git commit: ARIA-1 Parser test suite

2017-11-24 Thread emblemparade
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/tests/extensions/aria_extension_tosca/simple_v1_0/types/test_data_type.py
--
diff --git 
a/tests/extensions/aria_extension_tosca/simple_v1_0/types/test_data_type.py 
b/tests/extensions/aria_extension_tosca/simple_v1_0/types/test_data_type.py
new file mode 100644
index 000..5c0dd70
--- /dev/null
+++ b/tests/extensions/aria_extension_tosca/simple_v1_0/types/test_data_type.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from .. import data
+from .mechanisms.utils import matrix
+
+
+# Derived from primitive
+
+@pytest.mark.parametrize('name', data.PRIMITIVE_TYPE_NAMES)
+def test_data_type_derived_from_primitive(parser, name):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+data_types:
+  MyType:
+derived_from: {{ name }} 
+""", dict(name=name)).assert_success()
+
+
+# Constraints
+
+@pytest.mark.parametrize('name,value', matrix(
+data.PRIMITIVE_TYPE_NAMES,
+data.NOT_A_LIST
+))
+def test_data_type_constraints_syntax_type(parser, name, value):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+data_types:
+  MyType:
+derived_from: string
+constraints: {{ value }}
+""", dict(name=name, value=value)).assert_failure()
+
+
+@pytest.mark.parametrize('name', data.PRIMITIVE_TYPE_NAMES)
+def test_data_type_constraints_syntax_empty(parser, name):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+data_types:
+  MyType:
+derived_from: string
+constraints: []
+""", dict(name=name)).assert_success()
+
+
+def test_data_type_constraints_not_derived_from_primitive(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+data_types:
+  MyType:
+constraints: [] # can't have constraints if not derived from primitive
+""").assert_failure()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/tests/extensions/aria_extension_tosca/simple_v1_0/types/test_group_type.py
--
diff --git 
a/tests/extensions/aria_extension_tosca/simple_v1_0/types/test_group_type.py 
b/tests/extensions/aria_extension_tosca/simple_v1_0/types/test_group_type.py
new file mode 100644
index 000..7816484
--- /dev/null
+++ b/tests/extensions/aria_extension_tosca/simple_v1_0/types/test_group_type.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from .. import data
+
+
+# Members
+
+@pytest.mark.parametrize('value', data.NOT_A_LIST)
+def test_group_type_members_syntax_type(parser, value):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+group_types:
+  MyType:
+members: {{ value }}
+""", dict(value=value)).assert_failure()
+
+
+@pytest.mark.parametrize('value', data.NOT_A_STRING)
+def test_group_type_members_syntax_element_type(parser, value):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+group_types:
+  MyType:
+members: [ {{ value }} ]
+""", dict(value=value)).assert_failure()
+
+
+def test_group_type_members_syntax_empty(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+group_types:
+  MyType:
+members: []
+""").assert_success()
+
+
+def 

[07/12] incubator-ariatosca git commit: ARIA-1 Parser test suite

2017-11-24 Thread emblemparade
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
--
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py 
b/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
index 9bafeec..e411104 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
@@ -88,7 +88,7 @@ def get_assigned_and_defined_parameter_values(context, 
presentation, field_name)
 definition = definitions[name]
 values[name] = coerce_parameter_value(context, value, 
definition, value.value)
 else:
-context.validation.report('assignment to undefined {0} "{1}" 
in "{2}"'
+context.validation.report(u'assignment to undefined {0} "{1}" 
in "{2}"'
   .format(field_name, name, 
presentation._fullname),
   locator=value._locator, 
level=Issue.BETWEEN_TYPES)
 
@@ -99,7 +99,7 @@ def get_assigned_and_defined_parameter_values(context, 
presentation, field_name)
 if (name not in values) and \
 (('default' in definition._raw) or (field_name == 
'attribute')):
 values[name] = coerce_parameter_value(context, presentation, 
definition,
-  definition.default)
+  definition.default, 
'default')
 
 validate_required_values(context, presentation, values, definitions)
 
@@ -131,7 +131,8 @@ def get_parameter_values(context, presentation, field_name):
   parameter.value)
 else:
 default = parameter.default if hasattr(parameter, 
'default') else None
-values[name] = coerce_parameter_value(context, 
presentation, parameter, default)
+values[name] = coerce_parameter_value(context, 
presentation, parameter, default,
+  'default')
 
 return values
 
@@ -147,11 +148,21 @@ def validate_required_values(context, presentation, 
values, definitions):
 
 if not definitions:
 return
+
+def has_value(name):
+if values is None:
+return False
+value = values.get(name)
+if value is None:
+return False
+if isinstance(value, Value) and (value.value is None):
+return False
+return True
+
 for name, definition in definitions.iteritems():
-if getattr(definition, 'required', False) and \
-((values is None) or (values.get(name) is None)):
-context.validation.report('required property "%s" is not assigned 
a value in "%s"'
-  % (name, presentation._fullname),
+if getattr(definition, 'required', False) and not has_value(name):
+context.validation.report(u'required property "{0}" is not 
assigned a value in "{1}"'
+  .format(name, presentation._fullname),
   
locator=presentation._get_child_locator('properties'),
   level=Issue.BETWEEN_TYPES)
 
@@ -166,14 +177,14 @@ def merge_raw_parameter_definition(context, presentation, 
raw_property_definitio
 our_property_definition._reset_method_cache()
 type2 = our_property_definition._get_type(context)
 
-if type1 != type2:
-if not hasattr(type1, '_is_descendant') or not 
type1._is_descendant(context, type2):
-context.validation.report(
-'property definition type "{0}" is not a descendant of 
overridden '
-'property definition type "{1}"' \
-.format(type1_name, type2._name),
-locator=presentation._get_child_locator(field_name, 
property_name),
-level=Issue.BETWEEN_TYPES)
+if (type1 is not type2) and \
+(not hasattr(type1, '_is_descendant') or not 
type1._is_descendant(context, type2)):
+context.validation.report(
+u'property definition type "{0}" is not a descendant of overridden 
'
+u'property definition type "{1}"' \
+.format(our_property_definition.type, type1_name),
+locator=presentation._get_child_locator(field_name, property_name),
+level=Issue.BETWEEN_TYPES)
 
 merge(raw_property_definition, our_property_definition._raw)
 
@@ -225,6 +236,6 @@ def coerce_parameter_value(context, presentation, 
definition, value, aspect=None
 def convert_parameter_definitions_to_values(context, definitions):
 values = OrderedDict()
 for name, definition in definitions.iteritems():
-default = 

[10/12] incubator-ariatosca git commit: ARIA-1 Parser test suite

2017-11-24 Thread emblemparade
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/orchestrator/topology/template_handler.py
--
diff --git a/aria/orchestrator/topology/template_handler.py 
b/aria/orchestrator/topology/template_handler.py
index a84a988..3b1948a 100644
--- a/aria/orchestrator/topology/template_handler.py
+++ b/aria/orchestrator/topology/template_handler.py
@@ -69,7 +69,7 @@ class ServiceTemplate(common.TemplateHandlerBase):
 plugin = plugin_specification.plugin
 service.plugins[plugin.name] = plugin
 else:
-self._topology.report('specified plugin not found: 
{0}'.format(
+self._topology.report(u'specified plugin not found: 
{0}'.format(
 plugin_specification.name), 
level=self._topology.Issue.EXTERNAL)
 service.meta_data = self._topology.instantiate(self._model.meta_data)
 
@@ -108,17 +108,18 @@ class ServiceTemplate(common.TemplateHandlerBase):
 def _scaling(self, node_template):
 scaling = node_template.scaling
 
-if any([scaling['min_instances'] < 0,
+if any((scaling['min_instances'] < 0,
 scaling['max_instances'] < scaling['min_instances'],
 scaling['max_instances'] < 0,
 
 scaling['default_instances'] < 0,
 scaling['default_instances'] < scaling['min_instances'],
 scaling['default_instances'] > scaling['max_instances']
-   ]):
+   )):
 self._topology.report(
-'invalid scaling parameters for node template "{0}": 
min={min_instances}, max='
-'{max_instances}, 
default={default_instances}'.format(self._model.name, **scaling),
+u'invalid scaling parameters for node template "{0}": 
min={min_instances}, max='
+u'{max_instances}, 
default={default_instances}'.format(node_template.name,
+   
**scaling),
 level=self._topology.Issue.BETWEEN_TYPES)
 
 return scaling
@@ -150,18 +151,18 @@ class ArtifactTemplate(common.TemplateHandlerBase):
 if self._model.description:
 out_stream.write(out_stream.meta_style(self._model.description))
 with out_stream.indent():
-out_stream.write('Artifact type: {0}'.format(out_stream.type_style(
+out_stream.write(u'Artifact type: 
{0}'.format(out_stream.type_style(
 self._model.type.name)))
-out_stream.write('Source path: 
{0}'.format(out_stream.literal_style(
+out_stream.write(u'Source path: 
{0}'.format(out_stream.literal_style(
 self._model.source_path)))
 if self._model.target_path is not None:
-out_stream.write('Target path: 
{0}'.format(out_stream.literal_style(
+out_stream.write(u'Target path: 
{0}'.format(out_stream.literal_style(
 self._model.target_path)))
 if self._model.repository_url is not None:
-out_stream.write('Repository URL: {0}'.format(
+out_stream.write(u'Repository URL: {0}'.format(
 out_stream.literal_style(self._model.repository_url)))
 if self._model.repository_credential:
-out_stream.write('Repository credential: {0}'.format(
+out_stream.write(u'Repository credential: {0}'.format(
 
out_stream.literal_style(self._model.repository_credential)))
 self._topology.dump(self._model.properties, out_stream, 
title='Properties')
 
@@ -189,17 +190,17 @@ class CapabilityTemplate(common.TemplateHandlerBase):
 if self._model.description:
 out_stream.write(out_stream.meta_style(self._model.description))
 with out_stream.indent():
-out_stream.write('Type: 
{0}'.format(out_stream.type_style(self._model.type.name)))
+out_stream.write(u'Type: 
{0}'.format(out_stream.type_style(self._model.type.name)))
 out_stream.write(
-'Occurrences: {0:d}{1}'.format(
+u'Occurrences: {0:d}{1}'.format(
 self._model.min_occurrences or 0,
-' to {0:d}'.format(self._model.max_occurrences)
+u' to {0:d}'.format(self._model.max_occurrences)
 if self._model.max_occurrences is not None
 else ' or more'))
 if self._model.valid_source_node_types:
-out_stream.write('Valid source node types: {0}'.format(
-', '.join((str(out_stream.type_style(v.name))
-   for v in self._model.valid_source_node_types
+out_stream.write(u'Valid source node types: {0}'.format(
+u', 

[01/12] incubator-ariatosca git commit: ARIA-408 remove execution creation from workflow runner [Forced Update!]

2017-11-24 Thread emblemparade
Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-1-parser-test-suite 0cf1deafc -> 1dfb81c8a (forced update)


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e71ddc9b/tests/orchestrator/test_workflow_runner.py
--
diff --git a/tests/orchestrator/test_workflow_runner.py 
b/tests/orchestrator/test_workflow_runner.py
deleted file mode 100644
index 011c4cc..000
--- a/tests/orchestrator/test_workflow_runner.py
+++ /dev/null
@@ -1,726 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import time
-from threading import Thread, Event
-from datetime import datetime
-
-import mock
-import pytest
-
-from aria.modeling import exceptions as modeling_exceptions
-from aria.modeling import models
-from aria.orchestrator import exceptions
-from aria.orchestrator import events
-from aria.orchestrator.workflow_runner import WorkflowRunner
-from aria.orchestrator.workflows.executor.process import ProcessExecutor
-from aria.orchestrator.workflows import api
-from aria.orchestrator.workflows.core import engine, graph_compiler
-from aria.orchestrator.workflows.executor import thread
-from aria.orchestrator import (
-workflow,
-operation,
-)
-
-from tests import (
-mock as tests_mock,
-storage
-)
-
-from ..fixtures import (  # pylint: disable=unused-import
-plugins_dir,
-plugin_manager,
-fs_model as model,
-resource_storage as resource
-)
-
-custom_events = {
-'is_resumed': Event(),
-'is_active': Event(),
-'execution_cancelled': Event(),
-'execution_failed': Event(),
-}
-
-
-class TimeoutError(BaseException):
-pass
-
-
-class FailingTask(BaseException):
-pass
-
-
-def test_undeclared_workflow(request):
-# validating a proper error is raised when the workflow is not declared in 
the service
-with pytest.raises(exceptions.UndeclaredWorkflowError):
-_create_workflow_runner(request, 'undeclared_workflow')
-
-
-def test_missing_workflow_implementation(service, request):
-# validating a proper error is raised when the workflow code path does not 
exist
-workflow = models.Operation(
-name='test_workflow',
-service=service,
-function='nonexistent.workflow.implementation')
-service.workflows['test_workflow'] = workflow
-
-with pytest.raises(exceptions.WorkflowImplementationNotFoundError):
-_create_workflow_runner(request, 'test_workflow')
-
-
-def test_builtin_workflow_instantiation(request):
-# validates the workflow runner instantiates properly when provided with a 
builtin workflow
-# (expecting no errors to be raised on undeclared workflow or missing 
workflow implementation)
-workflow_runner = _create_workflow_runner(request, 'install')
-tasks = list(workflow_runner.execution.tasks)
-assert len(tasks) == 18  # expecting 18 tasks for 2 node topology
-
-
-def test_custom_workflow_instantiation(request):
-# validates the workflow runner instantiates properly when provided with a 
custom workflow
-# (expecting no errors to be raised on undeclared workflow or missing 
workflow implementation)
-mock_workflow = _setup_mock_workflow_in_service(request)
-workflow_runner = _create_workflow_runner(request, mock_workflow)
-tasks = list(workflow_runner.execution.tasks)
-assert len(tasks) == 2  # mock workflow creates only start workflow and 
end workflow task
-
-
-def test_existing_active_executions(request, service, model):
-existing_active_execution = models.Execution(
-service=service,
-status=models.Execution.STARTED,
-workflow_name='uninstall')
-model.execution.put(existing_active_execution)
-with pytest.raises(exceptions.ActiveExecutionsError):
-_create_workflow_runner(request, 'install')
-
-
-def test_existing_executions_but_no_active_ones(request, service, model):
-existing_terminated_execution = models.Execution(
-service=service,
-status=models.Execution.SUCCEEDED,
-workflow_name='uninstall')
-model.execution.put(existing_terminated_execution)
-# no active executions exist, so no error should be raised
-

[08/12] incubator-ariatosca git commit: ARIA-1 Parser test suite

2017-11-24 Thread emblemparade
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/extensions/aria_extension_tosca/simple_v1_0/data_types.py
--
diff --git a/extensions/aria_extension_tosca/simple_v1_0/data_types.py 
b/extensions/aria_extension_tosca/simple_v1_0/data_types.py
index 216f1e4..b85caa1 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/data_types.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/data_types.py
@@ -37,13 +37,13 @@ class Timezone(tzinfo):
 super(Timezone, self).__init__()
 self._offset = timedelta(hours=hours, minutes=minutes)
 
-def utcoffset(self, dt): # pylint: disable=unused-argument
+def utcoffset(self, dt):   
 # pylint: disable=unused-argument
 return self._offset
 
-def tzname(self, dt): # pylint: disable=unused-argument
-return str(self._offset)
+def tzname(self, dt):  
 # pylint: disable=unused-argument
+return unicode(self._offset)
 
-def dst(self, dt): # pylint: disable=unused-argument
+def dst(self, dt): 
 # pylint: disable=unused-argument
 return Timezone._ZERO
 
 _ZERO = timedelta(0)
@@ -74,8 +74,8 @@ class Timestamp(object):
 r'(([ 
\t]*)Z|(?P[-+][0-9][0-9])?(:(?P[0-9][0-9])?)?)?$'
 CANONICAL = '%Y-%m-%dT%H:%M:%S'
 
-def __init__(self, entry_schema, constraints, value, aspect): # pylint: 
disable=unused-argument
-value = str(value)
+def __init__(self, entry_schema, constraints, value, aspect):  
 # pylint: disable=unused-argument
+value = unicode(value)
 match = re.match(Timestamp.REGULAR_SHORT, value)
 if match is not None:
 # Parse short form
@@ -116,8 +116,8 @@ class Timestamp(object):
   Timezone(tzhour, tzminute))
 else:
 raise ValueError(
-'timestamp must be formatted as YAML ISO8601 variant or 
"-MM-DD": %s'
-% safe_repr(value))
+u'timestamp must be formatted as YAML ISO8601 variant or 
"-MM-DD": {0}'
+.format(safe_repr(value)))
 
 @property
 def as_datetime_utc(self):
@@ -129,8 +129,8 @@ class Timestamp(object):
 
 def __str__(self):
 the_datetime = self.as_datetime_utc
-return '%s%sZ' \
-% (the_datetime.strftime(Timestamp.CANONICAL), 
Timestamp._fraction_as_str(the_datetime))
+return u'{0}{1}Z'.format(the_datetime.strftime(Timestamp.CANONICAL),
+ Timestamp._fraction_as_str(the_datetime))
 
 def __repr__(self):
 return repr(self.__str__())
@@ -145,7 +145,7 @@ class Timestamp(object):
 
 @staticmethod
 def _fraction_as_str(the_datetime):
-return '{0:g}'.format(the_datetime.microsecond / 100.0).lstrip('0')
+return u'{0:g}'.format(the_datetime.microsecond / 
100.0).lstrip('0')
 
 
 @total_ordering
@@ -165,7 +165,7 @@ class Version(object):
 
 REGEX = \
 r'^(?P\d+)\.(?P\d+)(\.(?P\d+)' + \
-r'((\.(?P\d+))(\-(?P\d+))?)?)?$'
+r'((\.(?P\w+))(\-(?P\d+))?)?)?$'
 
 @staticmethod
 def key(version):
@@ -174,14 +174,13 @@ class Version(object):
 """
 return (version.major, version.minor, version.fix, version.qualifier, 
version.build)
 
-def __init__(self, entry_schema, constraints, value, aspect): # pylint: 
disable=unused-argument
-str_value = str(value)
-match = re.match(Version.REGEX, str_value)
+def __init__(self, entry_schema, constraints, value, aspect):  
 # pylint: disable=unused-argument
+str_value = unicode(value)
+match = re.match(Version.REGEX, str_value, flags=re.UNICODE)
 if match is None:
 raise ValueError(
-'version must be formatted as .'
-'[.[.[-

[09/12] incubator-ariatosca git commit: ARIA-1 Parser test suite

2017-11-24 Thread emblemparade
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/parser/presentation/presenter.py
--
diff --git a/aria/parser/presentation/presenter.py 
b/aria/parser/presentation/presenter.py
index 9fd296f..d2f3292 100644
--- a/aria/parser/presentation/presenter.py
+++ b/aria/parser/presentation/presenter.py
@@ -41,10 +41,10 @@ class Presenter(Presentation):
 if tosca_definitions_version is not None \
 and tosca_definitions_version not in 
self.__class__.ALLOWED_IMPORTED_DSL_VERSIONS:
 context.validation.report(
-'import "tosca_definitions_version" is not one of %s: %s'
-% (' or '.join([safe_repr(v)
-for v in 
self.__class__.ALLOWED_IMPORTED_DSL_VERSIONS]),
-   presentation.service_template.tosca_definitions_version),
+u'import "tosca_definitions_version" is not one of {0}: {1}'
+.format(u' or '.join([safe_repr(v)
+  for v in 
self.__class__.ALLOWED_IMPORTED_DSL_VERSIONS]),
+
presentation.service_template.tosca_definitions_version),
 locator=presentation._get_child_locator('inputs'),
 level=Issue.BETWEEN_TYPES)
 return False

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/parser/presentation/source.py
--
diff --git a/aria/parser/presentation/source.py 
b/aria/parser/presentation/source.py
index 4bfb8e1..0bee5d1 100644
--- a/aria/parser/presentation/source.py
+++ b/aria/parser/presentation/source.py
@@ -26,7 +26,7 @@ class PresenterSource(object):
 Presenter sources provide appropriate :class:`Presenter` classes for 
agnostic raw data.
 """
 
-def get_presenter(self, raw):  # pylint: 
disable=unused-argument,no-self-use
+def get_presenter(self, raw):  
 # pylint: disable=unused-argument,no-self-use
 raise PresenterNotFoundError('presenter not found')
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/aria/parser/presentation/utils.py
--
diff --git a/aria/parser/presentation/utils.py 
b/aria/parser/presentation/utils.py
index f0fd390..b805299 100644
--- a/aria/parser/presentation/utils.py
+++ b/aria/parser/presentation/utils.py
@@ -56,7 +56,7 @@ def validate_primitive(value, cls, coerce=False):
 :raises ValueError: if not a primitive type or if coercion failed.
 """
 
-if (cls is not None) and (value is not None) and (value is not NULL):
+if (cls is not None) and (value is not None):
 if (cls is unicode) or (cls is str): # These two types are 
interchangeable
 valid = isinstance(value, basestring)
 elif cls is int:
@@ -66,9 +66,11 @@ def validate_primitive(value, cls, coerce=False):
 valid = isinstance(value, cls)
 if not valid:
 if coerce:
+if value is NULL:
+value = None
 value = cls(value)
 else:
-raise ValueError('not a "%s": %s' % (full_type_name(cls), 
safe_repr(value)))
+raise ValueError(u'not a "{0}": 
{1}'.format(full_type_name(cls), safe_repr(value)))
 return value
 
 
@@ -78,7 +80,8 @@ def validate_no_short_form(context, presentation):
 """
 
 if not hasattr(presentation, 'SHORT_FORM_FIELD') and not 
isinstance(presentation._raw, dict):
-context.validation.report('short form not allowed for field "%s"' % 
presentation._fullname,
+context.validation.report(u'short form not allowed for field "{0}"'
+  .format(presentation._fullname),
   locator=presentation._locator,
   level=Issue.BETWEEN_FIELDS)
 
@@ -94,8 +97,8 @@ def validate_no_unknown_fields(context, presentation):
 and hasattr(presentation, 'FIELDS'):
 for k in presentation._raw:
 if k not in presentation.FIELDS:
-context.validation.report('field "%s" is not supported in "%s"'
-  % (k, presentation._fullname),
+context.validation.report(u'field "{0}" is not supported in 
"{1}"'
+  .format(k, presentation._fullname),
   
locator=presentation._get_child_locator(k),
   level=Issue.BETWEEN_FIELDS)
 
@@ -161,27 +164,28 @@ def get_parent_presentation(context, presentation, 
*types_dict_names):
 def report_issue_for_unknown_type(context, presentation, type_name, 
field_name, value=None):
 if value is None:
 value = 

[04/12] incubator-ariatosca git commit: ARIA-1 Parser test suite

2017-11-24 Thread emblemparade
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/tests/extensions/aria_extension_tosca/simple_v1_0/test_names.py
--
diff --git a/tests/extensions/aria_extension_tosca/simple_v1_0/test_names.py 
b/tests/extensions/aria_extension_tosca/simple_v1_0/test_names.py
new file mode 100644
index 000..54cfd90
--- /dev/null
+++ b/tests/extensions/aria_extension_tosca/simple_v1_0/test_names.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def test_names_shorthand(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+topology_template:
+  node_templates:
+my_server:
+  type: Compute
+  requirements:
+- local_storage:
+node: my_block_storage
+relationship:
+  type: AttachesTo
+  properties:
+location: /path1/path2
+my_block_storage:
+  type: BlockStorage
+  properties:
+size: 10 GB
+""", import_profile=True).assert_success()
+
+
+def test_names_type_qualified(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+topology_template:
+  node_templates:
+my_server:
+  type: tosca:Compute
+  requirements:
+- local_storage:
+node: my_block_storage
+relationship:
+  type: AttachesTo
+  properties:
+location: /path1/path2
+my_block_storage:
+  type: tosca:BlockStorage
+  properties:
+size: 10 GB
+""", import_profile=True).assert_success()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/tests/extensions/aria_extension_tosca/simple_v1_0/test_profile.py
--
diff --git a/tests/extensions/aria_extension_tosca/simple_v1_0/test_profile.py 
b/tests/extensions/aria_extension_tosca/simple_v1_0/test_profile.py
new file mode 100644
index 000..922f9dc
--- /dev/null
+++ b/tests/extensions/aria_extension_tosca/simple_v1_0/test_profile.py
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def test_profile(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+""", import_profile=True, validate_normative=True).assert_success()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/tests/extensions/aria_extension_tosca/simple_v1_0/test_repositories.py
--
diff --git 
a/tests/extensions/aria_extension_tosca/simple_v1_0/test_repositories.py 
b/tests/extensions/aria_extension_tosca/simple_v1_0/test_repositories.py
new file mode 100644
index 000..9d40e22
--- /dev/null
+++ b/tests/extensions/aria_extension_tosca/simple_v1_0/test_repositories.py
@@ -0,0 +1,179 @@
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# 

[05/12] incubator-ariatosca git commit: ARIA-1 Parser test suite

2017-11-24 Thread emblemparade
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/89b9f130/tests/extensions/aria_extension_tosca/simple_v1_0/templates/node_template/test_node_template_artifacts.py
--
diff --git 
a/tests/extensions/aria_extension_tosca/simple_v1_0/templates/node_template/test_node_template_artifacts.py
 
b/tests/extensions/aria_extension_tosca/simple_v1_0/templates/node_template/test_node_template_artifacts.py
new file mode 100644
index 000..e9ccc89
--- /dev/null
+++ 
b/tests/extensions/aria_extension_tosca/simple_v1_0/templates/node_template/test_node_template_artifacts.py
@@ -0,0 +1,307 @@
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Here we are testing not only artifacts attached to node templates, but also 
artifacts attached to
+node types. The reason is that artifacts attached to node types use the same 
property assignment
+(rather than definition) syntax we see in templates.
+"""
+
+import pytest
+
+from ... import data
+from ..mechanisms.utils import matrix
+
+
+# Artifacts attached to a node template
+TEMPLATE_MACROS = """
+{% macro artifacts() %}
+node_types:
+  MyType: {}
+topology_template:
+  node_templates:
+my_node:
+  type: MyType
+  artifacts: {{ caller()|indent(8) }}
+{%- endmacro %}
+"""
+
+# Artifacts attached to a node type
+TYPE_MACROS = """
+{% macro artifacts() %}
+node_types:
+  MyType:
+artifacts: {{ caller()|indent(6) }}
+{%- endmacro %}
+"""
+
+MACROS = {
+'template': TEMPLATE_MACROS,
+'type': TYPE_MACROS
+}
+
+PERMUTATIONS = (
+'template',
+'type'
+)
+
+
+
+# Artifacts section
+
+@pytest.mark.parametrize('macros,value', matrix(PERMUTATIONS, data.NOT_A_DICT))
+def test_node_template_artifacts_section_syntax_type(parser, macros, value):
+parser.parse_literal(MACROS[macros] + """
+tosca_definitions_version: tosca_simple_yaml_1_0
+{%- call artifacts() -%}
+{{ value }}
+{% endcall %}
+""", dict(value=value)).assert_failure()
+
+
+@pytest.mark.parametrize('macros', PERMUTATIONS)
+def test_node_template_artifacts_section_syntax_empty(parser, macros):
+parser.parse_literal(MACROS[macros] + """
+tosca_definitions_version: tosca_simple_yaml_1_0
+{%- call artifacts() -%}
+{}
+{% endcall %}
+""").assert_success()
+
+
+# Artifact
+
+@pytest.mark.parametrize('macros,value', matrix(PERMUTATIONS, data.NOT_A_DICT))
+def test_node_template_artifact_syntax_type(parser, macros, value):
+parser.parse_literal(MACROS[macros] + """
+tosca_definitions_version: tosca_simple_yaml_1_0
+{%- call artifacts() %}
+my_artifact: {{ value }}
+{% endcall %}
+""", dict(value=value)).assert_failure()
+
+
+@pytest.mark.parametrize('macros', PERMUTATIONS)
+def test_node_template_artifact_syntax_unsupported(parser, macros):
+parser.parse_literal(MACROS[macros] + """
+tosca_definitions_version: tosca_simple_yaml_1_0
+{%- call artifacts() %}
+my_artifact:
+  type: MyType
+  unsupported: {}
+{% endcall %}
+""").assert_failure()
+
+
+@pytest.mark.parametrize('macros', PERMUTATIONS)
+def test_node_template_artifact_syntax_empty(parser, macros):
+parser.parse_literal(MACROS[macros] + """
+tosca_definitions_version: tosca_simple_yaml_1_0
+{%- call artifacts() %}
+my_artifact: {} # "type" and "file" are required
+{% endcall %}
+""").assert_failure()
+
+
+# Type
+
+@pytest.mark.parametrize('macros,value', matrix(PERMUTATIONS, 
data.NOT_A_STRING))
+def test_node_template_artifact_type_syntax_type(parser, macros, value):
+parser.parse_literal(MACROS[macros] + """
+tosca_definitions_version: tosca_simple_yaml_1_0
+{%- call artifacts() %}
+my_artifact:
+  type: {{ value }}
+  file: a file
+{% endcall %}
+""", dict(value=value)).assert_failure()
+
+
+@pytest.mark.parametrize('macros', PERMUTATIONS)
+def test_node_template_artifact_type_unknown(parser, macros):
+parser.parse_literal(MACROS[macros] + """
+tosca_definitions_version: tosca_simple_yaml_1_0
+{%- call artifacts() %}
+my_artifact:
+  type: UnknownType
+  file: a file
+{% endcall %}
+""").assert_failure()
+
+
+# File
+
+@pytest.mark.parametrize('macros,value', matrix(PERMUTATIONS, 
data.NOT_A_STRING))
+def test_node_template_artifact_file_syntax_type(parser, macros, value):
+ 

[12/12] incubator-ariatosca git commit: Fixes

2017-11-24 Thread emblemparade
Fixes


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/1dfb81c8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/1dfb81c8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/1dfb81c8

Branch: refs/heads/ARIA-1-parser-test-suite
Commit: 1dfb81c8aac235c5f6aa024f4916b2eefcc43463
Parents: 89b9f13
Author: Tal Liron 
Authored: Fri Nov 24 12:22:52 2017 -0600
Committer: Tal Liron 
Committed: Fri Nov 24 12:27:30 2017 -0600

--
 aria/__init__.py|  5 +-
 aria/modeling/service_instance.py   |  2 +-
 aria/parser/consumption/presentation.py | 50 ++--
 aria/parser/presentation/presentation.py|  1 +
 aria/parser/reading/yaml.py | 11 +++--
 aria/utils/collections.py   | 25 +++---
 .../simple_v1_0/assignments.py  | 19 
 .../simple_v1_0/modeling/__init__.py| 14 +++---
 .../simple_v1_0/modeling/data_types.py  |  6 +--
 .../extensions/aria_extension_tosca/conftest.py |  3 ++
 tests/mechanisms/parsing/__init__.py| 10 +++-
 tests/mechanisms/utils.py   | 15 --
 12 files changed, 110 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1dfb81c8/aria/__init__.py
--
diff --git a/aria/__init__.py b/aria/__init__.py
index acaf81b..980a2bb 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -57,9 +57,8 @@ def install_aria_extensions(strict=True):
 if module_name.startswith('aria_extension_'):
 loader.find_module(module_name).load_module(module_name)
 for entry_point in pkg_resources.iter_entry_points(group='aria_extension'):
-# It should be possible to enable non strict loading - use the package
-# that is already installed inside the environment, and forego the
-# version demand
+# It should be possible to enable non strict loading - use the package 
that is already
+# installed inside the environment, and forgo the version demand
 if strict:
 entry_point.load()
 else:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1dfb81c8/aria/modeling/service_instance.py
--
diff --git a/aria/modeling/service_instance.py 
b/aria/modeling/service_instance.py
index b0e426c..21c1029 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -510,7 +510,7 @@ class NodeBase(InstanceModelMixin):
 @classmethod
 def determine_state(cls, op_name, is_transitional):
 """
-:returns the state the node should be in as a result of running the 
operation on this node.
+:return: the state the node should be in as a result of running the 
operation on this node.
 
 E.g. if we are running 
tosca.interfaces.node.lifecycle.Standard.create, then
 the resulting state should either 'creating' (if the task just 
started) or 'created'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1dfb81c8/aria/parser/consumption/presentation.py
--
diff --git a/aria/parser/consumption/presentation.py 
b/aria/parser/consumption/presentation.py
index b1f943d..0f0b380 100644
--- a/aria/parser/consumption/presentation.py
+++ b/aria/parser/consumption/presentation.py
@@ -46,19 +46,19 @@ class Read(Consumer):
 
 def consume(self):
 # Present the main location and all imports recursively
-main, results = self._present_all()
+main_result, all_results = self._present_all()
 
 # Merge presentations
-main.merge(results, self.context)
+main_result.merge(all_results, self.context)
 
 # Cache merged presentations
 if self.context.presentation.cache:
-for result in results:
+for result in all_results:
 result.cache()
 
-self.context.presentation.presenter = main.presentation
-if main.canonical_location is not None:
-self.context.presentation.location = main.canonical_location
+self.context.presentation.presenter = main_result.presentation
+if main_result.canonical_location is not None:
+self.context.presentation.location = main_result.canonical_location
 
 def dump(self):
 if self.context.has_arg_switch('yaml'):
@@ -73,11 +73,18 @@ class Read(Consumer):
 self.context.presentation.presenter._dump(self.context)
 
 def _handle_exception(self, e):
-if 

incubator-ariatosca git commit: Fixes

2017-11-24 Thread emblemparade
Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-1-parser-test-suite ce4a18371 -> 0cf1deafc


Fixes


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/0cf1deaf
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/0cf1deaf
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/0cf1deaf

Branch: refs/heads/ARIA-1-parser-test-suite
Commit: 0cf1deafc25bb21377be5f0153ae639f877af8a3
Parents: ce4a183
Author: Tal Liron 
Authored: Fri Nov 24 12:22:52 2017 -0600
Committer: Tal Liron 
Committed: Fri Nov 24 12:22:52 2017 -0600

--
 aria/__init__.py|   5 +-
 aria/modeling/service_instance.py   |   2 +-
 aria/parser/consumption/presentation.py |  50 +-
 aria/parser/presentation/presentation.py|   1 +
 aria/parser/reading/yaml.py |  11 +-
 aria/utils/collections.py   |  25 +-
 .../simple_v1_0/assignments.py  |  19 +-
 .../simple_v1_0/modeling/__init__.py|  14 +-
 .../simple_v1_0/modeling/data_types.py  |   6 +-
 test_ssh.py | 528 ---
 .../extensions/aria_extension_tosca/conftest.py |   3 +
 tests/mechanisms/parsing/__init__.py|  10 +-
 tests/mechanisms/utils.py   |  15 +-
 13 files changed, 110 insertions(+), 579 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/aria/__init__.py
--
diff --git a/aria/__init__.py b/aria/__init__.py
index acaf81b..980a2bb 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -57,9 +57,8 @@ def install_aria_extensions(strict=True):
 if module_name.startswith('aria_extension_'):
 loader.find_module(module_name).load_module(module_name)
 for entry_point in pkg_resources.iter_entry_points(group='aria_extension'):
-# It should be possible to enable non strict loading - use the package
-# that is already installed inside the environment, and forego the
-# version demand
+# It should be possible to enable non strict loading - use the package 
that is already
+# installed inside the environment, and forgo the version demand
 if strict:
 entry_point.load()
 else:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/aria/modeling/service_instance.py
--
diff --git a/aria/modeling/service_instance.py 
b/aria/modeling/service_instance.py
index b0e426c..21c1029 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -510,7 +510,7 @@ class NodeBase(InstanceModelMixin):
 @classmethod
 def determine_state(cls, op_name, is_transitional):
 """
-:returns the state the node should be in as a result of running the 
operation on this node.
+:return: the state the node should be in as a result of running the 
operation on this node.
 
 E.g. if we are running 
tosca.interfaces.node.lifecycle.Standard.create, then
 the resulting state should either 'creating' (if the task just 
started) or 'created'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0cf1deaf/aria/parser/consumption/presentation.py
--
diff --git a/aria/parser/consumption/presentation.py 
b/aria/parser/consumption/presentation.py
index b1f943d..0f0b380 100644
--- a/aria/parser/consumption/presentation.py
+++ b/aria/parser/consumption/presentation.py
@@ -46,19 +46,19 @@ class Read(Consumer):
 
 def consume(self):
 # Present the main location and all imports recursively
-main, results = self._present_all()
+main_result, all_results = self._present_all()
 
 # Merge presentations
-main.merge(results, self.context)
+main_result.merge(all_results, self.context)
 
 # Cache merged presentations
 if self.context.presentation.cache:
-for result in results:
+for result in all_results:
 result.cache()
 
-self.context.presentation.presenter = main.presentation
-if main.canonical_location is not None:
-self.context.presentation.location = main.canonical_location
+self.context.presentation.presenter = main_result.presentation
+if main_result.canonical_location is not None:
+self.context.presentation.location = main_result.canonical_location
 
 def dump(self):
 if self.context.has_arg_switch('yaml'):
@@ 

[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153014666
  
--- Diff: aria/utils/threading.py ---
@@ -161,11 +242,7 @@ def close(self):
 self._workers = None
 
 def drain(self):
-"""
-Blocks until all current tasks finish execution, but leaves the 
worker threads alive.
-"""
-
-self._tasks.join()  # oddly, the API does not support a timeout 
parameter
+self._tasks.join() # oddly, the API does not support a timeout 
parameter
--- End diff --

Hm, I always hated this, and a lot of projects ignore it. Right now we have 
a mix of styles.

Let's put this as a separate JIRA and maybe change all our comments at once?


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153014611
  
--- Diff: aria/parser/reading/yaml.py ---
@@ -82,7 +84,11 @@ def read(self):
 # see issue here:
 # 
https://bitbucket.org/ruamel/yaml/issues/61/roundtriploader-causes-exceptions-with
 #yaml_loader = yaml.RoundTripLoader(data)
-yaml_loader = yaml.SafeLoader(data)
+try:
+# Faster C-based loader, might not be available on all 
platforms
+yaml_loader = yaml.CSafeLoader(data)
+except BaseException:
--- End diff --

I think I'm sure ... I want the failover to always work. Even if 
CSafeLoader does exist but fails somehow internally, we should still make an 
effort to run.


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153014418
  
--- Diff: aria/parser/reading/reader.py ---
@@ -28,16 +28,9 @@ def __init__(self, context, location, loader):
 
 def load(self):
 with OpenClose(self.loader) as loader:
--- End diff --

If you can refactor this to make it use context manager and still be clear, 
please show me.


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153014247
  
--- Diff: aria/parser/presentation/presentation.py ---
@@ -199,6 +199,9 @@ class Presentation(PresentationBase):
 """
 
 def _validate(self, context):
+if (not 
context.presentation.configuration.get('validate_normative', True)) \
--- End diff --

+1


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153013980
  
--- Diff: aria/parser/loading/uri.py ---
@@ -44,6 +45,7 @@ def __init__(self, context, location, 
origin_location=None):
 self.location = location
 self._prefixes = StrictList(value_class=basestring)
 self._loader = None
+self._canonical_location = None
--- End diff --

I think "canonical" is a well-known adjective for file systems and URIs and 
might not need explanation. "Canonical" means globally absolute. If you think 
it should be documented, then where? It is used a lot.


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153013913
  
--- Diff: aria/parser/loading/loader.py ---
@@ -32,3 +32,6 @@ def close(self):
 
 def load(self):
 raise NotImplementedError
+
+def get_canonical_location(self):  
 # pylint: disable=no-self-use
--- End diff --

What do you propose?


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153013856
  
--- Diff: aria/parser/consumption/presentation.py ---
@@ -86,52 +73,193 @@ def dump(self):
 self.context.presentation.presenter._dump(self.context)
 
 def _handle_exception(self, e):
-if isinstance(e, AlreadyReadException):
+if isinstance(e, _Skip):
 return
 super(Read, self)._handle_exception(e)
 
-def _present(self, location, origin_location, presenter_class, 
executor):
+def _present_all(self):
+location = self.context.presentation.location
+
+if location is None:
+self.context.validation.report('Read consumer: missing 
location')
+return
+
+executor = self.context.presentation.create_executor()
+try:
+# This call may recursively submit tasks to the executor if 
there are imports
+main = self._present(location, None, None, executor)
+
+# Wait for all tasks to complete
+executor.drain()
+
+# Handle exceptions
+for e in executor.exceptions:
+self._handle_exception(e)
+
+results = executor.returns or []
+finally:
+executor.close()
+
+results.insert(0, main)
+
+return main, results
+
+def _present(self, location, origin_canonical_location, 
origin_presenter_class, executor):
 # Link the context to this thread
 self.context.set_thread_local()
 
-raw = self._read(location, origin_location)
+# Canonicalize the location
+if self.context.reading.reader is None:
+loader, canonical_location = self._create_loader(location, 
origin_canonical_location)
+else:
+# If a reader is specified in the context then we skip loading
+loader = None
+canonical_location = location
+
+# Skip self imports
+if canonical_location == origin_canonical_location:
+raise _Skip()
+
+if self.context.presentation.cache:
+# Is the presentation in the global cache?
+try:
+presentation = PRESENTATION_CACHE[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+try:
+# Is the presentation in the local cache?
+presentation = self._cache[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+# Create and cache new presentation
+presentation = self._create_presentation(canonical_location, 
loader, origin_presenter_class)
+self._cache[canonical_location] = presentation
 
+# Submit imports to executor
+if hasattr(presentation, '_get_import_locations'):
+import_locations = 
presentation._get_import_locations(self.context)
+if import_locations:
+for import_location in import_locations:
+import_location = UriLocation(import_location)
+executor.submit(self._present, import_location, 
canonical_location,
+presentation.__class__, executor)
+
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+
+def _create_loader(self, location, origin_canonical_location):
+loader = 
self.context.loading.loader_source.get_loader(self.context.loading, location,
+   
origin_canonical_location)
+
+canonical_location = None
+
+if origin_canonical_location is not None:
+cache_key = (origin_canonical_location, location)
+try:
+canonical_location = CANONICAL_LOCATION_CACHE[cache_key]
+return loader, canonical_location
+except KeyError:
+pass
+else:
+cache_key = None
+
+canonical_location = loader.get_canonical_location()
+
+# Because retrieving the canonical location can be costly, we will 
try to cache it
+if cache_key is not None:
+CANONICAL_LOCATION_CACHE[cache_key] = canonical_location
+
+return loader, canonical_location
+
+def _create_presentation(self, canonical_location, loader, 
default_presenter_class):
+   

[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153013725
  
--- Diff: aria/parser/consumption/presentation.py ---
@@ -31,47 +32,33 @@ class Read(Consumer):
 instances.
 
 It supports agnostic raw data composition for presenters that have
-``_get_import_locations`` and ``_merge_import``.
+``_get_import_locations``, ``_validate_import``, and ``_merge_import``.
 
 To improve performance, loaders are called asynchronously on separate 
threads.
 
 Note that parsing may internally trigger more than one 
loading/reading/presentation
 cycle, for example if the agnostic raw data has dependencies that must 
also be parsed.
 """
 
-def consume(self):
-if self.context.presentation.location is None:
-self.context.validation.report('Presentation consumer: missing 
location')
-return
-
-presenter = None
-imported_presentations = None
+def __init__(self, context):
+super(Read, self).__init__(context)
+self._cache = {}
 
-executor = 
FixedThreadPoolExecutor(size=self.context.presentation.threads,
-   
timeout=self.context.presentation.timeout)
-executor.print_exceptions = 
self.context.presentation.print_exceptions
-try:
-presenter = self._present(self.context.presentation.location, 
None, None, executor)
-executor.drain()
-
-# Handle exceptions
-for e in executor.exceptions:
-self._handle_exception(e)
+def consume(self):
+# Present the main location and all imports recursively
+main, results = self._present_all()
 
-imported_presentations = executor.returns
-finally:
-executor.close()
+# Merge presentations
+main.merge(results, self.context)
 
-# Merge imports
-if (imported_presentations is not None) and hasattr(presenter, 
'_merge_import'):
-for imported_presentation in imported_presentations:
-okay = True
-if hasattr(presenter, '_validate_import'):
-okay = presenter._validate_import(self.context, 
imported_presentation)
-if okay:
-presenter._merge_import(imported_presentation)
+# Cache merged presentations
+if self.context.presentation.cache:
+for result in results:
+result.cache()
 
-self.context.presentation.presenter = presenter
+self.context.presentation.presenter = main.presentation
+if main.canonical_location is not None:
--- End diff --

The loader might fail for some reason to turn the location into a canonical 
location


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153013051
  
--- Diff: tests/mechanisms/utils.py ---
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+
+
+def matrix(*iterables, **kwargs):
+"""
+Generates a matrix of parameters for ``@pytest.mark.parametrize``.
--- End diff --

Because `*iterables` can be any length... if you use both `*` and `**` how 
else can you specify a named parameter?


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153013058
  
--- Diff: tests/mechanisms/utils.py ---
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+
+
+def matrix(*iterables, **kwargs):
+"""
+Generates a matrix of parameters for ``@pytest.mark.parametrize``.
+
+The matrix is essentially the Cartesian product of the arguments 
(which should be iterables),
+with the added ability to "flatten" each value by breaking up tuples 
and recombining them into a
+final flat value.
+
+To do such recombination, use the ``counts`` argument (tuple) to 
specify the number of elements
+per value in order. Any count greater than 1 (the default) enables 
recombination of that value.
+
+Example::
+
+  x = ('hello', 'goodbye')
+  y = ('Linus', 'Richard')
+  matrix(x, y) ->
+('hello', 'Linus'),
+('hello', 'Richard'),
+('goodbye', 'Linus'),
+('goodbye', 'Richard')
+
+  y = (('Linus', 'Torvalds'), ('Richard', 'Stallman'))
+  matrix(x, y) ->
+('hello', ('Linus', 'Torvalds')),
+('hello', ('Richard', 'Stallman')),
+('goodbye', ('Linus', 'Torvalds')),
+('goodbye', ('Richard', 'Stallman'))
+
+  matrix(x, y, counts=(1, 2)) ->
+('hello', 'Linus', 'Torvalds'),
+('hello', 'Richard', 'Stallman'),
+('goodbye', 'Linus', 'Torvalds'),
+('goodbye', 'Richard', 'Stallman')
+"""
--- End diff --

I will add more documentation.


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153012904
  
--- Diff: tests/mechanisms/parsing/__init__.py ---
@@ -0,0 +1,75 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+import jinja2
+
+
+LINE_BREAK = '\n' + '-' * 60
+
+
+class Parsed(object):
+def __init__(self):
+self.issues = []
+self.text = ''
+self.verbose = False
+
+def assert_success(self):
+__tracebackhide__ = True # pylint: disable=unused-variable
--- End diff --

A PyTest feature ... I will link to the documentation.


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153012524
  
--- Diff: 
tests/extensions/aria_extension_tosca/simple_v1_0/functions/test_function_concat.py
 ---
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def test_functions_concat_syntax_empty(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  MyType:
+properties:
+  my_parameter:
+type: string
+topology_template:
+  node_templates:
+my_node:
+  type: MyType
+  properties:
+my_parameter: { concat: [] }
+""").assert_success()
+
+
+def test_functions_concat_strings(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  MyType:
+properties:
+  my_parameter:
+type: string
+topology_template:
+  node_templates:
+my_node:
+  type: MyType
+  properties:
+my_parameter: { concat: [ a, b, c ] }
+""").assert_success()
+
+
+def test_functions_concat_mixed(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  MyType:
+properties:
+  my_parameter:
+type: string
+topology_template:
+  node_templates:
+my_node:
+  type: MyType
+  properties:
+my_parameter: { concat: [ a, 1, 1.1, null, [], {} ] }
+""").assert_success()
+
+
+def test_functions_concat_nested(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  MyType:
+properties:
+  my_parameter:
+type: string
+topology_template:
+  node_templates:
+my_node:
+  type: MyType
+  properties:
+my_parameter: { concat: [ a, { concat: [ b, c ] } ] }
+""").assert_success()
+
+
+# Unicode
+
+def test_functions_concat_unicode(parser):
+parser.parse_literal("""
+tosca_definitions_version: tosca_simple_yaml_1_0
+node_types:
+  類型:
+properties:
+  參數:
+type: string
+topology_template:
+  node_templates:
+模板:
+  type: 類型
+  properties:
+參數: { concat: [ 一, 二, 三 ] }
+""").assert_success()
--- End diff --

It would be impossible to create such a test that would work with non-TOSCA 
parsers.

In the future we will boost our topology engine tests to test actual 
function evaluation. (And of course things get much more complicated there, 
because we deal with HOST, TARGET, and other topological aspects, as well as 
runtime values in `get_attribute`.)

For this suite, we are just testing parsing of intrinsic functions, not 
their evaluation.


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153011903
  
--- Diff: tests/extensions/aria_extension_tosca/conftest.py ---
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+PyTest configuration module.
+
+Add support for a "--tosca-parser" CLI option.
+"""
+
+import pytest
+
+from ...mechanisms.parsing.aria import AriaParser
+
+
+def pytest_addoption(parser):
--- End diff --

These are PyTest-defined hooks. I will add a link to the PyTest 
documentation.


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153011505
  
--- Diff: 
extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py ---
@@ -318,15 +318,15 @@ def report(message, constraint):
 #
 
 def get_data_type_value(context, presentation, field_name, type_name):
-the_type = get_type_by_name(context, type_name, 'data_types')
-if the_type is not None:
-value = getattr(presentation, field_name)
-if value is not None:
+value = getattr(presentation, field_name)
+if value is not None:
+the_type = get_type_by_name(context, type_name, 'data_types')
--- End diff --

+1 to `data_type`


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153010864
  
--- Diff: aria/utils/collections.py ---
@@ -220,27 +225,30 @@ def __setitem__(self, key, value, **_):
 return super(StrictDict, self).__setitem__(key, value)
 
 
-def merge(dict_a, dict_b, path=None, strict=False):
+def merge(dict_a, dict_b, copy=True, strict=False, path=None):
--- End diff --

+1


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153009548
  
--- Diff: aria/parser/reading/yaml.py ---
@@ -16,18 +16,30 @@
 from .reader import Reader
 from .locator import Locator
 from .exceptions import ReaderSyntaxError
-from .locator import LocatableString, LocatableInt, LocatableFloat
+from .locator import (LocatableString, LocatableInt, LocatableFloat)
 
-# Add our types to ruamel.yaml
+
+MERGE_TAG = u'tag:yaml.org,2002:merge'
+MAP_TAG = u'tag:yaml.org,2002:map'
+
+
+# Add our types to RoundTripRepresenter
 yaml.representer.RoundTripRepresenter.add_representer(
 LocatableString, 
yaml.representer.RoundTripRepresenter.represent_unicode)
 yaml.representer.RoundTripRepresenter.add_representer(
 LocatableInt, yaml.representer.RoundTripRepresenter.represent_int)
 yaml.representer.RoundTripRepresenter.add_representer(
 LocatableFloat, yaml.representer.RoundTripRepresenter.represent_float)
 
-MERGE_TAG = u'tag:yaml.org,2002:merge'
-MAP_TAG = u'tag:yaml.org,2002:map'
+
+def construct_yaml_map(self, node):
--- End diff --

+1


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153009232
  
--- Diff: aria/parser/presentation/field_validators.py ---
@@ -14,12 +14,29 @@
 # limitations under the License.
 
 
+from ...utils.formatting import safe_repr
 from ..validation import Issue
 from .utils import (parse_types_dict_names, report_issue_for_unknown_type,
 report_issue_for_parent_is_self, 
report_issue_for_unknown_parent_type,
 report_issue_for_circular_type_hierarchy)
 
 
+def not_negative_validator(field, presentation, context):
+"""
+Makes sure that the field is not negative.
+
+Can be used with the :func:`field_validator` decorator.
+"""
+
+field.default_validate(presentation, context)
+value = getattr(presentation, field.name)
+if (value is not None) and (value < 0):
--- End diff --

The field also has a type which is enforced as a separate validation. I 
wanted this particular validator to be general purpose, so it would work with 
any kind of object that supports comparison ("__gt__" and other magic methods).


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153009028
  
--- Diff: aria/parser/presentation/context.py ---
@@ -63,3 +67,12 @@ def get_from_dict(self, *names):
 """
 
 return self.presenter._get_from_dict(*names) if self.presenter is 
not None else None
+
+def create_executor(self):
+if self.threads == 1:
--- End diff --

What do you mean by "initiator"? You can configure the thread count in the 
parser context, just like everything else, whenever you start the parsing 
process. Normally you don't need to do this -- the defaults should be fine. 
Just for running tests in tox (which is multiprocess) it makes sense to 
override the default and enforce single-threading.


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153008880
  
--- Diff: aria/parser/consumption/presentation.py ---
@@ -86,52 +73,193 @@ def dump(self):
 self.context.presentation.presenter._dump(self.context)
 
 def _handle_exception(self, e):
-if isinstance(e, AlreadyReadException):
+if isinstance(e, _Skip):
 return
 super(Read, self)._handle_exception(e)
 
-def _present(self, location, origin_location, presenter_class, 
executor):
+def _present_all(self):
+location = self.context.presentation.location
+
+if location is None:
+self.context.validation.report('Read consumer: missing 
location')
+return
+
+executor = self.context.presentation.create_executor()
+try:
+# This call may recursively submit tasks to the executor if 
there are imports
+main = self._present(location, None, None, executor)
+
+# Wait for all tasks to complete
+executor.drain()
+
+# Handle exceptions
+for e in executor.exceptions:
+self._handle_exception(e)
+
+results = executor.returns or []
+finally:
+executor.close()
+
+results.insert(0, main)
+
+return main, results
+
+def _present(self, location, origin_canonical_location, 
origin_presenter_class, executor):
 # Link the context to this thread
 self.context.set_thread_local()
 
-raw = self._read(location, origin_location)
+# Canonicalize the location
+if self.context.reading.reader is None:
+loader, canonical_location = self._create_loader(location, 
origin_canonical_location)
+else:
+# If a reader is specified in the context then we skip loading
+loader = None
+canonical_location = location
+
+# Skip self imports
+if canonical_location == origin_canonical_location:
+raise _Skip()
+
+if self.context.presentation.cache:
+# Is the presentation in the global cache?
+try:
+presentation = PRESENTATION_CACHE[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+try:
+# Is the presentation in the local cache?
+presentation = self._cache[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+# Create and cache new presentation
+presentation = self._create_presentation(canonical_location, 
loader, origin_presenter_class)
+self._cache[canonical_location] = presentation
 
+# Submit imports to executor
+if hasattr(presentation, '_get_import_locations'):
+import_locations = 
presentation._get_import_locations(self.context)
+if import_locations:
+for import_location in import_locations:
+import_location = UriLocation(import_location)
+executor.submit(self._present, import_location, 
canonical_location,
+presentation.__class__, executor)
+
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+
+def _create_loader(self, location, origin_canonical_location):
+loader = 
self.context.loading.loader_source.get_loader(self.context.loading, location,
+   
origin_canonical_location)
+
+canonical_location = None
+
+if origin_canonical_location is not None:
+cache_key = (origin_canonical_location, location)
+try:
+canonical_location = CANONICAL_LOCATION_CACHE[cache_key]
+return loader, canonical_location
+except KeyError:
+pass
+else:
+cache_key = None
+
+canonical_location = loader.get_canonical_location()
+
+# Because retrieving the canonical location can be costly, we will 
try to cache it
+if cache_key is not None:
+CANONICAL_LOCATION_CACHE[cache_key] = canonical_location
+
+return loader, canonical_location
+
+def _create_presentation(self, canonical_location, loader, 
default_presenter_class):
+   

[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153008578
  
--- Diff: aria/parser/consumption/presentation.py ---
@@ -86,52 +73,193 @@ def dump(self):
 self.context.presentation.presenter._dump(self.context)
 
 def _handle_exception(self, e):
-if isinstance(e, AlreadyReadException):
+if isinstance(e, _Skip):
 return
 super(Read, self)._handle_exception(e)
 
-def _present(self, location, origin_location, presenter_class, 
executor):
+def _present_all(self):
+location = self.context.presentation.location
+
+if location is None:
+self.context.validation.report('Read consumer: missing 
location')
+return
+
+executor = self.context.presentation.create_executor()
+try:
+# This call may recursively submit tasks to the executor if 
there are imports
+main = self._present(location, None, None, executor)
+
+# Wait for all tasks to complete
+executor.drain()
+
+# Handle exceptions
+for e in executor.exceptions:
+self._handle_exception(e)
+
+results = executor.returns or []
+finally:
+executor.close()
+
+results.insert(0, main)
+
+return main, results
+
+def _present(self, location, origin_canonical_location, 
origin_presenter_class, executor):
 # Link the context to this thread
 self.context.set_thread_local()
 
-raw = self._read(location, origin_location)
+# Canonicalize the location
+if self.context.reading.reader is None:
+loader, canonical_location = self._create_loader(location, 
origin_canonical_location)
+else:
+# If a reader is specified in the context then we skip loading
+loader = None
+canonical_location = location
+
+# Skip self imports
+if canonical_location == origin_canonical_location:
+raise _Skip()
+
+if self.context.presentation.cache:
+# Is the presentation in the global cache?
+try:
+presentation = PRESENTATION_CACHE[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+try:
+# Is the presentation in the local cache?
+presentation = self._cache[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+# Create and cache new presentation
+presentation = self._create_presentation(canonical_location, 
loader, origin_presenter_class)
+self._cache[canonical_location] = presentation
 
+# Submit imports to executor
+if hasattr(presentation, '_get_import_locations'):
+import_locations = 
presentation._get_import_locations(self.context)
+if import_locations:
+for import_location in import_locations:
+import_location = UriLocation(import_location)
+executor.submit(self._present, import_location, 
canonical_location,
+presentation.__class__, executor)
+
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+
+def _create_loader(self, location, origin_canonical_location):
+loader = 
self.context.loading.loader_source.get_loader(self.context.loading, location,
+   
origin_canonical_location)
+
+canonical_location = None
+
+if origin_canonical_location is not None:
+cache_key = (origin_canonical_location, location)
+try:
+canonical_location = CANONICAL_LOCATION_CACHE[cache_key]
+return loader, canonical_location
+except KeyError:
+pass
+else:
+cache_key = None
+
+canonical_location = loader.get_canonical_location()
+
+# Because retrieving the canonical location can be costly, we will 
try to cache it
+if cache_key is not None:
+CANONICAL_LOCATION_CACHE[cache_key] = canonical_location
+
+return loader, canonical_location
+
+def _create_presentation(self, canonical_location, loader, 
default_presenter_class):
+   

[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153008582
  
--- Diff: aria/parser/consumption/presentation.py ---
@@ -86,52 +73,193 @@ def dump(self):
 self.context.presentation.presenter._dump(self.context)
 
 def _handle_exception(self, e):
-if isinstance(e, AlreadyReadException):
+if isinstance(e, _Skip):
 return
 super(Read, self)._handle_exception(e)
 
-def _present(self, location, origin_location, presenter_class, 
executor):
+def _present_all(self):
+location = self.context.presentation.location
+
+if location is None:
+self.context.validation.report('Read consumer: missing 
location')
+return
+
+executor = self.context.presentation.create_executor()
+try:
+# This call may recursively submit tasks to the executor if 
there are imports
+main = self._present(location, None, None, executor)
+
+# Wait for all tasks to complete
+executor.drain()
+
+# Handle exceptions
+for e in executor.exceptions:
+self._handle_exception(e)
+
+results = executor.returns or []
+finally:
+executor.close()
+
+results.insert(0, main)
+
+return main, results
+
+def _present(self, location, origin_canonical_location, 
origin_presenter_class, executor):
 # Link the context to this thread
 self.context.set_thread_local()
 
-raw = self._read(location, origin_location)
+# Canonicalize the location
+if self.context.reading.reader is None:
+loader, canonical_location = self._create_loader(location, 
origin_canonical_location)
+else:
+# If a reader is specified in the context then we skip loading
+loader = None
+canonical_location = location
+
+# Skip self imports
+if canonical_location == origin_canonical_location:
+raise _Skip()
+
+if self.context.presentation.cache:
+# Is the presentation in the global cache?
+try:
+presentation = PRESENTATION_CACHE[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+try:
+# Is the presentation in the local cache?
+presentation = self._cache[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+# Create and cache new presentation
+presentation = self._create_presentation(canonical_location, 
loader, origin_presenter_class)
+self._cache[canonical_location] = presentation
 
+# Submit imports to executor
+if hasattr(presentation, '_get_import_locations'):
+import_locations = 
presentation._get_import_locations(self.context)
+if import_locations:
+for import_location in import_locations:
+import_location = UriLocation(import_location)
+executor.submit(self._present, import_location, 
canonical_location,
+presentation.__class__, executor)
+
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+
+def _create_loader(self, location, origin_canonical_location):
+loader = 
self.context.loading.loader_source.get_loader(self.context.loading, location,
+   
origin_canonical_location)
+
+canonical_location = None
+
+if origin_canonical_location is not None:
+cache_key = (origin_canonical_location, location)
+try:
+canonical_location = CANONICAL_LOCATION_CACHE[cache_key]
+return loader, canonical_location
+except KeyError:
+pass
+else:
+cache_key = None
+
+canonical_location = loader.get_canonical_location()
+
+# Because retrieving the canonical location can be costly, we will 
try to cache it
+if cache_key is not None:
+CANONICAL_LOCATION_CACHE[cache_key] = canonical_location
+
+return loader, canonical_location
+
+def _create_presentation(self, canonical_location, loader, 
default_presenter_class):
+   

[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153008552
  
--- Diff: aria/parser/consumption/presentation.py ---
@@ -86,52 +73,193 @@ def dump(self):
 self.context.presentation.presenter._dump(self.context)
 
 def _handle_exception(self, e):
-if isinstance(e, AlreadyReadException):
+if isinstance(e, _Skip):
 return
 super(Read, self)._handle_exception(e)
 
-def _present(self, location, origin_location, presenter_class, 
executor):
+def _present_all(self):
+location = self.context.presentation.location
+
+if location is None:
+self.context.validation.report('Read consumer: missing 
location')
+return
+
+executor = self.context.presentation.create_executor()
+try:
+# This call may recursively submit tasks to the executor if 
there are imports
+main = self._present(location, None, None, executor)
+
+# Wait for all tasks to complete
+executor.drain()
+
+# Handle exceptions
+for e in executor.exceptions:
+self._handle_exception(e)
+
+results = executor.returns or []
+finally:
+executor.close()
+
+results.insert(0, main)
+
+return main, results
+
+def _present(self, location, origin_canonical_location, 
origin_presenter_class, executor):
 # Link the context to this thread
 self.context.set_thread_local()
 
-raw = self._read(location, origin_location)
+# Canonicalize the location
+if self.context.reading.reader is None:
+loader, canonical_location = self._create_loader(location, 
origin_canonical_location)
+else:
+# If a reader is specified in the context then we skip loading
+loader = None
+canonical_location = location
+
+# Skip self imports
+if canonical_location == origin_canonical_location:
+raise _Skip()
+
+if self.context.presentation.cache:
+# Is the presentation in the global cache?
+try:
+presentation = PRESENTATION_CACHE[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+try:
+# Is the presentation in the local cache?
+presentation = self._cache[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+# Create and cache new presentation
+presentation = self._create_presentation(canonical_location, 
loader, origin_presenter_class)
+self._cache[canonical_location] = presentation
 
+# Submit imports to executor
+if hasattr(presentation, '_get_import_locations'):
+import_locations = 
presentation._get_import_locations(self.context)
+if import_locations:
+for import_location in import_locations:
+import_location = UriLocation(import_location)
+executor.submit(self._present, import_location, 
canonical_location,
+presentation.__class__, executor)
+
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+
+def _create_loader(self, location, origin_canonical_location):
+loader = 
self.context.loading.loader_source.get_loader(self.context.loading, location,
+   
origin_canonical_location)
+
+canonical_location = None
+
+if origin_canonical_location is not None:
+cache_key = (origin_canonical_location, location)
+try:
+canonical_location = CANONICAL_LOCATION_CACHE[cache_key]
+return loader, canonical_location
+except KeyError:
+pass
+else:
+cache_key = None
+
+canonical_location = loader.get_canonical_location()
+
+# Because retrieving the canonical location can be costly, we will 
try to cache it
+if cache_key is not None:
+CANONICAL_LOCATION_CACHE[cache_key] = canonical_location
+
+return loader, canonical_location
+
+def _create_presentation(self, canonical_location, loader, 
default_presenter_class):
+   

[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153008314
  
--- Diff: aria/parser/consumption/presentation.py ---
@@ -86,52 +73,193 @@ def dump(self):
 self.context.presentation.presenter._dump(self.context)
 
 def _handle_exception(self, e):
-if isinstance(e, AlreadyReadException):
+if isinstance(e, _Skip):
 return
 super(Read, self)._handle_exception(e)
 
-def _present(self, location, origin_location, presenter_class, 
executor):
+def _present_all(self):
+location = self.context.presentation.location
+
+if location is None:
+self.context.validation.report('Read consumer: missing 
location')
+return
+
+executor = self.context.presentation.create_executor()
+try:
+# This call may recursively submit tasks to the executor if 
there are imports
+main = self._present(location, None, None, executor)
+
+# Wait for all tasks to complete
+executor.drain()
+
+# Handle exceptions
+for e in executor.exceptions:
+self._handle_exception(e)
+
+results = executor.returns or []
+finally:
+executor.close()
+
+results.insert(0, main)
+
+return main, results
+
+def _present(self, location, origin_canonical_location, 
origin_presenter_class, executor):
 # Link the context to this thread
 self.context.set_thread_local()
 
-raw = self._read(location, origin_location)
+# Canonicalize the location
+if self.context.reading.reader is None:
+loader, canonical_location = self._create_loader(location, 
origin_canonical_location)
+else:
+# If a reader is specified in the context then we skip loading
+loader = None
+canonical_location = location
+
+# Skip self imports
+if canonical_location == origin_canonical_location:
+raise _Skip()
+
+if self.context.presentation.cache:
+# Is the presentation in the global cache?
+try:
+presentation = PRESENTATION_CACHE[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+try:
+# Is the presentation in the local cache?
+presentation = self._cache[canonical_location]
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+except KeyError:
+pass
+
+# Create and cache new presentation
+presentation = self._create_presentation(canonical_location, 
loader, origin_presenter_class)
+self._cache[canonical_location] = presentation
 
+# Submit imports to executor
+if hasattr(presentation, '_get_import_locations'):
+import_locations = 
presentation._get_import_locations(self.context)
+if import_locations:
+for import_location in import_locations:
+import_location = UriLocation(import_location)
+executor.submit(self._present, import_location, 
canonical_location,
+presentation.__class__, executor)
+
+return _Result(presentation, canonical_location, 
origin_canonical_location)
+
+def _create_loader(self, location, origin_canonical_location):
+loader = 
self.context.loading.loader_source.get_loader(self.context.loading, location,
+   
origin_canonical_location)
+
+canonical_location = None
+
+if origin_canonical_location is not None:
--- End diff --

Perhaps the goal wasn't clear here: "location" is relative, while 
"canonical_location" is globally absolute. So the cache key has to be a 
combination of both, hence a tuple. I will add documentation to clarify this.


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153007941
  
--- Diff: aria/parser/consumption/presentation.py ---
@@ -86,52 +73,193 @@ def dump(self):
 self.context.presentation.presenter._dump(self.context)
 
 def _handle_exception(self, e):
-if isinstance(e, AlreadyReadException):
+if isinstance(e, _Skip):
 return
 super(Read, self)._handle_exception(e)
 
-def _present(self, location, origin_location, presenter_class, 
executor):
+def _present_all(self):
+location = self.context.presentation.location
+
+if location is None:
+self.context.validation.report('Read consumer: missing 
location')
+return
+
+executor = self.context.presentation.create_executor()
+try:
+# This call may recursively submit tasks to the executor if 
there are imports
+main = self._present(location, None, None, executor)
+
+# Wait for all tasks to complete
+executor.drain()
+
+# Handle exceptions
+for e in executor.exceptions:
+self._handle_exception(e)
+
+results = executor.returns or []
+finally:
+executor.close()
+
+results.insert(0, main)
+
+return main, results
+
+def _present(self, location, origin_canonical_location, 
origin_presenter_class, executor):
 # Link the context to this thread
 self.context.set_thread_local()
 
-raw = self._read(location, origin_location)
+# Canonicalize the location
+if self.context.reading.reader is None:
+loader, canonical_location = self._create_loader(location, 
origin_canonical_location)
+else:
+# If a reader is specified in the context then we skip loading
+loader = None
+canonical_location = location
+
+# Skip self imports
+if canonical_location == origin_canonical_location:
+raise _Skip()
+
+if self.context.presentation.cache:
+# Is the presentation in the global cache?
+try:
+presentation = PRESENTATION_CACHE[canonical_location]
--- End diff --

Yes there is. An "if" statement plus a retrieval statement are non-atomic, 
and since we are in a concurrent situation it's possible that between the if 
and the retrieval the data will be removed from the cache, causing the 
retrieval to fail with an exception even though the "if" succeeded. A single 
retrieval is atomic.

(This is generally the idiomatic "Python way" to do this -- always 
choose the atomic!)


---


[GitHub] incubator-ariatosca pull request #207: ARIA-1 Parser test suite

2017-11-24 Thread tliron
Github user tliron commented on a diff in the pull request:

https://github.com/apache/incubator-ariatosca/pull/207#discussion_r153006807
  
--- Diff: aria/parser/consumption/presentation.py ---
@@ -31,47 +32,33 @@ class Read(Consumer):
 instances.
 
 It supports agnostic raw data composition for presenters that have
-``_get_import_locations`` and ``_merge_import``.
+``_get_import_locations``, ``_validate_import``, and ``_merge_import``.
 
 To improve performance, loaders are called asynchronously on separate 
threads.
 
 Note that parsing may internally trigger more than one 
loading/reading/presentation
 cycle, for example if the agnostic raw data has dependencies that must 
also be parsed.
 """
 
-def consume(self):
-if self.context.presentation.location is None:
-self.context.validation.report('Presentation consumer: missing 
location')
-return
-
-presenter = None
-imported_presentations = None
+def __init__(self, context):
+super(Read, self).__init__(context)
+self._cache = {}
 
-executor = 
FixedThreadPoolExecutor(size=self.context.presentation.threads,
-   
timeout=self.context.presentation.timeout)
-executor.print_exceptions = 
self.context.presentation.print_exceptions
-try:
-presenter = self._present(self.context.presentation.location, 
None, None, executor)
-executor.drain()
-
-# Handle exceptions
-for e in executor.exceptions:
-self._handle_exception(e)
+def consume(self):
+# Present the main location and all imports recursively
+main, results = self._present_all()
--- End diff --

+1 renamed to "main_result" and "all_results"


---