Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-30-SQL-based-storage-implementation c9df5b140 -> 2d8f13385


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/aria/storage/sql_mapi.py
----------------------------------------------------------------------
diff --git a/aria/storage/sql_mapi.py b/aria/storage/sql_mapi.py
new file mode 100644
index 0000000..ab26dd7
--- /dev/null
+++ b/aria/storage/sql_mapi.py
@@ -0,0 +1,363 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+SQLAlchemy based MAPI
+"""
+
+from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.sql.elements import Label
+
+from aria.utils.collections import OrderedDict
+
+from aria.storage import (
+    api,
+    exceptions
+)
+
+
+DEFAULT_SQL_DIALECT = 'sqlite'
+
+
class SQLAlchemyModelAPI(api.ModelAPI):
    """
    SQL based MAPI, implemented on top of SQLAlchemy.

    Each instance is bound to a single model class (``self.model_cls``,
    provided by the base class) and performs all storage operations for
    that model via the supplied SQLAlchemy engine and session.
    """

    def __init__(self,
                 engine,
                 session,
                 **kwargs):
        """
        :param engine: SQLAlchemy engine, used for DDL (table create/drop)
        :param session: SQLAlchemy session, used for queries and commits
        """
        super(SQLAlchemyModelAPI, self).__init__(**kwargs)
        self._engine = engine
        self._session = session

    def get(self, entry_id, include=None, filters=None, locking=False,
            **kwargs):
        """Return a single result based on the model class and element ID

        :param entry_id: Value to match against the model's `id` column
        (only used when no explicit `filters` are given)
        :param include: An optional list of column names to include
        :param filters: An optional dict of column names to values (or
        lists of values) to filter by
        :param locking: If True, lock the selected row (SELECT FOR UPDATE)
        :raises exceptions.StorageError: If no matching row was found
        """
        filters = filters or {'id': entry_id}
        query = self._get_query(include, filters)
        if locking:
            query = query.with_for_update()
        result = query.first()

        if not result:
            raise exceptions.StorageError(
                'Requested {0} with ID `{1}` was not found'
                .format(self.model_cls.__name__, entry_id)
            )
        return result

    def iter(self,
             include=None,
             filters=None,
             pagination=None,
             sort=None,
             **kwargs):
        """Yield a (possibly empty) iterator of `model_class` results

        :param include: An optional list of column names to include
        :param filters: An optional dict of column names to values (or
        lists of values) to filter by
        :param pagination: An optional dict with `size` and `offset` keys
        :param sort: An optional dict of column names to order ('asc'/'desc')
        """
        query = self._get_query(include, filters, sort)

        results, _, _, _ = self._paginate(query, pagination)

        for result in results:
            yield result

    def put(self, entry, **kwargs):
        """Create a `model_class` instance from a serializable `model` object

        :param entry: A dict with relevant kwargs, or an instance of a class
        that has a `to_dict` method, and whose attributes match the columns
        of `model_class` (might also be just an instance of `model_class`)
        :return: An instance of `model_class`
        """
        self._session.add(entry)
        self._safe_commit()
        return entry

    def delete(self, entry_id, filters=None, **kwargs):
        """Delete a single result based on the model class and element ID

        :param entry_id: ID of the entry to delete
        :param filters: An optional filters dict (see `get`)
        :raises exceptions.StorageError: If the entry was not found
        :return: The deleted instance
        """
        try:
            instance = self.get(
                entry_id,
                filters=filters
            )
        except exceptions.StorageError:
            raise exceptions.StorageError(
                'Could not delete {0} with ID `{1}` - element not found'
                .format(
                    self.model_cls.__name__,
                    entry_id
                )
            )
        # Force-load join-based properties before deleting, so they remain
        # accessible on the returned (now detached) instance
        self._load_properties(instance)
        self._session.delete(instance)
        self._safe_commit()
        return instance

    # TODO: this might need rework
    def update(self, entry, **kwargs):
        """Add `entry` to the DB session, and attempt to commit

        :return: The updated instance
        """
        return self.put(entry)

    def refresh(self, entry):
        """Reload the instance with fresh information from the DB

        :param entry: Instance to be re-loaded from the DB
        :return: The refreshed instance
        """
        self._session.refresh(entry)
        self._load_properties(entry)
        return entry

    def _destroy_connection(self):
        # No-op: the engine/session lifecycle is managed by the caller
        pass

    def _establish_connection(self):
        # No-op: the engine/session lifecycle is managed by the caller
        pass

    def create(self):
        """Create the table for `model_class` in the storage"""
        self.model_cls.__table__.create(self._engine)

    def drop(self):
        """
        Drop the table from the storage.
        """
        self.model_cls.__table__.drop(self._engine)

    def _safe_commit(self):
        """Try to commit changes in the session. Roll back if an exception
        was raised, and re-raise SQLAlchemy errors as StorageError

        :raises exceptions.StorageError: If the commit failed
        """
        try:
            self._session.commit()
        except SQLAlchemyError as e:
            self._session.rollback()
            raise exceptions.StorageError(
                'SQL Storage error: {0}'.format(str(e))
            )

    def _get_base_query(self, include, joins):
        """Create the initial query from the model class and included columns

        :param include: A (possibly empty) list of columns to include in
        the query
        :param joins: A (possibly empty) list of models on which the query
        should join
        :return: An SQLAlchemy AppenderQuery object
        """

        # If only some columns are included, query through the session object
        if include:
            query = self._session.query(*include)
        else:
            # If all columns should be returned, query directly from the model
            query = self._session.query(self.model_cls)

        # Add any joins that might be necessary
        for join_model in joins:
            query = query.join(join_model)

        return query

    @staticmethod
    def _sort_query(query, sort=None):
        """Add sorting clauses to the query

        :param query: Base SQL query
        :param sort: An optional dictionary where keys are column names to
        sort by, and values are the order (asc/desc)
        :return: An SQLAlchemy AppenderQuery object
        """
        if sort:
            for column, order in sort.items():
                if order == 'desc':
                    column = column.desc()
                query = query.order_by(column)
        return query

    @staticmethod
    def _filter_query(query, filters):
        """Add filter clauses to the query

        :param query: Base SQL query
        :param filters: An optional dictionary where keys are column names to
        filter by, and values are values applicable for those columns (or lists
        of such values)
        :return: An SQLAlchemy AppenderQuery object
        """
        for column, value in filters.items():
            # If there are multiple values, use `in_`, otherwise, use `eq`
            if isinstance(value, (list, tuple)):
                query = query.filter(column.in_(value))
            else:
                query = query.filter(column == value)

        return query

    def _get_query(self,
                   include=None,
                   filters=None,
                   sort=None):
        """Get an SQL query object based on the params passed

        :param include: An optional list of columns to include in the query
        :param filters: An optional dictionary where keys are column names to
        filter by, and values are values applicable for those columns (or lists
        of such values)
        :param sort: An optional dictionary where keys are column names to
        sort by, and values are the order (asc/desc)
        :return: A sorted and filtered query with only the relevant
        columns
        """

        include = include or []
        filters = filters or dict()
        sort = sort or OrderedDict()

        # Joins must be computed while the names are still strings; only
        # afterwards are the names resolved into actual column objects
        joins = self._get_join_models_list(include, filters, sort)
        include, filters, sort = self._get_columns_from_field_names(
            include, filters, sort
        )

        query = self._get_base_query(include, joins)
        query = self._filter_query(query, filters)
        query = self._sort_query(query, sort)
        return query

    def _get_columns_from_field_names(self,
                                      include,
                                      filters,
                                      sort):
        """Go over the optional parameters (include, filters, sort), and
        replace column names with actual SQLA column objects
        """
        all_includes = [self._get_column(c) for c in include]
        include = []
        # Columns that are inferred from properties (Labels) should be included
        # last for the following joins to work properly
        for col in all_includes:
            if isinstance(col, Label):
                include.append(col)
            else:
                include.insert(0, col)

        filters = dict((self._get_column(c), filters[c]) for c in filters)
        sort = OrderedDict((self._get_column(c), sort[c]) for c in sort)

        return include, filters, sort

    def _get_join_models_list(self, include, filters, sort):
        """Return a list of models on which the query should be joined, as
        inferred from the include, filter and sort column names
        """
        if not self.model_cls.is_resource:
            return []

        # list(...) keeps this working on both python 2 (where keys() is a
        # list) and python 3 (where keys() is a view that cannot be
        # concatenated with a list using `+`)
        all_column_names = list(include) + list(filters) + list(sort)
        join_columns = set(column_name for column_name in all_column_names
                           if self._is_join_column(column_name))

        # If the only columns included are the columns on which we would
        # normally join, there isn't actually a need to join, as the FROM
        # clause in the query will be generated from the relevant models anyway
        if include == list(join_columns):
            return []

        # Initializing a set, because the same model can appear in several
        # join lists
        join_models = set()
        for column_name in join_columns:
            join_models.update(
                self.model_cls.join_properties[column_name]['models']
            )
        # Sort the models by their correct join order
        join_models = sorted(join_models,
                             key=lambda model: model.join_order, reverse=True)

        return join_models

    def _is_join_column(self, column_name):
        """Return False if the column name corresponds to a regular SQLA
        column that `model_class` has.
        Return True if the column that should be used is a join column (see
        SQLModelBase for an explanation)
        """
        return self.model_cls.is_resource and \
            column_name in self.model_cls.join_properties

    def _get_column(self, column_name):
        """Return the column on which an action (filtering, sorting, etc.)
        would need to be performed. Can be either an attribute of the class,
        or needs to be inferred from the class' `join_properties` property
        """
        if self._is_join_column(column_name):
            return self.model_cls.join_properties[column_name]['column']
        else:
            return getattr(self.model_cls, column_name)

    # TODO is this really needed in aria?
    @staticmethod
    def _paginate(query, pagination):
        """Paginate the query by size and offset

        :param query: Current SQLAlchemy query object
        :param pagination: An optional dict with size and offset keys
        :return: A tuple with four elements:
        - results: `size` items starting from `offset`
        - the total count of items
        - `size` [default: 0]
        - `offset` [default: 0]
        """
        if pagination:
            # NOTE(review): a pagination dict without `size` produces
            # LIMIT 0, i.e. an empty result set - confirm this is intended
            size = pagination.get('size', 0)
            offset = pagination.get('offset', 0)
            total = query.order_by(None).count()  # Fastest way to count
            results = query.limit(size).offset(offset).all()
            return results, total, size, offset
        else:
            results = query.all()
            return results, len(results), 0, 0

    @staticmethod
    def _load_properties(instance):
        """A helper method used to overcome a problem where the properties
        that rely on joins aren't being loaded automatically
        """
        if instance.is_resource:
            for prop in instance.join_properties:
                # Touching the attribute forces SQLAlchemy to load it
                getattr(instance, prop)
+
+
class ListResult(object):
    """
    A ListResult wraps the items returned for a list request together with
    metadata describing the result set.
    """

    def __init__(self, items, metadata):
        # Both attributes are part of the public interface
        self.items = items
        self.metadata = metadata

    def __getitem__(self, item):
        return self.items[item]

    def __iter__(self):
        return iter(self.items)

    def __len__(self):
        return len(self.items)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/aria/storage/structures.py
----------------------------------------------------------------------
diff --git a/aria/storage/structures.py b/aria/storage/structures.py
index 9b120de..9bca142 100644
--- a/aria/storage/structures.py
+++ b/aria/storage/structures.py
@@ -31,8 +31,10 @@ import json
 import jsonpickle
 from sqlalchemy import VARCHAR
 from sqlalchemy.ext.mutable import Mutable
+from sqlalchemy.orm import relationship, backref
 from sqlalchemy.ext.declarative import declarative_base
 # pylint: disable=unused-import
+from sqlalchemy.ext.associationproxy import association_proxy
 from sqlalchemy import (
     schema,
     Column,
@@ -49,82 +51,40 @@ from sqlalchemy import (
     orm,
 )
 
-
 Model = declarative_base()
 
 
-def foreign_key(
-        parent_table,
-        id_col_name='storage_id',
-        nullable=False,
-        column_type=Integer
-):
+def foreign_key(foreign_key_column, nullable=False):
     """Return a ForeignKey object with the relevant
 
-    :param parent_table: SQL name of the parent table
-    :param id_col_name: Name of the parent table's ID column [default: `id`]
+    :param foreign_key_column: Unique id column in the parent table
     :param nullable: Should the column be allowed to remain empty
-    :param column_type: The type (integer/text/etc.) of the column
-    :return:
     """
     return Column(
-        column_type,
-        ForeignKey(
-            '{0}.{1}'.format(parent_table.__tablename__, id_col_name),
-            ondelete='CASCADE'
-        ),
+        ForeignKey(foreign_key_column, ondelete='CASCADE'),
         nullable=nullable
     )
 
 
-def one_to_many_relationship(
-        child_class_name,
-        column_name,
-        parent_class_name,
-        back_reference_name,
-        parent_id_name='storage_id',
-):
+def one_to_many_relationship(child_class,
+                             parent_class,
+                             foreign_key_column,
+                             backreference=None):
     """Return a one-to-many SQL relationship object
     Meant to be used from inside the *child* object
 
-    :param child_class_name: Class name of the child table
-    :param column_name: Name of the column pointing to the parent table
-    :param parent_class_name: Class name of the parent table
-    :param back_reference_name: The name to give to the reference to the child
-    :param parent_id_name: Name of the parent table's ID column [default: `id`]
-    :return:
+    :param parent_class: Class of the parent table
+    :param child_class: Class of the child table
+    :param foreign_key_column: The column of the foreign key
+    :param backreference: The name to give to the reference to the child
     """
-    return orm.relationship(
-        parent_class_name,
-        primaryjoin='{0}.{1} == {2}.{3}'.format(
-            child_class_name,
-            column_name,
-            parent_class_name,
-            parent_id_name
-        ),
+    backreference = backreference or child_class.__tablename__
+    return relationship(
+        parent_class,
+        primaryjoin=lambda: parent_class.storage_id == foreign_key_column,
         # The following line make sure that when the *parent* is
         # deleted, all its connected children are deleted as well
-        backref=orm.backref(back_reference_name, cascade='all')
-    )
-
-
-def many_to_many_relationship(
-        other_table_class_name,
-        connecting_table,
-        back_reference_name
-):
-    """Return a many-to-many SQL relationship object
-
-    :param other_table_class_name: The name of the table we're connecting to
-    :param connecting_table: The secondary table used in the relationship
-    :param back_reference_name: The name to give to the reference to the
-    current table from the other table
-    :return:
-    """
-    return orm.relationship(
-        other_table_class_name,
-        secondary=connecting_table,
-        backref=orm.backref(back_reference_name, lazy='dynamic')
+        backref=backref(backreference, cascade='all')
     )
 
 
@@ -186,12 +146,18 @@ class MutableDict(Mutable, dict):
 class SQLModelBase(Model):
     """Abstract base class for all SQL models that allows [de]serialization
     """
+
+    storage_id = Column(Integer, primary_key=True, autoincrement=True)
+    id = Column(Text, index=True)
+
     # SQLAlchemy syntax
     __abstract__ = True
 
     # Indicates to the storage manager whether the table is a resource or not
     is_resource = False
 
+    join_properties = {}
+
     _private_fields = []
 
     # Indicates whether the `id` column in this class should be unique
@@ -211,12 +177,17 @@ class SQLModelBase(Model):
         """
         return jsonpickle.encode(self.to_dict(), unpicklable=False)
 
-    def fields(self):
+    @classmethod
+    def fields(cls):
         """Return the list of field names for this table
 
         Mostly for backwards compatibility in the code (that uses `fields`)
         """
-        return self.__table__.columns.keys()
+        fields = cls.__table__.columns.keys()
+        fields = [f for f in fields if f not in cls._private_fields]
+        properties = set(cls.join_properties.keys()) - set(cls._private_fields)
+        fields.extend(properties)
+        return fields
 
     def _get_unique_id(self):
         """A method to allow classes to override the default representation

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/aria/utils/application.py
----------------------------------------------------------------------
diff --git a/aria/utils/application.py b/aria/utils/application.py
index b1a7fcc..113e054 100644
--- a/aria/utils/application.py
+++ b/aria/utils/application.py
@@ -117,7 +117,7 @@ class StorageManager(LoggerMixin):
             updated_at=now,
             main_file_name=main_file_name,
         )
-        self.model_storage.blueprint.store(blueprint)
+        self.model_storage.blueprint.put(blueprint)
         self.logger.debug('created blueprint model storage entry')
 
     def create_nodes_storage(self):
@@ -138,7 +138,7 @@ class StorageManager(LoggerMixin):
             scalable = node_copy.pop('capabilities')['scalable']['properties']
             for index, relationship in enumerate(node_copy['relationships']):
                 relationship = 
self.model_storage.relationship.model_cls(**relationship)
-                self.model_storage.relationship.store(relationship)
+                self.model_storage.relationship.put(relationship)
                 node_copy['relationships'][index] = relationship
 
             node_copy = self.model_storage.node.model_cls(
@@ -149,7 +149,7 @@ class StorageManager(LoggerMixin):
                 max_number_of_instances=scalable['max_instances'],
                 number_of_instances=scalable['current_instances'],
                 **node_copy)
-            self.model_storage.node.store(node_copy)
+            self.model_storage.node.put(node_copy)
 
     def create_deployment_storage(self):
         """
@@ -190,7 +190,7 @@ class StorageManager(LoggerMixin):
             created_at=now,
             updated_at=now
         )
-        self.model_storage.deployment.store(deployment)
+        self.model_storage.deployment.put(deployment)
         self.logger.debug('created deployment model storage entry')
 
     def create_node_instances_storage(self):
@@ -213,7 +213,7 @@ class StorageManager(LoggerMixin):
                     type=relationship_instance['type'],
                     target_id=relationship_instance['target_id'])
                 relationship_instances.append(relationship_instance_model)
-                
self.model_storage.relationship_instance.store(relationship_instance_model)
+                
self.model_storage.relationship_instance.put(relationship_instance_model)
 
             node_instance_model = self.model_storage.node_instance.model_cls(
                 node=node_model,
@@ -224,7 +224,7 @@ class StorageManager(LoggerMixin):
                 version='1.0',
                 relationship_instances=relationship_instances)
 
-            self.model_storage.node_instance.store(node_instance_model)
+            self.model_storage.node_instance.put(node_instance_model)
         self.logger.debug('created node-instances model storage entries')
 
     def create_plugin_storage(self, plugin_id, source):
@@ -258,7 +258,7 @@ class StorageManager(LoggerMixin):
             supported_py_versions=plugin.get('supported_python_versions'),
             uploaded_at=now
         )
-        self.model_storage.plugin.store(plugin)
+        self.model_storage.plugin.put(plugin)
         self.logger.debug('created plugin model storage entry')
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/tests/mock/context.py
----------------------------------------------------------------------
diff --git a/tests/mock/context.py b/tests/mock/context.py
index 0d09bb1..0ab18bf 100644
--- a/tests/mock/context.py
+++ b/tests/mock/context.py
@@ -18,7 +18,7 @@ import pytest
 
 from aria import application_model_storage
 from aria.orchestrator import context
-from aria.storage.mapi import SQLAlchemyModelAPI
+from aria.storage.sql_mapi import SQLAlchemyModelAPI
 
 from tests.storage import get_sqlite_api_params
 
@@ -29,39 +29,39 @@ from . import models
 def simple(**kwargs):
     api_params = get_sqlite_api_params()
     model_storage = application_model_storage(SQLAlchemyModelAPI, 
api_params=api_params)
-    model_storage.blueprint.store(models.get_blueprint())
+    model_storage.blueprint.put(models.get_blueprint())
     blueprint = model_storage.blueprint.get(models.BLUEPRINT_ID)
     deployment = models.get_deployment(blueprint)
-    model_storage.deployment.store(deployment)
+    model_storage.deployment.put(deployment)
 
     
#################################################################################
     # Creating a simple deployment with node -> node as a graph
 
     dependency_node = models.get_dependency_node(deployment)
-    model_storage.node.store(dependency_node)
+    model_storage.node.put(dependency_node)
     storage_dependency_node = model_storage.node.get(dependency_node.id)
 
     dependency_node_instance = 
models.get_dependency_node_instance(storage_dependency_node)
-    model_storage.node_instance.store(dependency_node_instance)
+    model_storage.node_instance.put(dependency_node_instance)
     storage_dependency_node_instance = 
model_storage.node_instance.get(dependency_node_instance.id)
 
     dependent_node = models.get_dependent_node(deployment)
-    model_storage.node.store(dependent_node)
+    model_storage.node.put(dependent_node)
     storage_dependent_node = model_storage.node.get(dependent_node.id)
 
     dependent_node_instance = 
models.get_dependent_node_instance(storage_dependent_node)
-    model_storage.node_instance.store(dependent_node_instance)
+    model_storage.node_instance.put(dependent_node_instance)
     storage_dependent_node_instance = 
model_storage.node_instance.get(dependent_node_instance.id)
 
     relationship = models.get_relationship(storage_dependent_node, 
storage_dependency_node)
-    model_storage.relationship.store(relationship)
+    model_storage.relationship.put(relationship)
     storage_relationship = model_storage.relationship.get(relationship.id)
     relationship_instance = models.get_relationship_instance(
         relationship=storage_relationship,
         target_instance=storage_dependency_node_instance,
         source_instance=storage_dependent_node_instance
     )
-    model_storage.relationship_instance.store(relationship_instance)
+    model_storage.relationship_instance.put(relationship_instance)
 
     final_kwargs = dict(
         name='simple_context',

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index bdcbed9..5de3380 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -46,7 +46,7 @@ def get_dependency_node(deployment):
         operations=dict((key, {}) for key in operations.NODE_OPERATIONS),
         min_number_of_instances=1,
         max_number_of_instances=1,
-        deployment_storage_id=deployment.storage_id
+        deployment_fk=deployment.storage_id
     )
 
 
@@ -56,8 +56,8 @@ def get_dependency_node_instance(dependency_node):
         host_id=DEPENDENCY_NODE_INSTANCE_ID,
         runtime_properties={'ip': '1.1.1.1'},
         version=None,
-        node_storage_id=dependency_node.storage_id,
-        deployment_storage_id=dependency_node.deployment.storage_id,
+        node_fk=dependency_node.storage_id,
+        deployment_fk=dependency_node.deployment.storage_id,
         state='',
         scaling_groups={}
     )
@@ -66,8 +66,8 @@ def get_dependency_node_instance(dependency_node):
 def get_relationship(source=None, target=None):
     return models.Relationship(
         id=RELATIONSHIP_ID,
-        source_node_storage_id=source.storage_id,
-        target_node_storage_id=target.storage_id,
+        source_node_fk=source.storage_id,
+        target_node_fk=target.storage_id,
         source_interfaces={},
         source_operations=dict((key, {}) for key in 
operations.RELATIONSHIP_OPERATIONS),
         target_interfaces={},
@@ -81,17 +81,16 @@ def get_relationship(source=None, target=None):
 def get_relationship_instance(source_instance, target_instance, relationship):
     return models.RelationshipInstance(
         id=RELATIONSHIP_INSTANCE_ID,
-        type='some_type',
-        relationship_storage_id=relationship.storage_id,
-        target_node_instance_storage_id=target_instance.storage_id,
-        source_node_instance_storage_id=source_instance.storage_id,
+        relationship_fk=relationship.storage_id,
+        target_node_instance_fk=target_instance.storage_id,
+        source_node_instance_fk=source_instance.storage_id,
     )
 
 
 def get_dependent_node(deployment):
     return models.Node(
         id=DEPENDENT_NODE_ID,
-        deployment_storage_id=deployment.storage_id,
+        deployment_fk=deployment.storage_id,
         host_id=DEPENDENT_NODE_ID,
         type='test_node_type',
         type_hierarchy=[],
@@ -111,8 +110,8 @@ def get_dependent_node_instance(dependent_node):
         host_id=DEPENDENT_NODE_INSTANCE_ID,
         runtime_properties={},
         version=None,
-        node_storage_id=dependent_node.storage_id,
-        deployment_storage_id=dependent_node.deployment.storage_id,
+        node_fk=dependent_node.storage_id,
+        deployment_fk=dependent_node.deployment.storage_id,
         state='',
         scaling_groups={}
     )
@@ -133,7 +132,7 @@ def get_blueprint():
 def get_execution(deployment):
     return models.Execution(
         id=EXECUTION_ID,
-        deployment_storage_id=deployment.storage_id,
+        deployment_fk=deployment.storage_id,
         status=models.Execution.STARTED,
         workflow_id=WORKFLOW_ID,
         started_at=datetime.utcnow(),
@@ -145,7 +144,7 @@ def get_deployment(blueprint):
     now = datetime.utcnow()
     return models.Deployment(
         id=DEPLOYMENT_ID,
-        blueprint_storage_id=blueprint.storage_id,
+        blueprint_fk=blueprint.storage_id,
         description='',
         created_at=now,
         updated_at=now,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py 
b/tests/orchestrator/context/test_toolbelt.py
index 480f289..5cad219 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -68,7 +68,7 @@ def test_host_ip(workflow_context, executor):
         'operation': op_path(host_ip, module_path=__name__)
 
     }
-    workflow_context.model.node.store(dependency_node)
+    workflow_context.model.node.put(dependency_node)
     inputs = {'putput': True}
 
     @workflow
@@ -95,7 +95,7 @@ def test_dependent_node_instances(workflow_context, executor):
         'operation': op_path(dependent_nodes, module_path=__name__)
 
     }
-    workflow_context.model.node.store(dependency_node)
+    workflow_context.model.node.put(dependency_node)
     inputs = {'putput': True}
 
     @workflow
@@ -121,7 +121,7 @@ def test_relationship_tool_belt(workflow_context, executor):
     relationship.source_operations[operation_name] = {
         'operation': op_path(relationship_operation, module_path=__name__)
     }
-    workflow_context.model.relationship.store(relationship)
+    workflow_context.model.relationship.put(relationship)
 
     inputs = {'putput': True}
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py 
b/tests/orchestrator/context/test_workflow.py
index 4c4979f..fbe5d75 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -19,10 +19,9 @@ import pytest
 
 from aria import application_model_storage
 from aria.orchestrator import context
-from aria.storage.mapi.sql import SQLAlchemyModelAPI
-
-from tests.mock import models
+from aria.storage.sql_mapi import SQLAlchemyModelAPI
 from tests import storage as test_storage
+from tests.mock import models
 
 
 class TestWorkflowContext(object):
@@ -60,7 +59,7 @@ class TestWorkflowContext(object):
 def storage():
     api_params = test_storage.get_sqlite_api_params()
     result = application_model_storage(SQLAlchemyModelAPI, 
api_params=api_params)
-    result.blueprint.store(models.get_blueprint())
+    result.blueprint.put(models.get_blueprint())
     blueprint = result.blueprint.get(models.BLUEPRINT_ID)
-    result.deployment.store(models.get_deployment(blueprint))
+    result.deployment.put(models.get_deployment(blueprint))
     return result

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py 
b/tests/orchestrator/workflows/api/test_task.py
index 4da42c1..3ae700e 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -30,7 +30,7 @@ def ctx():
     :return:
     """
     simple_context = mock.context.simple()
-    simple_context.model.execution.store(mock.models.get_execution(simple_context.deployment))
+    simple_context.model.execution.put(mock.models.get_execution(simple_context.deployment))
 
     return simple_context
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index b58460a..45eaa27 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -12,7 +12,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import time
 # TODO: fix together with the test
 # import threading

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
index 8fdf870..4d610f3 100644
--- a/tests/storage/test_model_storage.py
+++ b/tests/storage/test_model_storage.py
@@ -23,17 +23,17 @@ from aria.storage import (
     ModelStorage,
     models,
     exceptions,
-    mapi as storage_api,
+    sql_mapi,
 )
-
-from tests import storage
+from aria import application_model_storage
+from tests.storage import get_sqlite_api_params
 
 temp_dir = tempfile.mkdtemp()
 
-APIs = [
-    ModelStorage(storage_api.SQLAlchemyModelAPI, api_params=storage.get_sqlite_api_params()),
-    # ModelStorage(storage_api.FileSystemModelAPI, api_params=dict(directory=temp_dir)),
-]
+
+@pytest.fixture
+def storage():
+    return ModelStorage(sql_mapi.SQLAlchemyModelAPI, api_params=get_sqlite_api_params())
 
 
 @pytest.fixture(autouse=True)
@@ -45,18 +45,16 @@ def cleanup():
         pass
 
 
-@pytest.mark.parametrize('storage', APIs)
 def test_storage_base(storage):
     with pytest.raises(AttributeError):
         storage.non_existent_attribute()
 
 
-@pytest.mark.parametrize('storage', APIs)
 def test_model_storage(storage):
     storage.register(models.ProviderContext)
 
     pc = models.ProviderContext(context={}, name='context_name', id='id1')
-    storage.provider_context.store(pc)
+    storage.provider_context.put(pc)
 
     assert storage.provider_context.get('id1') == pc
 
@@ -73,12 +71,11 @@ def test_model_storage(storage):
         storage.provider_context.get('id1')
 
 
-@pytest.mark.parametrize('storage', APIs)
 def test_storage_driver(storage):
     storage.register(models.ProviderContext)
 
     pc = models.ProviderContext(context={}, name='context_name', id='id2')
-    storage.registered['provider_context'].store(entry=pc)
+    storage.registered['provider_context'].put(entry=pc)
 
     assert storage.registered['provider_context'].get(entry_id='id2') == pc
 
@@ -91,17 +88,16 @@ def test_storage_driver(storage):
         storage.registered['provider_context'].get('id2')
 
 
-# @pytest.mark.parametrize('storage', APIs)
-# def test_application_storage_factory(storage):
-#     storage = application_model_storage(api, api_params=api_params)
-#     assert storage.node
-#     assert storage.node_instance
-#     assert storage.plugin
-#     assert storage.blueprint
-#     assert storage.snapshot
-#     assert storage.deployment
-#     assert storage.deployment_update
-#     assert storage.deployment_update_step
-#     assert storage.deployment_modification
-#     assert storage.execution
-#     assert storage.provider_context
+def test_application_storage_factory():
+    storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
+                                        api_params=get_sqlite_api_params())
+    assert storage.node
+    assert storage.node_instance
+    assert storage.plugin
+    assert storage.blueprint
+    assert storage.deployment
+    assert storage.deployment_update
+    assert storage.deployment_update_step
+    assert storage.deployment_modification
+    assert storage.execution
+    assert storage.provider_context

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d8f1338/tests/storage/test_resource_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_resource_storage.py b/tests/storage/test_resource_storage.py
index 452867e..4347512 100644
--- a/tests/storage/test_resource_storage.py
+++ b/tests/storage/test_resource_storage.py
@@ -18,8 +18,8 @@ import tempfile
 
 import pytest
 
+from aria.storage.filesystem_rapi import FileSystemResourceAPI
 from aria.storage import (
-    rapi,
     exceptions,
     ResourceStorage
 )
@@ -44,12 +44,12 @@ class TestResourceStorage(TestFileSystem):
         storage.blueprint.upload(entry_id=id, source=tmp_dir)
 
     def _create_storage(self):
-        return ResourceStorage(rapi.FileSystemResourceAPI,
+        return ResourceStorage(FileSystemResourceAPI,
                                api_params=dict(directory=self.path))
 
     def test_name(self):
-        api = rapi.FileSystemResourceAPI
-        storage = ResourceStorage(rapi.FileSystemResourceAPI,
+        api = FileSystemResourceAPI
+        storage = ResourceStorage(FileSystemResourceAPI,
                                   items=['blueprint'],
                                   api_params=dict(directory=self.path))
         assert repr(storage) == 'ResourceStorage(api={api})'.format(api=api)
@@ -62,8 +62,7 @@ class TestResourceStorage(TestFileSystem):
         assert os.path.exists(os.path.join(self.path, 'blueprint'))
 
     def test_upload_file(self):
-        storage = ResourceStorage(rapi.FileSystemResourceAPI,
-                                  api_params=dict(directory=self.path))
+        storage = ResourceStorage(FileSystemResourceAPI, api_params=dict(directory=self.path))
         self._create(storage)
         tmpfile_path = tempfile.mkstemp(suffix=self.__class__.__name__, dir=self.path)[1]
         self._upload(storage, tmpfile_path, id='blueprint_id')
@@ -104,7 +103,7 @@ class TestResourceStorage(TestFileSystem):
         storage = self._create_storage()
         self._create(storage)
         with pytest.raises(exceptions.StorageError):
-            storage.blueprint.data(entry_id='blueprint_id', path='fake_path')
+            storage.blueprint.read(entry_id='blueprint_id', path='fake_path')
 
     def test_data_file(self):
         storage = self._create_storage()
@@ -112,7 +111,7 @@ class TestResourceStorage(TestFileSystem):
         tmpfile_path = tempfile.mkstemp(suffix=self.__class__.__name__, dir=self.path)[1]
         self._upload(storage, tmpfile_path, 'blueprint_id')
 
-        assert storage.blueprint.data(entry_id='blueprint_id') == 'fake context'
+        assert storage.blueprint.read(entry_id='blueprint_id') == 'fake context'
 
     def test_upload_dir(self):
         storage = self._create_storage()
@@ -189,4 +188,4 @@ class TestResourceStorage(TestFileSystem):
         storage.blueprint.upload(entry_id='blueprint_id', source=tmp_dir)
 
         with pytest.raises(exceptions.StorageError):
-            storage.blueprint.data(entry_id='blueprint_id')
+            storage.blueprint.read(entry_id='blueprint_id')


Reply via email to