http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCache.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCache.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCache.java
deleted file mode 100644
index 0d86474..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCache.java
+++ /dev/null
@@ -1,301 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.typesystem.types.cache;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.annotation.ConditionalOnAtlasProperty;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.EnumType;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Singleton;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-
-/**
- * Caches the types in-memory within the same process space.
- */
-@SuppressWarnings("rawtypes")
-@Singleton
-@Component
-@ConditionalOnAtlasProperty(property = "atlas.TypeCache.impl", isDefault = true)
-public class DefaultTypeCache implements TypeCache {
-    private static final Logger LOG = LoggerFactory.getLogger(DefaultTypeCache.class);
-
-    private Map<String, IDataType> types_ = new ConcurrentHashMap<>();
-    private static final List<TypeCategory> validTypeFilterCategories =
-            Arrays.asList(TypeCategory.CLASS, TypeCategory.TRAIT, TypeCategory.ENUM, TypeCategory.STRUCT);
-    private static final List<TypeCategory> validSupertypeFilterCategories =
-            Arrays.asList(TypeCategory.CLASS, TypeCategory.TRAIT);
-
-    /*
-     * (non-Javadoc)
-     * @see
-     * org.apache.atlas.typesystem.types.cache.TypeCache#has(java.lang
-     * .String)
-     */
-    @Override
-    public boolean has(String typeName) throws AtlasException {
-
-        return types_.containsKey(typeName);
-    }
-
-    /* (non-Javadoc)
-     * @see org.apache.atlas.typesystem.types.cache.TypeCache#has(org.
-     * apache.atlas.typesystem.types.DataTypes.TypeCategory, java.lang.String)
-     */
-    @Override
-    public boolean has(TypeCategory typeCategory, String typeName)
-            throws AtlasException {
-
-        assertValidTypeCategory(typeCategory);
-        return has(typeName);
-    }
-
-    private void assertValidTypeCategory(String typeCategory) {
-        assertValidTypeCategory(TypeCategory.valueOf(typeCategory));
-    }
-
-    private void assertValidTypeCategory(TypeCategory typeCategory) {
-        // there might be no need for 'typeCategory' in this implementation for
-        // certain APIs, but for a distributed cache it might help the
-        // implementers to partition the types by their category
-        // while persisting, so that lookups can be efficient
-
-        if (typeCategory == null) {
-            throw new IllegalArgumentException("Category of the types to be 
filtered is null.");
-        }
-
-        if (!validTypeFilterCategories.contains(typeCategory)) {
-            throw new IllegalArgumentException("Category of the types should 
be one of " +
-                    StringUtils.join(validTypeFilterCategories, ", "));
-        }
-    }
-
-    /*
-     * (non-Javadoc)
-     * @see
-     * org.apache.atlas.typesystem.types.cache.TypeCache#get(java.lang
-     * .String)
-     */
-    @Override
-    public IDataType get(String typeName) throws AtlasException {
-
-        return types_.get(typeName);
-    }
-
-    /* (non-Javadoc)
-     * @see org.apache.atlas.typesystem.types.cache.TypeCache#get(org.apache.
-     * atlas.typesystem.types.DataTypes.TypeCategory, java.lang.String)
-     */
-    @Override
-    public IDataType get(TypeCategory typeCategory, String typeName) throws AtlasException {
-
-        assertValidTypeCategory(typeCategory);
-        return get(typeName);
-    }
-
-    /**
-     * Return the list of type names in the type system which match the specified filter.
-     *
-     * @return list of type names
-     * @param filterMap - Map of filter for type names. Valid keys are CATEGORY, SUPERTYPE, NOT_SUPERTYPE
-     * For example, CATEGORY = TRAIT && SUPERTYPE contains 'X' && SUPERTYPE !contains 'Y'
-     */
-    @Override
-    public Collection<String> getTypeNames(Map<TYPE_FILTER, String> filterMap) throws AtlasException {
-        assertFilter(filterMap);
-
-        List<String> typeNames = new ArrayList<>();
-        for (IDataType type : types_.values()) {
-            if (shouldIncludeType(type, filterMap)) {
-                typeNames.add(type.getName());
-            }
-        }
-        return typeNames;
-    }
-
-    private boolean shouldIncludeType(IDataType type, Map<TYPE_FILTER, String> filterMap) {
-        if (filterMap == null) {
-            return true;
-        }
-
-        for (Entry<TYPE_FILTER, String> filterEntry : filterMap.entrySet()) {
-            switch (filterEntry.getKey()) {
-            case CATEGORY:
-                if (!filterEntry.getValue().equals(type.getTypeCategory().name())) {
-                    return false;
-                }
-                break;
-
-            case SUPERTYPE:
-                if (!validSupertypeFilterCategories.contains(type.getTypeCategory()) ||
-                        !((HierarchicalType) type).getAllSuperTypeNames().contains(filterEntry.getValue())) {
-                    return false;
-                }
-                break;
-
-            case NOT_SUPERTYPE:
-                if (!validSupertypeFilterCategories.contains(type.getTypeCategory()) ||
-                        type.getName().equals(filterEntry.getValue()) ||
-                        ((HierarchicalType) type).getAllSuperTypeNames().contains(filterEntry.getValue())) {
-                    return false;
-                }
-                break;
-            }
-        }
-        return true;
-    }
-
-
-    private void assertFilter(Map<TYPE_FILTER, String> filterMap) throws AtlasException {
-        if (filterMap == null) {
-            return;
-        }
-
-        for (Entry<TYPE_FILTER, String> filterEntry : filterMap.entrySet()) {
-            switch (filterEntry.getKey()) {
-            case CATEGORY:
-                assertValidTypeCategory(filterEntry.getValue());
-                break;
-
-            case SUPERTYPE:
-            case NOT_SUPERTYPE:
-                if (!has(filterEntry.getValue())) {
-                    if (LOG.isDebugEnabled()) {
-                        LOG.debug("{}: supertype does not exist", filterEntry.getValue());
-                    }
-                }
-                break;
-
-            default:
-                throw new IllegalStateException("Unhandled filter " + filterEntry.getKey());
-            }
-        }
-    }
-
-    /*
-     * (non-Javadoc)
-     * @see
-     * org.apache.atlas.typesystem.types.cache.TypeCache#getAllNames()
-     */
-    @Override
-    public Collection<String> getAllTypeNames() throws AtlasException {
-
-        return types_.keySet();
-    }
-
-    /*
-     * (non-Javadoc)
-     * @see
-     * org.apache.atlas.typesystem.types.cache.TypeCache#put(org.apache
-     * .atlas.typesystem.types.IDataType)
-     */
-    @Override
-    public void put(IDataType type) throws AtlasException {
-
-        assertValidType(type);
-        types_.put(type.getName(), type);
-    }
-
-    private void assertValidType(IDataType type) throws
-        AtlasException {
-
-        if (type == null) {
-            throw new AtlasException("type is null.");
-        }
-
-        boolean validTypeCategory = (type instanceof ClassType) ||
-            (type instanceof TraitType) ||
-            (type instanceof EnumType) ||
-            (type instanceof StructType);
-
-        if (!validTypeCategory) {
-            throw new AtlasException("Category of the types should be one of ClassType | "
-                + "TraitType | EnumType | StructType.");
-        }
-    }
-
-    /*
-     * (non-Javadoc)
-     * @see
-     * org.apache.atlas.typesystem.types.cache.TypeCache#putAll(java
-     * .util.Collection)
-     */
-    @Override
-    public void putAll(Collection<IDataType> types) throws AtlasException {
-
-        for (IDataType type : types) {
-            assertValidType(type);
-            types_.put(type.getName(), type);
-        }
-    }
-
-    /*
-     * (non-Javadoc)
-     * @see
-     * org.apache.atlas.typesystem.types.cache.TypeCache#remove(java
-     * .lang.String)
-     */
-    @Override
-    public void remove(String typeName) throws AtlasException {
-
-        types_.remove(typeName);
-    }
-
-    /* (non-Javadoc)
-     * @see org.apache.atlas.typesystem.types.cache.TypeCache#remove(org.
-     * apache.atlas.typesystem.types.DataTypes.TypeCategory, java.lang.String)
-     */
-    @Override
-    public void remove(TypeCategory typeCategory, String typeName)
-            throws AtlasException {
-
-        assertValidTypeCategory(typeCategory);
-        remove(typeName);
-    }
-
-    /*
-     * (non-Javadoc)
-     * @see org.apache.atlas.typesystem.types.cache.TypeCache#clear()
-     */
-    @Override
-    public void clear() {
-
-        types_.clear();
-    }
-
-    @Override
-    public IDataType onTypeFault(String typeName) throws AtlasException {
-
-        return null;
-    }
-}
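
For orientation, a minimal sketch of how this removed cache was exercised through its filter API. The sketch class, the cached type passed in, and the trait name "PII" are illustrative only and not part of the original file.

    import java.util.Collection;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.atlas.AtlasException;
    import org.apache.atlas.typesystem.types.DataTypes;
    import org.apache.atlas.typesystem.types.IDataType;
    import org.apache.atlas.typesystem.types.cache.DefaultTypeCache;
    import org.apache.atlas.typesystem.types.cache.TypeCache;

    @SuppressWarnings("rawtypes")
    public class DefaultTypeCacheSketch {
        public static void listPiiTraits(DefaultTypeCache cache, IDataType someTraitType) throws AtlasException {
            // 'someTraitType' stands in for any ClassType, TraitType, EnumType or StructType already defined.
            cache.put(someTraitType);

            // CATEGORY narrows by type category; SUPERTYPE keeps only types that inherit from "PII".
            Map<TypeCache.TYPE_FILTER, String> filter = new HashMap<>();
            filter.put(TypeCache.TYPE_FILTER.CATEGORY, DataTypes.TypeCategory.TRAIT.name());
            filter.put(TypeCache.TYPE_FILTER.SUPERTYPE, "PII");

            Collection<String> piiTraits = cache.getTypeNames(filter);
            System.out.println(piiTraits);
        }
    }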

http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/TypeCache.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/TypeCache.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/TypeCache.java
deleted file mode 100644
index c8f65be..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/TypeCache.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types.cache;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-
-import java.util.Collection;
-import java.util.Map;
-
-/**
- * The types are cached to allow faster lookup when type info is needed during
- * creation/update of entities and DSL query translation/execution.
- * Implementations of this can choose to plug in a distributed cache
- * or an in-memory cache synced across nodes in an Atlas cluster. <br>
- * <br>
- * Type entries in the cache can be one of ... <br>
- * {@link org.apache.atlas.typesystem.types.ClassType} <br>
- * {@link org.apache.atlas.typesystem.types.TraitType} <br>
- * {@link org.apache.atlas.typesystem.types.StructType} <br>
- * {@link org.apache.atlas.typesystem.types.EnumType}
- */
-@SuppressWarnings("rawtypes")
-public interface TypeCache {
-
-    enum TYPE_FILTER {
-        CATEGORY, SUPERTYPE, NOT_SUPERTYPE
-    }
-
-    /**
-     * @param typeName
-     * @return true if the type exists in cache, false otherwise.
-     * @throws AtlasException
-     */
-    boolean has(String typeName) throws AtlasException;
-
-    /**
-     * @param typeCategory Non-null category of type. The category can be one of
-     * TypeCategory.CLASS | TypeCategory.TRAIT | TypeCategory.STRUCT | TypeCategory.ENUM.
-     * @param typeName
-     * @return true if the type of given category exists in cache, false otherwise.
-     * @throws AtlasException
-     */
-    boolean has(DataTypes.TypeCategory typeCategory, String typeName) throws AtlasException;
-
-    /**
-     * @param typeName The name of the type.
-     * @return returns non-null type if cached, otherwise null
-     * @throws AtlasException
-     */
-    IDataType get(String typeName) throws AtlasException;
-
-    /**
-     * @param typeCategory Non-null category of type. The category can be one of
-     * TypeCategory.CLASS | TypeCategory.TRAIT | TypeCategory.STRUCT | TypeCategory.ENUM.
-     * @param typeName
-     * @return returns non-null type (of the specified category) if cached, otherwise null
-     * @throws AtlasException
-     */
-    IDataType get(DataTypes.TypeCategory typeCategory, String typeName) throws AtlasException;
-
-    /**
-     *
-     * @param filter Map of filters for type names; valid keys are CATEGORY, SUPERTYPE, NOT_SUPERTYPE
-     * @return names of the types that match the specified filter
-     * @throws AtlasException
-     */
-    Collection<String> getTypeNames(Map<TYPE_FILTER, String> filter) throws AtlasException;
-
-    /**
-     * This is a convenience API to get the names of all types.
-     *
-     * @see TypeCache#getTypeNames(Map)
-     * @return
-     * @throws AtlasException
-     */
-    Collection<String> getAllTypeNames() throws AtlasException;
-
-    /**
-     * @param type The type to be added to the cache. The type should not be
-     * null, otherwise throws NullPointerException. <br>
-     * Type entries in the cache can be one of ... <br>
-     * {@link org.apache.atlas.typesystem.types.ClassType} <br>
-     * {@link org.apache.atlas.typesystem.types.TraitType} <br>
-     * {@link org.apache.atlas.typesystem.types.StructType} <br>
-     * {@link org.apache.atlas.typesystem.types.EnumType}
-     * @throws AtlasException
-     */
-    void put(IDataType type) throws AtlasException;
-
-    /**
-     * @param types The types to be added to the cache. The type should not be
-     * null, otherwise throws NullPointerException. <br>
-     * Type entries in the cache can be one of ... <br>
-     * {@link org.apache.atlas.typesystem.types.ClassType} <br>
-     * {@link org.apache.atlas.typesystem.types.TraitType} <br>
-     * {@link org.apache.atlas.typesystem.types.StructType} <br>
-     * {@link org.apache.atlas.typesystem.types.EnumType}
-     * @throws AtlasException
-     */
-    void putAll(Collection<IDataType> types) throws AtlasException;
-
-    /**
-     * @param typeName Name of the type to be removed from the cache. If type
-     * exists, it will be removed, otherwise does nothing.
-     * @throws AtlasException
-     */
-    void remove(String typeName) throws AtlasException;
-
-    /**
-     * @param typeCategory Non-null category of type. The category can be one of
-     * TypeCategory.CLASS | TypeCategory.TRAIT | TypeCategory.STRUCT | TypeCategory.ENUM.
-     * @param typeName Name of the type to be removed from the cache. If type
-     * exists, it will be removed, otherwise does nothing.
-     * @throws AtlasException
-     */
-    void remove(DataTypes.TypeCategory typeCategory, String typeName) throws AtlasException;
-
-    /**
-     * Clear the type cache
-     *
-     */
-    void clear();
-
-    /**
-     * Called when a type lookup request on {@link TypeSystem}
-     * fails because the type is not present in the runtime type information.
-     * Implementations can take action such as retrieving the requested type
-     * from some persistent storage.
-     *
-     * @param typeName
-     * @throws AtlasException
-     */
-    IDataType onTypeFault(String typeName) throws AtlasException;
-
-}
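
The interface above was the pluggable caching SPI, selected through the atlas.TypeCache.impl property. Below is a minimal sketch of a fault-handling implementation, assuming a hypothetical TypeDefStore that can load a type definition from persistent storage; it is not the implementation Atlas shipped.

    import org.apache.atlas.AtlasException;
    import org.apache.atlas.typesystem.types.IDataType;
    import org.apache.atlas.typesystem.types.cache.DefaultTypeCache;

    @SuppressWarnings("rawtypes")
    public class PersistenceBackedTypeCache extends DefaultTypeCache {

        /** Hypothetical persistence abstraction, used only for this sketch. */
        public interface TypeDefStore {
            IDataType loadType(String typeName) throws AtlasException;
        }

        private final TypeDefStore typeStore;

        public PersistenceBackedTypeCache(TypeDefStore typeStore) {
            this.typeStore = typeStore;
        }

        @Override
        public IDataType onTypeFault(String typeName) throws AtlasException {
            // Invoked when a TypeSystem lookup misses the runtime type information:
            // fetch the definition from storage and cache it for subsequent lookups.
            IDataType type = typeStore.loadType(typeName);
            if (type != null) {
                put(type);
            }
            return type;
        }
    }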

http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/package-info.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/package-info.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/package-info.java
deleted file mode 100755
index fbd4216..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/package-info.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * <h2>Types:</h2>
- * <img src="doc-files/dataTypes.png" />
- * <ul>
- *     <li> <b>IDataType:</b> Represents a <i>DataType</i> in the TypeSystem. All Instances and
- *     Attributes are associated
- *     with a DataType. They represent the <b>Set</b> of values that Instances/Attributes of this
- *     type can have.
- *     Currently the namespace of DataTypes is flat. DataTypes can be asked to <i>convert</i>
- *     arbitrary java Objects
- *     to instances of this type, and they can be asked for a String representation of an
- *     instance.</li>
- *     <li><b>Type Categories:</b></li> DataTypes are grouped into Categories. A Category implies
- *     certain semantics about
- *     the Types belonging to the Category. We have PRIMITIVE, ENUM, ARRAY, MAP, STRUCT, TRAIT,
- *     and CLASS categories.
- *     <li><b>Primitive Types:</b> There are corresponding DataTypes for the java primitives:
- *     Boolean, Byte, Short,
- *     Int, Long, Float, Double. We also support BigInteger, BigDecimal, String, and Date</li>
- *     <li><b>Collection Types:</b>ArrayType and MapType are parameterized DataTypes taking one
- *     and two parameters
- *     respectively.</li>
- *     <li><b>Enum Types:</b> Used to define DataTypes with all valid values listed in the Type
- *     definition. For e.g.
- * <pre>
- * {@code
- * ts.defineEnumType("HiveObjectType",
-new EnumValue("GLOBAL", 1),
-new EnumValue("DATABASE", 2),
-new EnumValue("TABLE", 3),
-new EnumValue("PARTITION", 4),
-new EnumValue("COLUMN", 5))
- * }
- * </pre> Each <i>EnumValue</i> has a name and an ordinal. Either one can be used as a value for an
- * Attribute of this Type.
- *     </li>
- *     <li><b>Constructable Types:</b> Are complex Types that are composed of Attributes. We
- *     support Structs, Classes
- *     and Traits constructable types. A ConstructableType is parameterized by the Type of its
- *     <i>Instance</i> java
- *     class(these are implementations of the ITypedInstance interface). A value of the
- *     IConstructableType will
- *     implement this parameterized Type. IConstructableTypes can be asked to create an 'empty'
- *     instance of their Type.
- *     IConstructableTypes are associated with FieldMappings that encapsulate the mapping from/to
- *     the ITypedInstance
- *     java object.
- *     </li>
- *     <li><b>Attribute Info:</b>Represents an Attribute of a complex datatype. Attributes are
- *     defined by a name, a
- *     dataType, its Multiplicity and whether it is a composite relation. <i>Multiplicity</i> is
- *     a constraint on the
- *     number of instances that an instance can have. For non collection types and Maps:
- *     Multiplicity is OPTIONAL or
- *     REQUIRED.
- *     For Arrays the Multiplicity is specified by a lower-bound, upper-bound and a uniqueness
- *     constraint.
- *     </li>
- *     <li><b>Struct Types:</b>Are IConstructableTypes whose instances are IStructs. Conceptually
- *     these are like 'C'
- *     structs: they represent a collection of Attributes. For e.g.
- * <pre>
- * {@code
- * ts.defineStructType(STRUCT_TYPE_1,
-true,
-createRequiredAttrDef("a", DataTypes.INT_TYPE),
-createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
-createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
-createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
-createOptionalAttrDef("e", DataTypes.INT_TYPE),
-createOptionalAttrDef("f", DataTypes.INT_TYPE),
-createOptionalAttrDef("g", DataTypes.LONG_TYPE),
-createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
-createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
-createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
-createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
-createOptionalAttrDef("l", DataTypes.DATE_TYPE),
-createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
-createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
-createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, 
DataTypes.DOUBLE_TYPE))
- * }
- * </pre>
- *     </li>
- *     <li><b>Hierarchical Types:</b>Are DataTypes that can have a SuperType. Classes and Traits
- *     are the supported
- *     Hierarchical Types. </li>
- *     <li><b>Class Types:</b></li>
- *     <li><b>Trait Types:</b></li>
- * </ul>
- *
- *
- * <h2>Instances:</h2>
- * <img src="doc-files/instance.png" />
- * <ul>
- *     <li> <b>IStruct:</b></li>
- *     <li><b>IReferenceableInstance:</b></li>
- *     <li><b>ITypedStruct:</b></li>
- *     <li><b>ITypedReferenceableInstance:</b></li>
- * </ul>
- *
- * <h3>Serialization of Types:</h3>
- *
- * <h3>Serialization of Instances:</h3>
- *
- * <h3>Searching on Classes and Traits:</h3>
- */
-package org.apache.atlas.typesystem.types;
\ No newline at end of file
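
The Class Types and Trait Types bullets in the removed javadoc above were left empty. As a hedged illustration of those two categories (all type and attribute names here are invented), such definitions were usually built through TypesUtil:

    import com.google.common.collect.ImmutableSet;
    import org.apache.atlas.typesystem.types.ClassType;
    import org.apache.atlas.typesystem.types.DataTypes;
    import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
    import org.apache.atlas.typesystem.types.TraitType;
    import org.apache.atlas.typesystem.types.utils.TypesUtil;

    public class HierarchicalTypeSketch {
        // A class type: named instances with attributes, optionally inheriting from supertypes.
        static final HierarchicalTypeDefinition<ClassType> TABLE_TYPE =
                TypesUtil.createClassTypeDef("sketch_table", ImmutableSet.of("sketch_dataset"),
                        TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
                        TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE));

        // A trait type: a classification, possibly with attributes, attached to class instances.
        static final HierarchicalTypeDefinition<TraitType> PII_TRAIT =
                TypesUtil.createTraitTypeDef("sketch_PII", ImmutableSet.<String>of(),
                        TypesUtil.createOptionalAttrDef("level", DataTypes.INT_TYPE));
    }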

http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/utils/TypesUtil.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/utils/TypesUtil.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/utils/TypesUtil.java
deleted file mode 100755
index 39244dc..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/utils/TypesUtil.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types.utils;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.EnumValue;
-import org.apache.atlas.typesystem.types.FieldMapping;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.AtlasConstants;
-
-import org.apache.atlas.typesystem.types.TypeSystem;
-import scala.collection.JavaConversions;
-
-/**
- * Types utilities class.
- */
-public class TypesUtil {
-
-    private TypesUtil() {
-    }
-
-    public static AttributeDefinition createOptionalAttrDef(String name, IDataType dataType) {
-        return new AttributeDefinition(name, dataType.getName(), Multiplicity.OPTIONAL, false, null);
-    }
-
-    public static AttributeDefinition createOptionalAttrDef(String name, String dataType) {
-        return new AttributeDefinition(name, dataType, Multiplicity.OPTIONAL, false, null);
-    }
-
-    public static AttributeDefinition createRequiredAttrDef(String name, String dataType) {
-        return new AttributeDefinition(name, dataType, Multiplicity.REQUIRED, false, null);
-    }
-
-    public static AttributeDefinition createUniqueRequiredAttrDef(String name, IDataType dataType) {
-        return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, true, true, null);
-    }
-
-    public static AttributeDefinition createRequiredAttrDef(String name, IDataType dataType) {
-        return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, null);
-    }
-
-    public static EnumTypeDefinition createEnumTypeDef(String name, EnumValue... enumValues) {
-        return new EnumTypeDefinition(name, enumValues);
-    }
-
-    public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(String name,
-            ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
-        return createTraitTypeDef(name, null, superTypes, attrDefs);
-    }
-
-    public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(String name, String description,
-        ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
-        return createTraitTypeDef(name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, attrDefs);
-    }
-
-    public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(String name, String description, String version,
-        ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
-        return new HierarchicalTypeDefinition<>(TraitType.class, name, description, version, superTypes, attrDefs);
-    }
-
-    public static StructTypeDefinition createStructTypeDef(String name, AttributeDefinition... attrDefs) {
-        return createStructTypeDef(name, null, attrDefs);
-    }
-
-    public static StructTypeDefinition createStructTypeDef(String name, String description, AttributeDefinition... attrDefs) {
-        return new StructTypeDefinition(name, description, attrDefs);
-    }
-
-    public static StructTypeDefinition createStructTypeDef(String name, String description, String version, AttributeDefinition... attrDefs) {
-        return new StructTypeDefinition(name, description, version, attrDefs);
-    }
-
-    public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name,
-            ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
-        return createClassTypeDef(name, null, superTypes, attrDefs);
-    }
-
-    public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name, String description,
-        ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
-        return createClassTypeDef(name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, attrDefs);
-    }
-
-    public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name, String description, String version,
-        ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
-        return new HierarchicalTypeDefinition<>(ClassType.class, name, description, version, superTypes, attrDefs);
-    }
-
-    public static TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums,
-                                       ImmutableList<StructTypeDefinition> structs, ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
-                                       ImmutableList<HierarchicalTypeDefinition<ClassType>> classes) {
-        return new TypesDef(JavaConversions.asScalaBuffer(enums), JavaConversions.asScalaBuffer(structs),
-                JavaConversions.asScalaBuffer(traits), JavaConversions.asScalaBuffer(classes));
-    }
-
-    private static final TypeSystem ts = TypeSystem.getInstance();
-
-    public static AttributeInfo newAttributeInfo(String attribute, IDataType type) {
-        try {
-            return new AttributeInfo(ts, new AttributeDefinition(attribute, type.getName(), Multiplicity.REQUIRED,
-                    false, null), null);
-        } catch (AtlasException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-
-    /**
-     * Get the field mappings for the specified data type.
-     * Field mappings are only relevant for CLASS, TRAIT, and STRUCT types.
-     *
-     * @param type
-     * @return {@link FieldMapping} for the specified type
-     * @throws IllegalArgumentException if type is not a CLASS, TRAIT, or STRUCT type.
-     */
-    public static FieldMapping getFieldMapping(IDataType type) {
-        switch (type.getTypeCategory()) {
-        case CLASS:
-        case TRAIT:
-            return ((HierarchicalType)type).fieldMapping();
-
-        case STRUCT:
-            return ((StructType)type).fieldMapping();
-
-        default:
-            throw new IllegalArgumentException("Type " + type + " doesn't have 
any fields!");
-        }
-    }
-}
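
A short sketch of how these removed helpers were commonly combined: individual definitions are bundled into a single TypesDef via getTypesDef. The class definition and its name are illustrative; the empty lists stand in for enums, structs and traits.

    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.ImmutableSet;
    import org.apache.atlas.typesystem.TypesDef;
    import org.apache.atlas.typesystem.types.ClassType;
    import org.apache.atlas.typesystem.types.DataTypes;
    import org.apache.atlas.typesystem.types.EnumTypeDefinition;
    import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
    import org.apache.atlas.typesystem.types.StructTypeDefinition;
    import org.apache.atlas.typesystem.types.TraitType;
    import org.apache.atlas.typesystem.types.utils.TypesUtil;

    public class TypesDefSketch {
        public static TypesDef build() {
            HierarchicalTypeDefinition<ClassType> db = TypesUtil.createClassTypeDef(
                    "sketch_db", ImmutableSet.<String>of(),
                    TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE));

            // Parameter order: enums, structs, traits, classes.
            return TypesUtil.getTypesDef(
                    ImmutableList.<EnumTypeDefinition>of(),
                    ImmutableList.<StructTypeDefinition>of(),
                    ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
                    ImmutableList.of(db));
        }
    }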

http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/resources/atlas-log4j.xml
----------------------------------------------------------------------
diff --git a/typesystem/src/main/resources/atlas-log4j.xml b/typesystem/src/main/resources/atlas-log4j.xml
deleted file mode 100755
index 510e2cf..0000000
--- a/typesystem/src/main/resources/atlas-log4j.xml
+++ /dev/null
@@ -1,105 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
-
-<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/";>
-    <appender name="console" class="org.apache.log4j.ConsoleAppender">
-        <param name="Target" value="System.out"/>
-        <layout class="org.apache.log4j.PatternLayout">
-            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m 
(%C{1}:%L)%n"/>
-        </layout>
-    </appender>
-
-    <appender name="AUDIT" class="org.apache.log4j.RollingFileAppender">
-        <param name="File" value="${atlas.log.dir}/audit.log"/>
-        <param name="Append" value="true"/>
-        <param name="Threshold" value="debug"/>
-        <layout class="org.apache.log4j.PatternLayout">
-            <param name="ConversionPattern" value="%d %x %m%n"/>
-            <param name="maxFileSize" value="100MB" />
-            <param name="maxBackupIndex" value="20" />
-        </layout>
-    </appender>
-
-    <logger name="org.apache.atlas" additivity="false">
-        <level value="debug"/>
-        <appender-ref ref="console"/>
-    </logger>
-
-    <!-- uncomment this block to generate performance traces
-    <appender name="perf_appender" 
class="org.apache.log4j.DailyRollingFileAppender">
-        <param name="file" value="${atlas.log.dir}/atlas_perf.log" />
-        <param name="datePattern" value="'.'yyyy-MM-dd" />
-        <param name="append" value="true" />
-        <layout class="org.apache.log4j.PatternLayout">
-            <param name="ConversionPattern" value="%d|%t|%m%n" />
-        </layout>
-    </appender>
-
-    <logger name="org.apache.atlas.perf" additivity="false">
-        <level value="debug" />
-        <appender-ref ref="perf_appender" />
-    </logger>
-    -->
-
-    <appender name="FAILED" class="org.apache.log4j.DailyRollingFileAppender">
-        <param name="File" value="${atlas.log.dir}/failed.log"/>
-        <param name="Append" value="true"/>
-        <layout class="org.apache.log4j.PatternLayout">
-            <param name="ConversionPattern" value="%d %m"/>
-        </layout>
-    </appender>
-
-    <logger name="FAILED" additivity="false">
-        <level value="info"/>
-        <appender-ref ref="AUDIT"/>
-    </logger>
-
-    <logger name="com.thinkaurelius.titan" additivity="false">
-        <level value="warn"/>
-        <appender-ref ref="console"/>
-    </logger>
-
-    <logger name="org.springframework" additivity="false">
-        <level value="warn"/>
-        <appender-ref ref="console"/>
-    </logger>
-
-    <logger name="org.eclipse" additivity="false">
-        <level value="warn"/>
-        <appender-ref ref="console"/>
-    </logger>
-
-    <logger name="com.sun.jersey" additivity="false">
-        <level value="warn"/>
-        <appender-ref ref="console"/>
-    </logger>
-
-    <logger name="AUDIT" additivity="false">
-        <level value="info"/>
-        <appender-ref ref="console"/>
-    </logger>
-
-    <root>
-        <priority value="warn"/>
-        <appender-ref ref="console"/>
-    </root>
-
-</log4j:configuration>
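
A small usage sketch for this removed configuration: the file resolves ${atlas.log.dir} from a system property, and org.apache.atlas loggers are routed to the console appender at DEBUG. The property value and logger name below are illustrative.

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingSketch {
        public static void main(String[] args) {
            // Must be set before log4j loads atlas-log4j.xml so ${atlas.log.dir} resolves.
            System.setProperty("atlas.log.dir", "/tmp/atlas/logs");

            Logger log = LoggerFactory.getLogger("org.apache.atlas.typesystem.Sketch");
            log.debug("routed to the console appender by the org.apache.atlas logger");
        }
    }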

http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala
deleted file mode 100755
index b51048d..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem
-
-import org.apache.atlas.typesystem.types._
-
-case class TypesDef(enumTypes: Seq[EnumTypeDefinition],
-                    structTypes: Seq[StructTypeDefinition],
-                    traitTypes: Seq[HierarchicalTypeDefinition[TraitType]],
-                    classTypes: Seq[HierarchicalTypeDefinition[ClassType]]) {
-    def this() = this(Seq(), Seq(), Seq(), Seq())
-    def this(enumType : EnumTypeDefinition) = this(Seq(enumType), Seq(), Seq(), Seq())
-    def this(structType: StructTypeDefinition) = this(Seq(), Seq(structType), Seq(), Seq())
-    def this(typ: HierarchicalTypeDefinition[_], isTrait : Boolean) = this(
-      Seq(),
-      Seq(),
-      if ( isTrait )
-        Seq(typ.asInstanceOf[HierarchicalTypeDefinition[TraitType]]) else Seq(),
-      if (!isTrait )
-        Seq(typ.asInstanceOf[HierarchicalTypeDefinition[ClassType]]) else Seq()
-    )
-
-    def enumTypesAsJavaList() = {
-        import scala.collection.JavaConverters._
-        enumTypes.asJava
-    }
-
-    def structTypesAsJavaList() = {
-        import scala.collection.JavaConverters._
-        structTypes.asJava
-    }
-
-    def traitTypesAsJavaList() = {
-        import scala.collection.JavaConverters._
-        traitTypes.asJava
-    }
-
-    def classTypesAsJavaList() = {
-        import scala.collection.JavaConverters._
-        classTypes.asJava
-    }
-
-    def isEmpty() = {
-      enumTypes.isEmpty & structTypes.isEmpty & traitTypes.isEmpty & classTypes.isEmpty
-    }
-}
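
TypesDef is a Scala case class, but it was mostly consumed from Java through the *AsJavaList accessors shown above. A hedged sketch of that consumption side:

    import org.apache.atlas.typesystem.TypesDef;
    import org.apache.atlas.typesystem.types.ClassType;
    import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;

    public class TypesDefConsumerSketch {
        public static void print(TypesDef typesDef) {
            if (typesDef.isEmpty()) {
                System.out.println("no type definitions");
                return;
            }
            // Each accessor converts the underlying Scala Seq to a java.util.List.
            for (HierarchicalTypeDefinition<ClassType> classDef : typesDef.classTypesAsJavaList()) {
                System.out.println(classDef);
            }
        }
    }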

http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala
deleted file mode 100644
index 9e22f67..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.builders
-
-import org.apache.atlas.typesystem.{IReferenceableInstance, IStruct, Referenceable, Struct}
-
-import scala.collection.JavaConversions._
-import scala.collection.JavaConverters._
-import scala.collection.mutable.ArrayBuffer
-import scala.language.{dynamics, implicitConversions}
-import scala.util.DynamicVariable
-
-class InstanceBuilder extends Dynamic {
-
-  private val references : ArrayBuffer[Referenceable] = new ArrayBuffer[Referenceable]()
-
-  val context = new DynamicVariable[DynamicStruct](null)
-
-  def struct(typeName : String) : DynamicStruct = {
-    context.value = new DynamicStruct(this, new Struct(typeName))
-    context.value
-  }
-
-  def instance(typeName: String, traitNames: String*)(f : => Unit) : DynamicReference = {
-    val r = new Referenceable(typeName, traitNames:_*)
-    references.append(r)
-    val dr = new DynamicReference(this, r)
-    context.withValue(dr){f}
-    dr
-  }
-
-  def create( f : => Unit ) : java.util.List[Referenceable] = {
-    f
-    references.asJava
-  }
-
-  def applyDynamic(name : String)(value : Any) : Any = {
-    context.value.updateDynamic(name)(value)
-  }
-
-  implicit def symbolToDynamicStruct(s : Symbol) : DynamicValue =
-    new DynamicValue(this, s.name, if (context.value == null) null else context.value.s)
-
-}
-
-object DynamicValue {
-
-  private[builders] def transformOut(s: IStruct, attr : String, v : Any)(implicit ib : InstanceBuilder) : DynamicValue =
-    v match {
-    case r : Referenceable => new DynamicReference(ib, r)
-    case s : Struct => new DynamicStruct(ib, s)
-    case jL : java.util.List[_] => {
-      if ( s != null ) {
-        new DynamicCollection(ib, attr, s)
-      } else {
-        new DynamicValue(ib, attr, s, jL.map { e => transformOut(null, null, e) })
-      }
-    }
-    case jM : java.util.Map[_,_] => {
-      if ( s != null ) {
-        new DynamicMap(ib, attr, s)
-      } else {
-        new DynamicValue(ib, attr, s, jM.map {
-          case (k, v) => k -> transformOut(null, null, v)
-        }.toMap)
-      }
-    }
-    case x => {
-      if ( s != null ) {
-        new DynamicValue(ib, attr, s)
-      } else {
-        new DynamicValue(ib, attr, s, x)
-      }
-    }
-  }
-
-  private[builders] def transformIn(v : Any) : Any = v match {
-    case dr : DynamicReference => dr.r
-    case ds : DynamicStruct => ds.s
-    case dv : DynamicValue => dv.get
-    case l : Seq[_] => l.map{ e => transformIn(e)}.asJava
-    case m : Map[_,_] => m.map {
-      case (k,v) => k -> transformIn(v)
-    }.asJava
-    case x => x
-  }
-
-}
-
-class DynamicValue(val ib : InstanceBuilder, val attrName : String, val s: IStruct, var value : Any = null) extends Dynamic {
-  import DynamicValue._
-
-  implicit val iib : InstanceBuilder = ib
-
-  def ~(v : Any): Unit = {
-    if ( s != null ) {
-      s.set(attrName, transformIn(v))
-    } else {
-      value = v
-    }
-  }
-
-  def get : Any = if ( s != null ) s.get(attrName) else value
-
-  def selectDynamic(name: String) : DynamicValue = {
-
-    throw new UnsupportedOperationException()
-  }
-
-  def update(key : Any, value : Object): Unit = {
-    throw new UnsupportedOperationException()
-  }
-
-  def apply(key : Any): DynamicValue = {
-
-    if ( s != null && s.isInstanceOf[Referenceable] && key.isInstanceOf[String]) {
-      val r = s.asInstanceOf[Referenceable]
-      if ( r.getTraits contains attrName ) {
-        val traitAttr = key.asInstanceOf[String]
-        return new DynamicStruct(ib, r.getTrait(attrName)).selectDynamic(traitAttr)
-      }
-    }
-    throw new UnsupportedOperationException()
-  }
-}
-
-class DynamicCollection(ib : InstanceBuilder, attrName : String, s: IStruct) extends DynamicValue(ib, attrName ,s) {
-  import DynamicValue._
-
-  override def update(key : Any, value : Object): Unit = {
-    var jL = s.get(attrName)
-    val idx = key.asInstanceOf[Int]
-    if (jL == null ) {
-      val l = new java.util.ArrayList[Object]()
-      l.ensureCapacity(idx)
-      jL = l
-    }
-    val nJL = new java.util.ArrayList[Object](jL.asInstanceOf[java.util.List[Object]])
-    nJL.asInstanceOf[java.util.List[Object]].set(idx, transformIn(value).asInstanceOf[Object])
-    s.set(attrName, nJL)
-  }
-
-  override def apply(key : Any): DynamicValue = {
-    var jL = s.get(attrName)
-    val idx = key.asInstanceOf[Int]
-    if (jL == null ) {
-      null
-    } else {
-      transformOut(null, null, jL.asInstanceOf[java.util.List[Object]].get(idx))
-    }
-  }
-}
-
-class DynamicMap(ib : InstanceBuilder, attrName : String, s: IStruct) extends DynamicValue(ib, attrName ,s) {
-  import DynamicValue._
-  override def update(key : Any, value : Object): Unit = {
-    var jM = s.get(attrName)
-    if (jM == null ) {
-      jM = new java.util.HashMap[Object, Object]()
-    }
-    jM.asInstanceOf[java.util.Map[Object, Object]].put(key.asInstanceOf[AnyRef], value)
-  }
-
-  override def apply(key : Any): DynamicValue = {
-    var jM = s.get(attrName)
-    if (jM == null ) {
-      null
-    } else {
-      transformOut(null, null, jM.asInstanceOf[java.util.Map[Object, Object]].get(key))
-    }
-  }
-}
-
-class DynamicStruct(ib : InstanceBuilder, s: IStruct) extends DynamicValue(ib, null ,s) {
-  import DynamicValue._
-  override def selectDynamic(name: String) : DynamicValue = {
-    transformOut(s, name, s.get(name))
-  }
-
-  def updateDynamic(name: String)(value: Any) {
-    s.set(name, transformIn(value))
-  }
-
-  override def ~(v : Any): Unit = { throw new UnsupportedOperationException()}
-  override def get : Any = s
-
-}
-
-class DynamicReference(ib : InstanceBuilder, val r : IReferenceableInstance) extends DynamicStruct(ib, r) {
-
-  private def _trait(name : String) = new DynamicStruct(ib, r.getTrait(name))
-
-  override def selectDynamic(name: String) : DynamicValue = {
-    if ( r.getTraits contains name ) {
-      _trait(name)
-    } else {
-      super.selectDynamic(name)
-    }
-  }
-
-}
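
The DSL above ultimately just populates Referenceable and Struct objects. A minimal Java sketch of the equivalent direct construction, with invented type and attribute names:

    import org.apache.atlas.typesystem.Referenceable;
    import org.apache.atlas.typesystem.Struct;

    public class InstanceSketch {
        public static Referenceable build() {
            // Roughly what instance(typeName) { ... } creates and fills in via set().
            Referenceable table = new Referenceable("sketch_table");
            table.set("name", "customer");

            // A nested struct attribute, as the builder would assign when given a struct value.
            Struct serde = new Struct("sketch_serde");
            serde.set("format", "csv");
            table.set("serde", serde);

            return table;
        }
    }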

http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala
deleted file mode 100644
index 5ea345f..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala
+++ /dev/null
@@ -1,188 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.builders
-
-import com.google.common.collect.ImmutableList
-import org.apache.atlas.typesystem.TypesDef
-import org.apache.atlas.typesystem.types._
-import org.apache.atlas.typesystem.types.utils.TypesUtil
-import scala.collection.mutable.ArrayBuffer
-import scala.language.{dynamics, implicitConversions, postfixOps}
-import scala.util.DynamicVariable
-import com.google.common.collect.ImmutableSet
-
-object TypesBuilder {
-
-  case class Context(enums : ArrayBuffer[EnumTypeDefinition],
-                      structs : ArrayBuffer[StructTypeDefinition],
-                      classes : ArrayBuffer[HierarchicalTypeDefinition[ClassType]],
-                      traits : ArrayBuffer[HierarchicalTypeDefinition[TraitType]],
-                      currentTypeAttrs : ArrayBuffer[Attr] = null)
-
-  class AttrOption()
-  class ReverseAttributeName(val rNm : String) extends AttrOption
-  class MultiplicityOption(val lower: Int, val upper: Int, val isUnique: Boolean) extends AttrOption
-
-  val required = new AttrOption()
-  val optional = new AttrOption()
-  val collection = new AttrOption()
-  val set = new AttrOption()
-  val composite = new AttrOption()
-  val unique = new AttrOption()
-  val indexed = new AttrOption()
-  def reverseAttributeName(rNm : String) = new ReverseAttributeName(rNm)
-  def multiplicty(lower: Int, upper: Int, isUnique: Boolean) = new MultiplicityOption(lower, upper, isUnique)
-
-  val boolean = DataTypes.BOOLEAN_TYPE.getName
-  val byte = DataTypes.BYTE_TYPE.getName
-  val short = DataTypes.SHORT_TYPE.getName
-  val int = DataTypes.INT_TYPE.getName
-  val long = DataTypes.LONG_TYPE.getName
-  val float = DataTypes.FLOAT_TYPE.getName
-
-  val double = DataTypes.DOUBLE_TYPE.getName
-  val bigint = DataTypes.BIGINTEGER_TYPE.getName
-  val bigdecimal = DataTypes.BIGDECIMAL_TYPE.getName
-  val date = DataTypes.DATE_TYPE.getName
-  val string = DataTypes.STRING_TYPE.getName
-
-  def array(t : String) : String = {
-    DataTypes.arrayTypeName(t)
-  }
-
-  def map(kt : String, vt : String) : String = {
-    DataTypes.mapTypeName(kt, vt)
-  }
-
-  class Attr(ctx : Context, val name : String) {
-
-    private var dataTypeName : String = DataTypes.BOOLEAN_TYPE.getName
-    private var multiplicity: Multiplicity = Multiplicity.OPTIONAL
-    private var isComposite: Boolean = false
-    private var reverseAttributeName: String = null
-    private var isUnique: Boolean = false
-    private var isIndexable: Boolean = false
-
-    ctx.currentTypeAttrs += this
-
-    def getDef : AttributeDefinition =
-      new AttributeDefinition(name, dataTypeName,
-        multiplicity, isComposite, isUnique, isIndexable, reverseAttributeName)
-
-    def `~`(dT : String, options : AttrOption*) : Attr = {
-      dataTypeName = dT
-      options.foreach { o =>
-        o match {
-          case `required` => {multiplicity = Multiplicity.REQUIRED}
-          case `optional` => {multiplicity = Multiplicity.OPTIONAL}
-          case `collection` => {multiplicity = Multiplicity.COLLECTION}
-          case `set` => {multiplicity = Multiplicity.SET}
-          case `composite` => {isComposite = true}
-          case `unique` => {isUnique = true}
-          case `indexed` => {isIndexable = true}
-          case m : MultiplicityOption => {multiplicity = new Multiplicity(m.lower, m.upper, m.isUnique)}
-          case r : ReverseAttributeName => {reverseAttributeName = r.rNm}
-          case _ => ()
-        }
-      }
-      this
-    }
-
-  }
-
-}
-
-class TypesBuilder {
-
-  import org.apache.atlas.typesystem.builders.TypesBuilder.{Attr, Context}
-
-  val required = TypesBuilder.required
-  val optional = TypesBuilder.optional
-  val collection = TypesBuilder.collection
-  val set = TypesBuilder.set
-  val composite = TypesBuilder.composite
-  val unique = TypesBuilder.unique
-  val indexed = TypesBuilder.indexed
-  def multiplicty = TypesBuilder.multiplicty _
-  def reverseAttributeName = TypesBuilder.reverseAttributeName _
-
-  val boolean = TypesBuilder.boolean
-  val byte = TypesBuilder.byte
-  val short = TypesBuilder.short
-  val int = TypesBuilder.int
-  val long = TypesBuilder.long
-  val float = TypesBuilder.float
-
-  val double = TypesBuilder.double
-  val bigint = TypesBuilder.bigint
-  val bigdecimal = TypesBuilder.bigdecimal
-  val date = TypesBuilder.date
-  val string = TypesBuilder.string
-
-  def array = TypesBuilder.array _
-
-  def map = TypesBuilder.map _
-
-  val context = new DynamicVariable[Context](Context(new ArrayBuffer(),
-    new ArrayBuffer(),
-    new ArrayBuffer(),
-    new ArrayBuffer()))
-
-  implicit def strToAttr(s : String) = new Attr(context.value, s)
-
-  def types(f : => Unit ) : TypesDef = {
-    f
-    TypesDef(context.value.enums,
-      context.value.structs,
-      context.value.traits,
-      context.value.classes)
-  }
-
-  def _class(name : String, superTypes : List[String] = List())(f : => Unit): Unit = {
-    val attrs = new ArrayBuffer[Attr]()
-    context.withValue(context.value.copy(currentTypeAttrs = attrs)){f}
-    context.value.classes +=
-      TypesUtil.createClassTypeDef(name, ImmutableSet.copyOf[String](superTypes.toArray), attrs.map(_.getDef):_*)
-  }
-
-  def _trait(name : String, superTypes : List[String] = List())(f : => Unit): Unit = {
-    val attrs = new ArrayBuffer[Attr]()
-    context.withValue(context.value.copy(currentTypeAttrs = attrs)){f}
-    context.value.traits +=
-      TypesUtil.createTraitTypeDef(name, ImmutableSet.copyOf[String](superTypes.toArray), attrs.map(_.getDef):_*)
-    val v = context.value
-    v.traits.size
-  }
-
-  def struct(name : String)(f : => Unit): Unit = {
-    val attrs = new ArrayBuffer[Attr]()
-    context.withValue(context.value.copy(currentTypeAttrs = attrs)){f}
-    context.value.structs +=
-      new StructTypeDefinition(name, attrs.map(_.getDef).toArray)
-  }
-
-  def enum(name : String, values : String*) : Unit = {
-    val enums = values.zipWithIndex.map{ case (v, i) =>
-        new EnumValue(v,i)
-    }
-    context.value.enums +=
-      TypesUtil.createEnumTypeDef(name, enums:_*)
-  }
-
-}
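
Each Attr declared through the builder above collapses into a plain AttributeDefinition. A hedged Java sketch of the same definition written out by hand; the attribute name and multiplicity bounds are illustrative.

    import org.apache.atlas.typesystem.types.AttributeDefinition;
    import org.apache.atlas.typesystem.types.DataTypes;
    import org.apache.atlas.typesystem.types.Multiplicity;

    public class AttributeDefinitionSketch {
        // Equivalent of an attribute declared with the `collection` and `composite` options:
        // array-of-string data type, COLLECTION multiplicity, composite, no reverse attribute.
        static final AttributeDefinition COLUMNS = new AttributeDefinition(
                "columns",
                DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
                Multiplicity.COLLECTION,
                true,   // isComposite
                null);  // reverseAttributeName

        // Equivalent of the builder's multiplicty(0, 10, true): between 0 and 10 unique values.
        static final Multiplicity UP_TO_TEN_UNIQUE = new Multiplicity(0, 10, true);
    }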

http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala
deleted file mode 100755
index d4bed75..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala
+++ /dev/null
@@ -1,374 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.json
-
-import java.text.SimpleDateFormat
-
-import org.apache.atlas.typesystem._
-import org.apache.atlas.typesystem.persistence.{AtlasSystemAttributes, Id}
-import org.apache.atlas.typesystem.types._
-import org.json4s._
-import org.json4s.native.Serialization._
-
-import scala.collection.JavaConversions._
-import scala.collection.JavaConverters._
-import java.util.Date
-
-object InstanceSerialization {
-
-  case class _Id(id : String, version : Int, typeName : String, state : Option[String])
-  case class _AtlasSystemAttributes(createdBy: Option[String], modifiedBy: Option[String], createdTime: Option[Date], modifiedTime: Option[Date])
-  case class _Struct(typeName : String, values : Map[String, AnyRef])
-  case class _Reference(id : Option[_Id],
-                        typeName : String,
-                        values : Map[String, AnyRef],
-                        traitNames : List[String],
-                        traits : Map[String, _Struct],
-                        systemAttributes : Option[_AtlasSystemAttributes])
-
-  def Try[B](x : => B) : Option[B] = {
-    try { Some(x) } catch { case _ : Throwable => None }
-  }
-
-  /**
-   * Convert a Map into
-   * - a Reference or
-   * - a Struct or
-   * - a Id or
-   * - a Java Map whose values are recursively converted.
-   * @param jsonMap
-   * @param format
-   */
-  class InstanceJavaConversion(jsonMap : Map[String, _], format : Formats) {
-
-    /**
-     * For Id, Map must contain the [[_Id]] 'typeHint'
-     * @return
-     */
-    def idClass: Option[String] = {
-      jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])).
-        filter(s => s == classOf[_Id].getName)
-    }
-
-    /**
-     * validate and extract 'id' attribute from Map
-     * @return
-     */
-    def id: Option[String] = {
-      jsonMap.get("id").filter(_.isInstanceOf[String]).flatMap(v => 
Some(v.asInstanceOf[String]))
-    }
-
-    def createdBy: Option[String] = {
-      jsonMap.get("createdBy").filter(_.isInstanceOf[String]).flatMap(v => 
Some(v.asInstanceOf[String]))
-    }
-
-    def modifiedBy: Option[String] = {
-      jsonMap.get("modifiedBy").filter(_.isInstanceOf[String]).flatMap(v => 
Some(v.asInstanceOf[String]))
-    }
-
-    /**
-     * validate and extract 'state' attribute from Map
-     * @return
-     */
-    def state: Option[String] = {
-      jsonMap.get("state").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[String]))
-    }
-
-    /**
-     * validate and extract 'version' attribute from Map
-     * @return
-     */
-    def version: Option[Int] = {
-      jsonMap.get("version").flatMap{
-        case i : Int => Some(i)
-        case bI : BigInt => Some(bI.toInt)
-        case _ => None
-      }
-    }
-
-    def createdTime: Option[Date] = {
-      jsonMap.get("createdTime").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[Date]))
-    }
-
-    def modifiedTime: Option[Date] = {
-      jsonMap.get("modifiedTime").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[Date]))
-    }
-
-    /**
-     * A Map is an Id if:
-     * - it has the correct [[format.typeHintFieldName]]
-     * - it has a 'typeName'
-     * - it has an 'id'
-     * - it has a 'version'
-     * @return
-     */
-    def convertId : Option[_Id] = {
-      for {
-        refClass <- idClass
-        typNm <- typeName
-        i <- id
-        s <- Some(state)
-        v <- version
-      } yield _Id(i, v, typNm, s)
-    }
-
-    def convertSystemAttributes: Option[_AtlasSystemAttributes] = {
-      for {
-        c <- Some(createdBy)
-        m <- Some(modifiedBy)
-        c_t <- Some(createdTime)
-        m_t <- Some(modifiedTime)
-      } yield _AtlasSystemAttributes(c, m, c_t, m_t)
-    }
-
-    /**
-     * validate and extract 'typeName' attribute from Map
-     * @return
-     */
-    def typeName: Option[String] = {
-      jsonMap.get("typeName").flatMap(x => Try(x.asInstanceOf[String]))
-    }
-
-    /**
-     * For Reference, Map must contain the [[_Reference]] 'typeHint'
-     * @return
-     */
-    def referenceClass: Option[String] = {
-      jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])).
-        filter(s => s == classOf[_Reference].getName)
-    }
-
-    /**
-     * For Struct, Map must contain the [[_Struct]] 'typeHint'
-     * @return
-     */
-    def structureClass: Option[String] = {
-      jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])).
-        filter(s => s == classOf[_Struct].getName)
-    }
-
-    /**
-     * validate and extract 'values' attribute from Map
-     * @return
-     */
-    def valuesMap: Option[Map[String, AnyRef]] = {
-      jsonMap.get("values").flatMap(x => Try(x.asInstanceOf[Map[String, AnyRef]]))
-    }
-
-    /**
-     * validate and extract 'traitNames' attribute from Map
-     * @return
-     */
-    def traitNames: Option[Seq[String]] = {
-      jsonMap.get("traitNames").flatMap(x => Try(x.asInstanceOf[Seq[String]]))
-    }
-
-    /**
-     * A Map is a Struct if:
-     * - it has the correct [[format.typeHintFieldName]]
-     * - it has a 'typeName'
-     * - it has a 'values' attribute
-     * @return
-     */
-    def struct: Option[_Struct] = {
-      for {
-        refClass <- structureClass
-        typNm <- typeName
-        values <- valuesMap
-      } yield _Struct(typNm, values)
-    }
-
-    def sequence[A](a : List[(String,Option[A])]) : Option[List[(String,A)]] = a match {
-      case Nil => Some(Nil)
-      case h :: t => {
-        h._2 flatMap {hh => sequence(t) map { (h._1,hh) :: _}}
-      }
-    }
-
-    /**
-     * Extract and convert the traits in this Map.
-     *
-     * @return
-     */
-    def traits: Option[Map[String, _Struct]] = {
-
-      /**
-       * 1. validate and extract 'traits' attribute from Map
-       * Must be a Map[String, _]
-       */
-      val tEntry : Option[Map[String, _]] = jsonMap.get("traits").flatMap(x => Try(x.asInstanceOf[Map[String, _]]))
-
-
-      /**
-       * Try to convert each entry in traits Map into a _Struct
-       * - each entry itself must be of type Map[String, _]
-       * - recursively call InstanceJavaConversion on this Map to convert to a struct
-       */
-      val x: Option[List[(String, Option[_Struct])]] = tEntry.map { tMap: Map[String, _] =>
-        val y: Map[String, Option[_Struct]] = tMap.map { t =>
-          val tObj: Option[_Struct] = Some(t._2).flatMap(x => Try(x.asInstanceOf[Map[String, _]])).
-            flatMap { traitObj: Map[String, _] =>
-            new InstanceJavaConversion(traitObj, format).struct
-          }
-          (t._1, tObj)
-        }
-        y.toList
-      }
-
-      /**
-       * Convert a List of Optional successes into an Option of List
-       */
-      x flatMap (sequence(_)) map (_.toMap)
-
-    }
-
-    def idObject : Option[_Id] = {
-      val idM = jsonMap.get("id").flatMap(x => Try(x.asInstanceOf[Map[String, _]]))
-      idM flatMap  (m => new InstanceJavaConversion(m, format).convertId)
-    }
-
-    /**
-     * A Map is a Reference if:
-     * - it has the correct [[format.typeHintFieldName]]
-     * - it has a 'typeName'
-     * - it has a 'values' attribute
-     * - it has 'traitNames' attribute
-     * - it has 'traits' attribute
-     * @return
-     */
-    def reference : Option[_Reference] = {
-      for {
-        refClass <- referenceClass
-        typNm <- typeName
-        i <- Some(idObject)
-        values <- valuesMap
-        traitNms <- traitNames
-        ts <- traits
-        s_attr <- Some(convertSystemAttributes)
-      } yield _Reference(i, typNm, values, traitNms.toList, ts, s_attr)
-    }
-
-    /**
-     * A Map converted to Java:
-     * - if Map can be materialized as a _Reference, materialize and then recursively call asJava on it.
-     * - if Map can be materialized as a _Struct, materialize and then recursively call asJava on it.
-     * - if Map can be materialized as an _Id, materialize and then recursively call asJava on it.
-     * - otherwise convert each value with asJava and construct as new JavaMap.
-     * @return
-     */
-    def convert : Any = {
-      reference.map(asJava(_)(format)).getOrElse {
-        struct.map(asJava(_)(format)).getOrElse {
-          convertId.map(asJava(_)(format)).getOrElse {
-            jsonMap.map { t =>
-              (t._1 -> asJava(t._2)(format))
-            }.asJava
-          }
-        }
-      }
-    }
-  }
-
-  def asJava(v : Any)(implicit format: Formats) : Any = v match {
-    case i : _Id => new Id(i.id, i.version, i.typeName, i.state.orNull)
-    case s : _Struct => new Struct(s.typeName, asJava(s.values).asInstanceOf[java.util.Map[String, Object]])
-    case s_attr : _AtlasSystemAttributes => new AtlasSystemAttributes(s_attr.createdBy.orNull, s_attr.modifiedBy.orNull, s_attr.createdTime.orNull, s_attr.modifiedTime.orNull)
-    case r : _Reference => {
-      val id = r.id match {
-        case Some(i) => new Id(i.id, i.version, i.typeName, i.state.orNull)
-        case None => new Id(r.typeName)
-      }
-
-      val s_attr = r.systemAttributes match {
-        case Some(s) => new AtlasSystemAttributes(s.createdBy.orNull, s.modifiedBy.orNull, s.createdTime.orNull, s.modifiedTime.orNull)
-        case None => new AtlasSystemAttributes()
-      }
-      new Referenceable(id,
-        r.typeName,
-        asJava(r.values).asInstanceOf[java.util.Map[String, Object]],
-        asJava(r.traitNames).asInstanceOf[java.util.List[String]],
-        asJava(r.traits).asInstanceOf[java.util.Map[String, IStruct]], s_attr)
-    }
-    case l : List[_] => l.map(e => asJava(e)).asJava
-    case m : Map[_, _] if Try{m.asInstanceOf[Map[String,_]]}.isDefined => {
-      if (m.keys.size == 2 && m.keys.contains("value") && m.keys.contains("ordinal")) {
-        new EnumValue(m.get("value").toString, m.get("ordinal").asInstanceOf[BigInt].intValue())
-      } else {
-        new InstanceJavaConversion(m.asInstanceOf[Map[String,_]], format).convert
-      }
-    }
-
-    case _ => v
-  }
-
-  def asScala(v : Any) : Any = v match {
-    case i : Id => _Id(i._getId(), i.getVersion, i.getClassName, Some(i.getStateAsString))
-    case s_attr: AtlasSystemAttributes => _AtlasSystemAttributes(Some(s_attr.createdBy), Some(s_attr.modifiedBy), Some(s_attr.createdTime), Some(s_attr.modifiedTime))
-    case r : IReferenceableInstance => {
-      val traits = r.getTraits.map { tName =>
-        val t = r.getTrait(tName).asInstanceOf[IStruct]
-        (tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]]))
-      }.toMap
-      _Reference(Some(asScala(r.getId).asInstanceOf[_Id]),
-        r.getTypeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]],
-        asScala(r.getTraits).asInstanceOf[List[String]],
-        traits.asInstanceOf[Map[String, _Struct]], Some(asScala(r.getSystemAttributes).asInstanceOf[_AtlasSystemAttributes]))
-    }
-    case s : IStruct => _Struct(s.getTypeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]])
-    case l : java.util.List[_] => l.asScala.map(e => asScala(e)).toList
-    case m : java.util.Map[_, _] => m.asScala.map(t => (asScala(t._1), asScala(t._2))).toMap
-    case _ => v
-  }
-
-  val _formats = new DefaultFormats {
-    override val dateFormatter = TypeSystem.getInstance().getDateFormat.asInstanceOf[SimpleDateFormat]
-    override val typeHints = FullTypeHints(List(classOf[_Id], classOf[_Struct], classOf[_Reference]))
-  }
-
-  def buildFormat(withBigDecimals : Boolean) = {
-    if (withBigDecimals)
-      _formats + new BigDecimalSerializer + new BigIntegerSerializer
-    else
-      _formats
-  }
-
-  def _toJson(value: AnyRef, withBigDecimals : Boolean = false): String = {
-    implicit val formats = buildFormat(withBigDecimals)
-
-    val _s : AnyRef = asScala(value).asInstanceOf[AnyRef]
-    write(_s)
-  }
-
-  def toJson(value: IStruct, withBigDecimals : Boolean = false): String = {
-    _toJson(value, withBigDecimals)
-  }
-
-  def fromJsonStruct(jsonStr: String, withBigDecimals : Boolean = false): Struct = {
-    implicit val formats = buildFormat(withBigDecimals)
-    val _s = read[_Struct](jsonStr)
-    asJava(_s).asInstanceOf[Struct]
-  }
-
-  //def toJsonReferenceable(value: Referenceable, withBigDecimals : Boolean = false): String = _toJson(value, withBigDecimals)
-  def fromJsonReferenceable(jsonStr: String, withBigDecimals : Boolean = false): Referenceable = {
-    implicit val formats = buildFormat(withBigDecimals)
-    val _s = read[_Reference](jsonStr)
-    asJava(_s).asInstanceOf[Referenceable]
-  }
-}
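
The file above carried the untyped JSON round trip that clients used for Referenceable and Struct instances. A hedged usage sketch, not taken from this commit (the type name, attribute, and the single-argument Referenceable constructor are assumptions based on the deleted sources):

    import org.apache.atlas.typesystem.Referenceable
    import org.apache.atlas.typesystem.json.InstanceSerialization

    // Illustrative only: "hive_table" and "name" are made-up identifiers.
    val ref = new Referenceable("hive_table")
    ref.set("name", "sales_fact")

    // Serialize via the asScala/_Reference path, then rebuild the Java object
    // through asJava, as defined in the removed file above.
    val json = InstanceSerialization.toJson(ref, withBigDecimals = true)
    val back = InstanceSerialization.fromJsonReferenceable(json, withBigDecimals = true)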

http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala
deleted file mode 100755
index ca4dd7f..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala
+++ /dev/null
@@ -1,348 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.json
-
-import org.apache.atlas.typesystem._
-import org.apache.atlas.typesystem.persistence.{AtlasSystemAttributes, Id, 
ReferenceableInstance, StructInstance}
-import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, MapType, 
TypeCategory}
-import org.apache.atlas.typesystem.types._
-import org.json4s.JsonAST.JInt
-import org.json4s.{JsonAST, _}
-import org.json4s.native.Serialization._
-
-import scala.collection.JavaConversions._
-import scala.collection.JavaConverters._
-import java.util.Date
-
-class BigDecimalSerializer extends CustomSerializer[java.math.BigDecimal](format => (
-    {
-        case JDecimal(e) => e.bigDecimal
-    },
-    {
-        case e: java.math.BigDecimal => JDecimal(new BigDecimal(e))
-    }
-    ))
-
-class BigIntegerSerializer extends CustomSerializer[java.math.BigInteger](format => (
-    {
-        case JInt(e) => e.bigInteger
-    },
-    {
-        case e: java.math.BigInteger => JInt(new BigInt(e))
-    }
-    ))
-
-class IdSerializer extends CustomSerializer[Id](format => ( {
-    case JObject(JField("id", JInt(id)) ::
-        JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
-        JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, typeName)
-    case JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
-        JField("id", JInt(id)) ::
-        JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, typeName)
-    case JObject(JField("id", JString(id)) ::
-        JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
-        JField("version", JString(version)) :: Nil) => new Id(id, version.toInt, typeName)
-    case JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
-        JField("id", JString(id)) ::
-        JField("version", JString(version)) :: Nil) => new Id(id, version.toInt, typeName)
-
-}, {
-    case id: Id => JObject(JField("id", JString(id.id)),
-        JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(id.typeName)),
-        JField("version", JInt(id.version)))
-}
-    ))
-
-class TypedStructSerializer() extends Serializer[ITypedStruct] {
-
-    def deserialize(implicit format: Formats) = {
-        case (TypeInfo(clazz, ptype), json) if classOf[ITypedStruct].isAssignableFrom(clazz) => json match {
-            case JObject(fs) =>
-                val (typ, fields) = fs.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME)
-                val typName = typ(0)._2.asInstanceOf[JString].s
-                val sT = typSystem.getDataType(
-                    classOf[IConstructableType[IStruct, ITypedStruct]], typName).asInstanceOf[IConstructableType[IStruct, ITypedStruct]]
-                val s = sT.createInstance()
-                Serialization.deserializeFields(typSystem, sT, s, fields)
-                s
-            case x => throw new MappingException("Can't convert " + x + " to TypedStruct")
-        }
-
-    }
-
-    def typSystem = TypeSystem.getInstance()
-
-    /**
-     * Implicit conversion from `java.math.BigInteger` to `scala.BigInt`.
-     * match the builtin conversion for BigDecimal.
-     * See https://groups.google.com/forum/#!topic/scala-language/AFUamvxu68Q
-     */
-    //implicit def javaBigInteger2bigInt(x: java.math.BigInteger): BigInt = new BigInt(x)
-
-    def serialize(implicit format: Formats) = {
-        case e: ITypedStruct =>
-            val fields = Serialization.serializeFields(e)
-            JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.getTypeName)) :: fields)
-    }
-}
-
-class TypedReferenceableInstanceSerializer()
-    extends Serializer[ITypedReferenceableInstance] {
-
-    def deserialize(implicit format: Formats) = {
-        case (TypeInfo(clazz, ptype), json) if classOf[ITypedReferenceableInstance].isAssignableFrom(clazz) => json match {
-            case JObject(JField("id", JInt(id)) ::
-                JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
-                JField("version", JInt(version)) ::
-                JField("state", JString(state)) :: Nil) => new Id(id.toLong, version.toInt, typeName, state)
-            case JObject(JField("id", JString(id)) ::
-                JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
-                JField("version", JInt(version)) ::
-                JField("state", JString(state)) :: Nil) => new Id(id, version.toInt, typeName, state)
-            case JObject(fs) =>
-                var typField: Option[JField] = None
-                var idField: Option[JField] = None
-                var traitsField: Option[JField] = None
-                var sysAttrField: Option[JField] = None
-                var fields: List[JField] = Nil
-
-                fs.foreach { f: JField => f._1 match {
-                    case Serialization.STRUCT_TYPE_FIELD_NAME => typField = Some(f)
-                    case Serialization.ID_TYPE_FIELD_NAME => idField = Some(f)
-                    case Serialization.TRAIT_TYPE_FIELD_NAME => traitsField = Some(f)
-                    case Serialization.SYSTEM_ATTR_FIELD_NAME => sysAttrField = Some(f)
-                    case _ => fields = fields :+ f
-                }
-                }
-
-                var traitNames: List[String] = Nil
-
-                traitsField.map { t =>
-                    val tObj: JObject = t._2.asInstanceOf[JObject]
-                    tObj.obj.foreach { oTrait =>
-                        val tName: String = oTrait._1
-                        traitNames = traitNames :+ tName
-                    }
-                }
-
-                val typName = typField.get._2.asInstanceOf[JString].s
-                val sT = typSystem.getDataType(
-                    classOf[ClassType], typName).asInstanceOf[ClassType]
-                val id = Serialization.deserializeId(idField.get._2)
-                val s_attr = Serialization.deserializeSystemAttributes(sysAttrField.get._2)
-                val s = sT.createInstance(id, s_attr, traitNames: _*)
-                Serialization.deserializeFields(typSystem, sT, s, fields)
-
-                traitsField.map { t =>
-                    val tObj: JObject = t._2.asInstanceOf[JObject]
-                    tObj.obj.foreach { oTrait =>
-                        val tName: String = oTrait._1
-                        val traitJObj: JObject = oTrait._2.asInstanceOf[JObject]
-                        val traitObj = s.getTrait(tName).asInstanceOf[ITypedStruct]
-                        val tT = typSystem.getDataType(
-                            classOf[TraitType], traitObj.getTypeName).asInstanceOf[TraitType]
-                        val (tTyp, tFields) = traitJObj.obj.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME)
-                        Serialization.deserializeFields(typSystem, tT, traitObj, tFields)
-                    }
-                }
-
-                s
-            case x => throw new MappingException("Can't convert " + x + " to TypedStruct")
-        }
-
-    }
-
-    def typSystem = TypeSystem.getInstance()
-
-    def serialize(implicit format: Formats) = {
-        case id: Id => Serialization.serializeId(id)
-        case e: ITypedReferenceableInstance =>
-            val idJ = JField(Serialization.ID_TYPE_FIELD_NAME, Serialization.serializeId(e.getId))
-            val s_attrJ = JField(Serialization.SYSTEM_ATTR_FIELD_NAME, Serialization.serializeSystemAttributes(e.getSystemAttributes))
-            var fields = Serialization.serializeFields(e)
-            val traitsJ: List[JField] = e.getTraits.map(tName => JField(tName, Extraction.decompose(e.getTrait(tName)))).toList
-
-            fields = idJ :: s_attrJ :: fields
-            if (traitsJ.size > 0) {
-                fields = fields :+ JField(Serialization.TRAIT_TYPE_FIELD_NAME, JObject(traitsJ: _*))
-            }
-
-            JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.getTypeName)) :: fields)
-    }
-}
-
-
-object Serialization {
-    val STRUCT_TYPE_FIELD_NAME = "$typeName$"
-    val ID_TYPE_FIELD_NAME = "$id$"
-    val TRAIT_TYPE_FIELD_NAME = "$traits$"
-    val SYSTEM_ATTR_FIELD_NAME = "$systemAttributes$"
-
-    def extractList(lT: ArrayType, value: JArray)(implicit format: Formats): Any = {
-        val dT = lT.getElemType
-        value.arr.map(extract(dT, _)).asJava
-    }
-
-    def extractMap(mT: MapType, value: JObject)(implicit format: Formats): Any = {
-        val kT = mT.getKeyType
-        val vT = mT.getValueType
-        value.obj.map { f: JField => f._1 -> extract(vT, f._2)}.toMap.asJava
-    }
-
-    def extract(dT: IDataType[_], value: JValue)(implicit format: Formats): Any = value match {
-        case value: JBool => Extraction.extract[Boolean](value)
-        case value: JInt => Extraction.extract[Int](value)
-        case value: JDouble => Extraction.extract[Double](value)
-        case value: JDecimal => Extraction.extract[BigDecimal](value)
-        case value: JString => Extraction.extract[String](value)
-        case JNull => null
-        case value: JArray => extractList(dT.asInstanceOf[ArrayType], value.asInstanceOf[JArray])
-        case value: JObject if dT.getTypeCategory eq TypeCategory.MAP =>
-            extractMap(dT.asInstanceOf[MapType], value.asInstanceOf[JObject])
-        case value: JObject if ((dT.getTypeCategory eq TypeCategory.STRUCT) || (dT.getTypeCategory eq TypeCategory.TRAIT)) =>
-            Extraction.extract[ITypedStruct](value)
-        case value: JObject =>
-            Extraction.extract[ITypedReferenceableInstance](value)
-    }
-
-    def serializeId(id: Id) = JObject(JField("id", JString(id.id)),
-        JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(id.typeName)),
-        JField("version", JInt(id.version)), JField("state", JString(id.state.name())))
-
-
-    //Handling serialization issues with null values
-    //See https://github.com/json4s/json4s/issues/358
-    def parseString(s: Any) = s match {
-        case s:String => JString(s)
-        case s:Date => JString(s.toString)
-        case _ => JString("")
-    }
-
-    def serializeSystemAttributes(s_attr: AtlasSystemAttributes) = JObject(
-        JField("createdBy", parseString(s_attr.createdBy)),
-        JField("modifiedBy", parseString(s_attr.modifiedBy)),
-        JField("createdTime", parseString(s_attr.createdTime)),
-        JField("modifiedTime", parseString(s_attr.modifiedTime))
-    )
-
-    def serializeFields(e: ITypedInstance)(implicit format: Formats) = e.fieldMapping.fields.map {
-        case (fName, info) => {
-            var v = e.get(fName)
-            if (v != null && (info.dataType().getTypeCategory eq TypeCategory.MAP)) {
-                v = v.asInstanceOf[java.util.Map[_, _]].toMap
-            }
-
-            if (v != null && (info.dataType().getTypeCategory eq TypeCategory.CLASS) && !info.isComposite) {
-                v = v.asInstanceOf[IReferenceableInstance].getId
-            }
-
-            if (v != null && (info.dataType().getTypeCategory eq TypeCategory.ENUM)) {
-                v = v.asInstanceOf[EnumValue].value
-            }
-
-            JField(fName, Extraction.decompose(v))
-        }
-    }.toList.map(_.asInstanceOf[JField])
-
-    def deserializeFields[T <: ITypedInstance](typeSystem: TypeSystem,
-                                               sT: IConstructableType[_, T],
-                                               s: T, fields: List[JField])(implicit format: Formats)
-    = {
-        //MetadataService.setCurrentService(currentMdSvc)
-        fields.foreach { f =>
-            val fName = f._1
-            val fInfo = sT.fieldMapping.fields(fName)
-            if (fInfo != null) {
-                //println(fName)
-                var v = f._2
-                if (fInfo.dataType().getTypeCategory == TypeCategory.TRAIT ||
-                    fInfo.dataType().getTypeCategory == TypeCategory.STRUCT) {
-                    v = v match {
-                        case JObject(sFields) =>
-                            JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(fInfo.dataType.getName)) :: sFields)
-                        case x => x
-                    }
-                }
-                s.set(fName, Serialization.extract(fInfo.dataType(), v))
-            }
-        }
-    }
-
-    def deserializeId(value: JValue)(implicit format: Formats) = value match {
-        case JObject(JField("id", JInt(id)) ::
-            JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
-            JField("version", JInt(version)) ::
-            JField("state", JString(state)) :: Nil) => new Id(id.toLong, version.toInt, typeName, state)
-        case JObject(JField("id", JString(id)) ::
-            JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
-            JField("version", JInt(version)) ::
-            JField("state", JString(state)) :: Nil) => new Id(id, version.toInt, typeName, state)
-    }
-
-    def deserializeSystemAttributes(value: JValue)(implicit format : Formats) = value match {
-        case JObject(JField("createdBy", JString(createdBy))::
-            JField("modifiedBy", JString(modifiedBy))::
-            JField("createdTime", JString(createdTime))::
-            JField("modifiedTime", JString(modifiedTime))::Nil) => new AtlasSystemAttributes(createdBy, modifiedBy, createdTime, modifiedTime)
-    }
-
-    def toJson(value: ITypedReferenceableInstance): String = {
-        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-            new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-        write(value)
-    }
-
-    def toJson(value: ITypedInstance): String = {
-      implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-        new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-      write(value)
-    }
-
-    def toJsonPretty(value: ITypedReferenceableInstance): String = {
-        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-            new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-        writePretty(value)
-    }
-
-    def fromJson(jsonStr: String): ITypedReferenceableInstance = {
-        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-            new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-        read[ReferenceableInstance](jsonStr)
-    }
-
-    def traitFromJson(jsonStr: String): ITypedInstance = {
-      implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-        new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-        read[StructInstance](jsonStr)
-    }
-
-    def arrayFromJson(jsonStr: String): ITypedInstance = {
-        implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
-          new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
-        read[StructInstance](jsonStr)
-    }
-}
-
-
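
Serialization above covered the typed path: toJson/fromJson compose the custom json4s serializers and rebuild instances against TypeSystem.getInstance() using the $typeName$, $id$, $traits$ and $systemAttributes$ markers defined in the object. A hedged sketch of the call pattern only (constructing a typed instance is elided because it went through the deleted ClassType API):

    import org.apache.atlas.typesystem.ITypedReferenceableInstance
    import org.apache.atlas.typesystem.json.Serialization

    // Round-trip an already constructed typed instance through the removed helpers.
    def roundTrip(instance: ITypedReferenceableInstance): ITypedReferenceableInstance = {
      val json = Serialization.toJson(instance) // registers the serializers shown above
      Serialization.fromJson(json)              // returns the instance re-typed by the TypeSystem
    }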
