ATLAS-2251: restored a number of previously deleted tests

Project: http://git-wip-us.apache.org/repos/asf/atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/atlas/commit/042fc557
Tree: http://git-wip-us.apache.org/repos/asf/atlas/tree/042fc557
Diff: http://git-wip-us.apache.org/repos/asf/atlas/diff/042fc557

Branch: refs/heads/ATLAS-2251
Commit: 042fc55755e17b3b0112a7a08d30c296300b95d9
Parents: 5f40541
Author: Madhan Neethiraj <mad...@apache.org>
Authored: Thu Nov 9 21:44:26 2017 -0800
Committer: Madhan Neethiraj <mad...@apache.org>
Committed: Thu Nov 9 21:44:26 2017 -0800

----------------------------------------------------------------------
 .../org/apache/atlas/query/QueryParser.java     |   11 +-
 .../impexp/AtlasImportRequestTest.java          |  145 +++
 .../repository/impexp/ExportServiceTest.java    |  358 ++++++
 .../repository/impexp/ImportServiceTest.java    |  224 ++++
 .../impexp/ImportServiceTestUtils.java          |  116 ++
 .../impexp/ImportTransformerJSONTest.java       |   47 +
 .../impexp/ImportTransformerTest.java           |  131 +++
 .../repository/impexp/ImportTransformsTest.java |  151 +++
 .../impexp/TypeAttributeDifferenceTest.java     |  150 +++
 .../atlas/repository/impexp/UniqueListTest.java |   69 ++
 .../impexp/ZipFileResourceTestUtils.java        |  220 ++++
 .../atlas/repository/impexp/ZipSinkTest.java    |  210 ++++
 .../atlas/repository/impexp/ZipSourceTest.java  |  189 ++++
 .../store/graph/AtlasTypeDefGraphStoreTest.java |  644 +++++++++++
 .../graph/v1/AtlasEntityDefStoreV1Test.java     |   73 ++
 ...AtlasEntityStoreV1BulkImportPercentTest.java |  167 +++
 .../store/graph/v1/AtlasEntityStoreV1Test.java  | 1043 ++++++++++++++++++
 .../v1/AtlasRelationshipDefStoreV1Test.java     |  330 ++++++
 .../AtlasRelationshipStoreHardDeleteV1Test.java |  112 ++
 .../AtlasRelationshipStoreSoftDeleteV1Test.java |  116 ++
 .../graph/v1/AtlasRelationshipStoreV1Test.java  |  623 +++++++++++
 .../InverseReferenceUpdateHardDeleteV1Test.java |   75 ++
 .../InverseReferenceUpdateSoftDeleteV1Test.java |   78 ++
 .../graph/v1/InverseReferenceUpdateV1Test.java  |  374 +++++++
 .../userprofile/UserProfileServiceTest.java     |  280 +++++
 .../atlas/services/MetricsServiceTest.java      |  123 +++
 .../utils/ObjectUpdateSynchronizerTest.java     |  218 ++++
 .../org/apache/atlas/util/RestUtilsTest.java    |  195 ++++
 28 files changed, 6471 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/main/java/org/apache/atlas/query/QueryParser.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/QueryParser.java b/repository/src/main/java/org/apache/atlas/query/QueryParser.java
index be2d9b1..1e5e5ff 100644
--- a/repository/src/main/java/org/apache/atlas/query/QueryParser.java
+++ b/repository/src/main/java/org/apache/atlas/query/QueryParser.java
@@ -19,11 +19,20 @@ package org.apache.atlas.query;
 
 import org.apache.atlas.query.Expressions.Expression;
 
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
 
 
 public class QueryParser {
+    private static final Set<String> RESERVED_KEYWORDS =
+            new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-",
+                                        "*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has",
+                                        "as", "times", "withPath", "limit", "offset", "orderby", "count", "max", "min",
+                                        "sum", "by", "order", "like"));
+
     public static boolean isKeyword(String word) {
-        return false; // TODO:
+        return RESERVED_KEYWORDS.contains(word);
     }
 
     public static Expression apply(String queryStr, QueryParams params) {
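
Not part of this commit: a minimal sketch of how the restored isKeyword() check can be
used by a caller; the requireNonKeyword() helper and the validation scenario are
hypothetical:

    package org.apache.atlas.query;

    public class DslKeywordCheckSketch {
        // Rejects DSL reserved words (e.g. "select", "where", "limit") where a plain
        // identifier is expected.
        public static String requireNonKeyword(String identifier) {
            if (QueryParser.isKeyword(identifier)) {
                throw new IllegalArgumentException("'" + identifier + "' is a reserved DSL keyword");
            }
            return identifier;
        }

        public static void main(String[] args) {
            System.out.println(requireNonKeyword("owner"));    // prints "owner"
            System.out.println(requireNonKeyword("select"));   // throws IllegalArgumentException
        }
    }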

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/AtlasImportRequestTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/AtlasImportRequestTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/AtlasImportRequestTest.java
new file mode 100644
index 0000000..b70b181
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/AtlasImportRequestTest.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import org.apache.atlas.model.impexp.AtlasImportRequest;
+import org.apache.atlas.type.AtlasType;
+import org.testng.annotations.Test;
+
+import static org.testng.Assert.*;
+
+public class AtlasImportRequestTest {
+    @Test
+    public void serializeAtlasImportRequstFromJsonWithEmptyOptions() {
+        String jsonData = "{ \"options\": {} }";
+
+        AtlasImportRequest request = AtlasType.fromJson(jsonData, AtlasImportRequest.class);
+
+        assertNotNull(request);
+        assertNotNull(request.getOptions());
+        assertNull(request.getOptions().get(AtlasImportRequest.TRANSFORMS_KEY));
+
+        ImportTransforms tr = ImportTransforms.fromJson(request.getOptions().get(AtlasImportRequest.TRANSFORMS_KEY));
+
+        assertNull(tr);
+    }
+
+    @Test
+    public void serializeOptions_VerifyAccessors() {
+        String guid = "\"abcd\"";
+        String pos = "\"1\"";
+        String trueVal = "\"true\"";
+
+        String jsonData = "{ \"options\": " +
+                "               {" +
+                "\"startGuid\":" + guid + "," +
+                "\"startPosition\":" + pos + "," +
+                "\"updateTypeDefinition\":" + trueVal +
+                "}" +
+                "}";
+
+        AtlasImportRequest request = AtlasType.fromJson(jsonData, AtlasImportRequest.class);
+
+        assertNotNull(request);
+        assertNotNull(request.getStartGuid());
+        assertNotNull(request.getStartPosition());
+        assertNotNull(request.getUpdateTypeDefs());
+
+        assertEquals(request.getStartGuid(), guid.replace("\"", ""));
+        assertEquals(request.getStartPosition(), pos.replace("\"", ""));
+        assertEquals(request.getUpdateTypeDefs(), trueVal.replace("\"", ""));
+    }
+
+    @Test
+    public void optionsDefaultsTest() {
+        String jsonData = "{ \"options\": " +
+                "               {" +
+                    "}" +
+                "}";
+
+        AtlasImportRequest request = AtlasType.fromJson(jsonData, AtlasImportRequest.class);
+
+        assertNotNull(request);
+        assertNull(request.getStartGuid());
+        assertNull(request.getStartPosition());
+        assertNull(request.getUpdateTypeDefs());
+    }
+
+    @Test
+    public void serializeAtlasImportRequstFromJsonWithEmptyTransforms() {
+        String jsonData = "{ \"options\": { \"transforms\": \"{ }\" } }";
+
+        AtlasImportRequest request = AtlasType.fromJson(jsonData, AtlasImportRequest.class);
+
+        assertNotNull(request);
+        assertNotNull(request.getOptions());
+        assertNotNull(request.getOptions().get(AtlasImportRequest.TRANSFORMS_KEY));
+
+        ImportTransforms tr = ImportTransforms.fromJson(request.getOptions().get(AtlasImportRequest.TRANSFORMS_KEY));
+
+        assertNotNull(tr);
+        assertNotNull(tr.getTransforms());
+        assertEquals(tr.getTransforms().size(), 0);
+    }
+
+    @Test
+    public void serializeAtlasImportRequstFromJsonWith1Transform() {
+        String jsonData = "{ \"options\": { \"transforms\": \"{ \\\"hive_db\\\": { \\\"qualifiedName\\\": [ \\\"replace:@cl1:@cl2\\\" ] } }\" } }";
+
+        AtlasImportRequest request = AtlasType.fromJson(jsonData, AtlasImportRequest.class);
+
+        assertNotNull(request);
+        assertNotNull(request.getOptions());
+        assertNotNull(request.getOptions().get(AtlasImportRequest.TRANSFORMS_KEY));
+
+        ImportTransforms tr = ImportTransforms.fromJson(request.getOptions().get(AtlasImportRequest.TRANSFORMS_KEY));
+
+        assertNotNull(tr);
+        assertNotNull(tr.getTransforms());
+        assertEquals(tr.getTransforms().size(), 1);
+        assertTrue(tr.getTransforms().containsKey("hive_db"));
+        assertEquals(tr.getTransforms("hive_db").entrySet().size(), 1);
+        assertTrue(tr.getTransforms("hive_db").containsKey("qualifiedName"));
+        assertEquals(tr.getTransforms("hive_db").get("qualifiedName").size(), 1);
+    }
+
+    @Test
+    public void serializeAtlasImportRequstFromJson() {
+        String jsonData = "{ \"options\": { \"transforms\": \"{ \\\"hive_db\\\": { \\\"qualifiedName\\\": [ \\\"replace:@cl1:@cl2\\\" ] }, \\\"hive_table\\\": { \\\"qualifiedName\\\": [ \\\"lowercase\\\", \\\"replace:@cl1:@cl2\\\" ] } }\" } } }";
+
+        AtlasImportRequest request = AtlasType.fromJson(jsonData, AtlasImportRequest.class);
+
+        assertNotNull(request);
+        assertNotNull(request.getOptions());
+        assertNotNull(request.getOptions().get(AtlasImportRequest.TRANSFORMS_KEY));
+
+        ImportTransforms tr = ImportTransforms.fromJson(request.getOptions().get(AtlasImportRequest.TRANSFORMS_KEY));
+
+        assertNotNull(tr);
+        assertNotNull(tr.getTransforms());
+        assertEquals(tr.getTransforms().size(), 2);
+        assertTrue(tr.getTransforms().containsKey("hive_db"));
+        assertEquals(tr.getTransforms("hive_db").entrySet().size(), 1);
+        assertTrue(tr.getTransforms("hive_db").containsKey("qualifiedName"));
+        assertEquals(tr.getTransforms("hive_db").get("qualifiedName").size(), 1);
+        assertTrue(tr.getTransforms().containsKey("hive_table"));
+        assertEquals(tr.getTransforms("hive_table").entrySet().size(), 1);
+        assertTrue(tr.getTransforms("hive_table").containsKey("qualifiedName"));
+        assertEquals(tr.getTransforms("hive_table").get("qualifiedName").size(), 2);
+    }
+}
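
Not part of this commit: a minimal sketch of building the same kind of request
programmatically instead of parsing it from JSON, using only accessors exercised by the
tests above (the class name is made up; same-package visibility of ImportTransforms is
assumed):

    package org.apache.atlas.repository.impexp;

    import java.util.Collections;

    import org.apache.atlas.exception.AtlasBaseException;
    import org.apache.atlas.model.impexp.AtlasImportRequest;

    public class ImportRequestSketch {
        public static void main(String[] args) throws AtlasBaseException {
            // the "transforms" option is itself a JSON document carried as a string value
            AtlasImportRequest request = new AtlasImportRequest();
            request.setOptions(Collections.singletonMap(AtlasImportRequest.TRANSFORMS_KEY,
                    "{ \"hive_db\": { \"qualifiedName\": [ \"replace:@cl1:@cl2\" ] } }"));

            ImportTransforms tr = ImportTransforms.fromJson(request.getOptions().get(AtlasImportRequest.TRANSFORMS_KEY));
            System.out.println(tr.getTransforms().keySet());   // [hive_db]
        }
    }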

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/ExportServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ExportServiceTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ExportServiceTest.java
new file mode 100644
index 0000000..2a8bdfa
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ExportServiceTest.java
@@ -0,0 +1,358 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+
+import org.apache.atlas.RequestContextV1;
+import org.apache.atlas.TestModules;
+import org.apache.atlas.TestUtilsV2;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasExportRequest;
+import org.apache.atlas.model.impexp.AtlasExportResult;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasObjectId;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.repository.graph.AtlasGraphProvider;
+import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
+import org.apache.atlas.repository.store.graph.v1.AtlasEntityChangeNotifier;
+import org.apache.atlas.repository.store.graph.v1.AtlasEntityStoreV1;
+import org.apache.atlas.repository.store.graph.v1.AtlasEntityStream;
+import org.apache.atlas.repository.store.graph.v1.DeleteHandlerV1;
+import org.apache.atlas.repository.store.graph.v1.EntityGraphMapper;
+import org.apache.atlas.repository.store.graph.v1.SoftDeleteHandlerV1;
+import org.apache.atlas.store.AtlasTypeDefStore;
+import org.apache.atlas.type.AtlasTypeRegistry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Guice;
+import org.testng.annotations.Test;
+
+import javax.inject.Inject;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.mockito.Mockito.mock;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertTrue;
+
+@Guice(modules = TestModules.TestOnlyModule.class)
+public class ExportServiceTest {
+    private static final Logger LOG = LoggerFactory.getLogger(ExportServiceTest.class);
+
+    @Inject
+    AtlasTypeRegistry typeRegistry;
+
+    @Inject
+    private AtlasTypeDefStore typeDefStore;
+
+    @Inject
+    private EntityGraphMapper graphMapper;
+    @Inject
+    ExportService exportService;
+    private DeleteHandlerV1 deleteHandler = mock(SoftDeleteHandlerV1.class);
+    private AtlasEntityChangeNotifier mockChangeNotifier = mock(AtlasEntityChangeNotifier.class);
+    private AtlasEntityStoreV1 entityStore;
+
+    @BeforeTest
+    public void setupTest() {
+        RequestContextV1.clear();
+        RequestContextV1.get().setUser(TestUtilsV2.TEST_USER);
+    }
+
+    @BeforeClass
+    public void setupSampleData() throws AtlasBaseException {
+        entityStore = new AtlasEntityStoreV1(deleteHandler, typeRegistry, mockChangeNotifier, graphMapper);
+
+        AtlasTypesDef sampleTypes = TestUtilsV2.defineDeptEmployeeTypes();
+        AtlasTypesDef typesToCreate = AtlasTypeDefStoreInitializer.getTypesToCreate(sampleTypes, typeRegistry);
+
+        if (!typesToCreate.isEmpty()) {
+            typeDefStore.createTypesDef(typesToCreate);
+        }
+
+        AtlasEntity.AtlasEntitiesWithExtInfo hrDept = TestUtilsV2.createDeptEg2();
+
+        AtlasEntityStream entityStream = new AtlasEntityStream(hrDept);
+        entityStore.createOrUpdate(entityStream, false);
+        LOG.debug("==> setupSampleData: {}", AtlasEntity.dumpObjects(hrDept.getEntities(), null).toString());
+    }
+
+    @AfterClass
+    public void clear() {
+        AtlasGraphProvider.cleanup();
+    }
+
+    private AtlasExportRequest getRequestForFullFetch() {
+        AtlasExportRequest request = new AtlasExportRequest();
+
+        List<AtlasObjectId> itemsToExport = new ArrayList<>();
+        itemsToExport.add(new AtlasObjectId("hive_db", "qualifiedName", 
"default@cl1"));
+        request.setItemsToExport(itemsToExport);
+
+        return request;
+    }
+
+    private AtlasExportRequest getRequestForDept(boolean addFetchType, String 
fetchTypeValue, boolean addMatchType, String matchTypeValue) {
+        AtlasExportRequest request = new AtlasExportRequest();
+
+        List<AtlasObjectId> itemsToExport = new ArrayList<>();
+        itemsToExport.add(new AtlasObjectId("Department", "name", "hr"));
+        request.setItemsToExport(itemsToExport);
+
+        setOptionsMap(request, addFetchType, fetchTypeValue, addMatchType, 
matchTypeValue);
+        return request;
+    }
+
+    private AtlasExportRequest getRequestForEmployee() {
+        AtlasExportRequest request = new AtlasExportRequest();
+
+        List<AtlasObjectId> itemsToExport = new ArrayList<>();
+        itemsToExport.add(new AtlasObjectId("Employee", "name", "Max"));
+        request.setItemsToExport(itemsToExport);
+
+        setOptionsMap(request, true, "CONNECTED", false, "");
+        return request;
+    }
+
+    private void setOptionsMap(AtlasExportRequest request,
+                               boolean addFetchType, String fetchTypeValue, 
boolean addMatchType, String matchTypeValue) {
+        Map<String, Object> optionsMap = null;
+        if(addFetchType) {
+            if(optionsMap == null) {
+                optionsMap = new HashMap<>();
+            }
+
+            optionsMap.put("fetchType", fetchTypeValue);
+            request.setOptions(optionsMap);
+        }
+
+        if(addMatchType) {
+            if(optionsMap == null) {
+                optionsMap = new HashMap<>();
+            }
+
+            optionsMap.put("matchType", matchTypeValue);
+        }
+
+        if(optionsMap != null) {
+            request.setOptions(optionsMap);
+        }
+    }
+
+    private ZipSource runExportWithParameters(AtlasExportRequest request) 
throws AtlasBaseException, IOException {
+        final String requestingIP = "1.0.0.0";
+        final String hostName = "localhost";
+        final String userName = "admin";
+
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        ZipSink zipSink = new ZipSink(baos);
+        AtlasExportResult result = exportService.run(zipSink, request, 
userName, hostName, requestingIP);
+
+        zipSink.close();
+
+        ByteArrayInputStream bis = new 
ByteArrayInputStream(baos.toByteArray());
+        ZipSource zipSource = new ZipSource(bis);
+        return zipSource;
+    }
+
+    @Test
+    public void exportType_Succeeds() throws AtlasBaseException, 
FileNotFoundException {
+        String requestingIP = "1.0.0.0";
+        String hostName = "root";
+
+        AtlasExportRequest request = getRequestForFullFetch();
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        ZipSink zipSink = new ZipSink(baos);
+        AtlasExportResult result = exportService.run(zipSink, request, 
"admin", hostName, requestingIP);
+
+        assertNotNull(exportService);
+        assertEquals(result.getHostName(), hostName);
+        assertEquals(result.getClientIpAddress(), requestingIP);
+        assertEquals(request, result.getRequest());
+    }
+
+    @Test
+    public void requestingEntityNotFound_NoData() throws AtlasBaseException, 
IOException {
+        String requestingIP = "1.0.0.0";
+        String hostName = "root";
+
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        ZipSink zipSink = new ZipSink(baos);
+        AtlasExportResult result = exportService.run(
+                zipSink, getRequestForFullFetch(), "admin", hostName, 
requestingIP);
+
+        Assert.assertNull(result.getData());
+
+        ByteArrayInputStream bais = new 
ByteArrayInputStream(baos.toByteArray());
+        ZipSource zipSource = new ZipSource(bais);
+
+        assertNotNull(exportService);
+        assertNotNull(zipSource.getCreationOrder());
+        Assert.assertFalse(zipSource.hasNext());
+    }
+
+    @Test
+    public void requestingEntityFoundDefaultFetch_ContainsData() throws 
Exception {
+        ZipSource source = runExportWithParameters(
+                getRequestForDept(false, "", false, ""));
+        verifyExportForHrData(source);
+    }
+
+    @Test
+    public void requestingHrEntityWithMatchTypeContains_ContainsData() throws 
Exception {
+        ZipSource source = runExportWithParameters(
+                getRequestForDept(false, "", true, "CONTAINS"));
+        verifyExportForHrData(source);
+    }
+
+    @Test
+    public void requestingHrEntityWithMatchTypeEndsWith_ContainsData() throws 
Exception {
+        ZipSource source = runExportWithParameters(
+                getRequestForDept(false, "", true, "ENDSWITH"));
+        verifyExportForHrData(source);
+    }
+
+    @Test
+    public void requestingDeptEntityFoundFullFetch_ContainsData() throws 
Exception {
+        ZipSource source = runExportWithParameters(
+                getRequestForDept(true, "FULL", false, ""));
+        verifyExportForHrData(source);
+    }
+
+    @Test
+    public void requestingDeptEntityFoundConnectedFetch_ContainsData() throws 
Exception {
+        ZipSource source = runExportWithParameters(
+                getRequestForDept(true, "CONNECTED", false, ""));
+        verifyExportForHrDataForConnected(source);
+    }
+
+    @Test
+    public void requestingEmployeeEntityFoundConnectedFetch_ContainsData() 
throws Exception {
+        ZipSource zipSource = runExportWithParameters(getRequestForEmployee());
+        verifyExportForEmployeeData(zipSource);
+    }
+
+    @Test
+    public void verifyOverallStatus() throws Exception {
+
+//        ExportService service = new ExportService(typeRegistry);
+        assertEquals(AtlasExportResult.OperationStatus.FAIL, exportService.getOverallOperationStatus());
+
+        assertEquals(AtlasExportResult.OperationStatus.SUCCESS, exportService.getOverallOperationStatus(AtlasExportResult.OperationStatus.SUCCESS));
+
+        assertEquals(AtlasExportResult.OperationStatus.SUCCESS, exportService.getOverallOperationStatus(
+                                AtlasExportResult.OperationStatus.SUCCESS,
+                                AtlasExportResult.OperationStatus.SUCCESS,
+                                AtlasExportResult.OperationStatus.SUCCESS));
+
+        assertEquals(AtlasExportResult.OperationStatus.PARTIAL_SUCCESS, exportService.getOverallOperationStatus(
+                AtlasExportResult.OperationStatus.FAIL,
+                AtlasExportResult.OperationStatus.PARTIAL_SUCCESS,
+                AtlasExportResult.OperationStatus.SUCCESS));
+
+        assertEquals(AtlasExportResult.OperationStatus.PARTIAL_SUCCESS, exportService.getOverallOperationStatus(
+                AtlasExportResult.OperationStatus.FAIL,
+                AtlasExportResult.OperationStatus.FAIL,
+                AtlasExportResult.OperationStatus.PARTIAL_SUCCESS));
+
+        assertEquals(AtlasExportResult.OperationStatus.FAIL, exportService.getOverallOperationStatus(
+                AtlasExportResult.OperationStatus.FAIL,
+                AtlasExportResult.OperationStatus.FAIL,
+                AtlasExportResult.OperationStatus.FAIL));
+    }
+
+    @Test
+    public void requestingExportOfNonExistentEntity_ReturnsFailure() throws 
Exception {
+        AtlasExportRequest request = getRequestForEmployee();
+        tamperEmployeeRequest(request);
+        ZipSource zipSource = runExportWithParameters(request);
+
+        assertNotNull(zipSource.getCreationOrder());
+        assertEquals(zipSource.getCreationOrder().size(), 0);
+        assertEquals(AtlasExportResult.OperationStatus.FAIL, 
zipSource.getExportResult().getOperationStatus());
+    }
+
+    private void tamperEmployeeRequest(AtlasExportRequest request) {
+        AtlasObjectId objectId = request.getItemsToExport().get(0);
+        objectId.getUniqueAttributes().remove("name");
+        objectId.getUniqueAttributes().put("qualifiedName", "XXX@121");
+    }
+
+    private void verifyExportForEmployeeData(ZipSource zipSource) throws 
AtlasBaseException {
+        final List<String> expectedEntityTypes = Arrays.asList(new 
String[]{"Manager", "Employee", "Department"});
+
+        assertNotNull(zipSource.getCreationOrder());
+        assertEquals(zipSource.getCreationOrder().size(), 2);
+        assertTrue(zipSource.hasNext());
+
+        while (zipSource.hasNext()) {
+            AtlasEntity entity = zipSource.next();
+
+            assertNotNull(entity);
+            assertEquals(AtlasEntity.Status.ACTIVE, entity.getStatus());
+            assertTrue(expectedEntityTypes.contains(entity.getTypeName()));
+        }
+
+        verifyTypeDefs(zipSource);
+    }
+
+    private void verifyExportForHrData(ZipSource zipSource) throws 
IOException, AtlasBaseException {
+        assertNotNull(zipSource.getCreationOrder());
+        assertTrue(zipSource.getCreationOrder().size() == 1);
+        assertTrue(zipSource.hasNext());
+
+        AtlasEntity entity = zipSource.next();
+
+        assertNotNull(entity);
+        assertTrue(entity.getTypeName().equals("Department"));
+        assertEquals(entity.getStatus(), AtlasEntity.Status.ACTIVE);
+        verifyTypeDefs(zipSource);
+    }
+
+    private void verifyExportForHrDataForConnected(ZipSource zipSource) throws 
IOException, AtlasBaseException {
+        assertNotNull(zipSource.getCreationOrder());
+        assertTrue(zipSource.getCreationOrder().size() == 2);
+        assertTrue(zipSource.hasNext());
+
+        AtlasEntity entity = zipSource.next();
+
+        assertNotNull(entity);
+        assertTrue(entity.getTypeName().equals("Department"));
+        assertEquals(entity.getStatus(), AtlasEntity.Status.ACTIVE);
+        verifyTypeDefs(zipSource);
+    }
+
+    private void verifyTypeDefs(ZipSource zipSource) throws AtlasBaseException 
{
+        assertEquals(zipSource.getTypesDef().getEnumDefs().size(), 1);
+        assertEquals(zipSource.getTypesDef().getClassificationDefs().size(), 
0);
+        assertEquals(zipSource.getTypesDef().getStructDefs().size(), 1);
+        assertEquals(zipSource.getTypesDef().getEntityDefs().size(), 4);
+    }
+}
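
Not part of this commit: a minimal sketch of building an export request like the ones
exercised above ("fetchType"/"CONNECTED" and the Department/name/hr attributes come from
the tests; the helper class itself is made up):

    package org.apache.atlas.repository.impexp;

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.atlas.model.impexp.AtlasExportRequest;
    import org.apache.atlas.model.instance.AtlasObjectId;

    public class ExportRequestSketch {
        // Exports the "hr" Department and everything reachable from it.
        public static AtlasExportRequest connectedExportOfHrDepartment() {
            AtlasExportRequest request = new AtlasExportRequest();
            request.setItemsToExport(Collections.singletonList(new AtlasObjectId("Department", "name", "hr")));

            Map<String, Object> options = new HashMap<>();
            options.put("fetchType", "CONNECTED");
            request.setOptions(options);

            return request;
        }
    }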

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTest.java
new file mode 100644
index 0000000..b24774d
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTest.java
@@ -0,0 +1,224 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import com.google.inject.Inject;
+import org.apache.atlas.AtlasErrorCode;
+import org.apache.atlas.RequestContextV1;
+import org.apache.atlas.TestModules;
+import org.apache.atlas.TestUtilsV2;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasImportRequest;
+import org.apache.atlas.store.AtlasTypeDefStore;
+import org.apache.atlas.type.AtlasClassificationType;
+import org.apache.atlas.type.AtlasTypeRegistry;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.ITestContext;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Guice;
+import org.testng.annotations.Test;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+
+@Guice(modules = TestModules.TestOnlyModule.class)
+public class ImportServiceTest {
+    private static final Logger LOG = LoggerFactory.getLogger(ImportServiceTest.class);
+    private final ImportService importService;
+
+    @Inject
+    AtlasTypeRegistry typeRegistry;
+
+    @Inject
+    private AtlasTypeDefStore typeDefStore;
+
+    @Inject
+    public ImportServiceTest(ImportService importService) {
+        this.importService = importService;
+    }
+
+    @BeforeTest
+    public void setupTest() {
+        RequestContextV1.clear();
+        RequestContextV1.get().setUser(TestUtilsV2.TEST_USER);
+    }
+
+    @DataProvider(name = "sales")
+    public static Object[][] getDataFromQuickStart_v1_Sales(ITestContext 
context) throws IOException {
+        return getZipSource("sales-v1-full.zip");
+    }
+
+    @Test(dataProvider = "sales")
+    public void importDB1(ZipSource zipSource) throws AtlasBaseException, 
IOException {
+        loadBaseModel();
+        runAndVerifyQuickStart_v1_Import(importService, zipSource);
+    }
+
+    @DataProvider(name = "reporting")
+    public static Object[][] getDataFromReporting() throws IOException {
+        return getZipSource("reporting-v1-full.zip");
+    }
+
+    @Test(dataProvider = "reporting")
+    public void importDB2(ZipSource zipSource) throws AtlasBaseException, 
IOException {
+        loadBaseModel();
+        runAndVerifyQuickStart_v1_Import(importService, zipSource);
+    }
+
+    private void loadBaseModel() throws IOException, AtlasBaseException {
+        loadModelFromJson("0000-Area0/0010-base_model.json", typeDefStore, 
typeRegistry);
+    }
+
+    @DataProvider(name = "logging")
+    public static Object[][] getDataFromLogging(ITestContext context) throws 
IOException {
+        return getZipSource("logging-v1-full.zip");
+    }
+
+    @Test(dataProvider = "logging")
+    public void importDB3(ZipSource zipSource) throws AtlasBaseException, 
IOException {
+        loadBaseModel();
+        runAndVerifyQuickStart_v1_Import(importService, zipSource);
+    }
+
+    @DataProvider(name = "salesNewTypeAttrs")
+    public static Object[][] getDataFromSalesNewTypeAttrs(ITestContext 
context) throws IOException {
+        return getZipSource("salesNewTypeAttrs.zip");
+    }
+
+    @Test(dataProvider = "salesNewTypeAttrs", dependsOnMethods = "importDB1")
+    public void importDB4(ZipSource zipSource) throws AtlasBaseException, 
IOException {
+        loadBaseModel();
+        runImportWithParameters(importService, getDefaultImportRequest(), 
zipSource);
+    }
+
+    @DataProvider(name = "salesNewTypeAttrs-next")
+    public static Object[][] getDataFromSalesNewTypeAttrsNext(ITestContext 
context) throws IOException {
+        return getZipSource("salesNewTypeAttrs-next.zip");
+    }
+
+
+    @Test(dataProvider = "salesNewTypeAttrs-next", dependsOnMethods = 
"importDB4")
+    public void importDB5(ZipSource zipSource) throws AtlasBaseException, 
IOException {
+        final String newEnumDefName = "database_action";
+
+        assertNotNull(typeDefStore.getEnumDefByName(newEnumDefName));
+
+        AtlasImportRequest request = getDefaultImportRequest();
+        Map<String, String> options = new HashMap<>();
+        options.put("updateTypeDefinition", "false");
+        request.setOptions(options);
+
+        runImportWithParameters(importService, request, zipSource);
+        assertNotNull(typeDefStore.getEnumDefByName(newEnumDefName));
+        
assertEquals(typeDefStore.getEnumDefByName(newEnumDefName).getElementDefs().size(),
 4);
+    }
+
+    @Test(dataProvider = "salesNewTypeAttrs-next", dependsOnMethods = 
"importDB4")
+    public void importDB6(ZipSource zipSource) throws AtlasBaseException, 
IOException {
+        final String newEnumDefName = "database_action";
+
+        assertNotNull(typeDefStore.getEnumDefByName(newEnumDefName));
+
+        AtlasImportRequest request = getDefaultImportRequest();
+        Map<String, String> options = new HashMap<>();
+        options.put("updateTypeDefinition", "true");
+        request.setOptions(options);
+
+        runImportWithParameters(importService, request, zipSource);
+        assertNotNull(typeDefStore.getEnumDefByName(newEnumDefName));
+        
assertEquals(typeDefStore.getEnumDefByName(newEnumDefName).getElementDefs().size(),
 8);
+    }
+
+    @DataProvider(name = "ctas")
+    public static Object[][] getDataFromCtas(ITestContext context) throws 
IOException {
+        return getZipSource("ctas.zip");
+    }
+
+    @Test(dataProvider = "ctas")
+    public void importCTAS(ZipSource zipSource) throws IOException, 
AtlasBaseException {
+        loadBaseModel();
+        loadHiveModel();
+
+        runImportWithNoParameters(importService, zipSource);
+    }
+
+    @DataProvider(name = "hdfs_path1")
+    public static Object[][] getDataFromHdfsPath1(ITestContext context) throws 
IOException {
+        return getZipSource("hdfs_path1.zip");
+    }
+
+
+    @Test(dataProvider = "hdfs_path1", expectedExceptions = 
AtlasBaseException.class)
+    public void importHdfs_path1(ZipSource zipSource) throws IOException, 
AtlasBaseException {
+        loadBaseModel();
+        loadFsModel();
+        loadModelFromResourcesJson("tag1.json", typeDefStore, typeRegistry);
+
+        try {
+            runImportWithNoParameters(importService, zipSource);
+        } catch (AtlasBaseException e) {
+            assertEquals(e.getAtlasErrorCode(), 
AtlasErrorCode.INVALID_IMPORT_ATTRIBUTE_TYPE_CHANGED);
+            AtlasClassificationType tag1 = 
typeRegistry.getClassificationTypeByName("tag1");
+            assertNotNull(tag1);
+            assertEquals(tag1.getAllAttributes().size(), 2);
+            throw e;
+        }
+    }
+
+    @Test
+    public void importServiceProcessesIOException() {
+        ImportService importService = new ImportService(typeDefStore, 
typeRegistry, null);
+        AtlasImportRequest req = mock(AtlasImportRequest.class);
+
+        Answer<Map> answer = new Answer<Map>() {
+            @Override
+            public Map answer(InvocationOnMock invocationOnMock) throws 
Throwable {
+                throw new IOException("file is read only");
+            }
+        };
+
+        when(req.getFileName()).thenReturn("some-file.zip");
+        when(req.getOptions()).thenAnswer(answer);
+
+        try {
+            importService.run(req, "a", "b", "c");
+        }
+        catch (AtlasBaseException ex) {
+            assertEquals(ex.getAtlasErrorCode().getErrorCode(), 
AtlasErrorCode.INVALID_PARAMETERS.getErrorCode());
+        }
+    }
+
+    private void loadFsModel() throws IOException, AtlasBaseException {
+        loadModelFromJson("1000-Hadoop/1020-fs_model.json", typeDefStore, 
typeRegistry);
+    }
+
+    private void loadHiveModel() throws IOException, AtlasBaseException {
+        loadModelFromJson("1000-Hadoop/1030-hive_model.json", typeDefStore, 
typeRegistry);
+    }
+}
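
Not part of this commit: the wiring pattern the tests above follow when a new ZIP fixture
is added. The fixture name "my-fixture.zip" and the method names are hypothetical; the
helpers (getZipSource, loadBaseModel, runAndVerifyQuickStart_v1_Import) are the ones used
above, and these methods are meant to sit inside ImportServiceTest:

    @DataProvider(name = "myFixture")
    public static Object[][] getDataFromMyFixture(ITestContext context) throws IOException {
        return getZipSource("my-fixture.zip");                      // resolves the ZIP from test resources
    }

    @Test(dataProvider = "myFixture")
    public void importMyFixture(ZipSource zipSource) throws AtlasBaseException, IOException {
        loadBaseModel();                                            // register base typedefs first
        runAndVerifyQuickStart_v1_Import(importService, zipSource); // import and verify against export metrics
    }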

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTestUtils.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTestUtils.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTestUtils.java
new file mode 100644
index 0000000..7289512
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportServiceTestUtils.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import com.google.common.collect.Sets;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasExportResult;
+import org.apache.atlas.model.impexp.AtlasImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
+import 
org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
+import org.apache.atlas.store.AtlasTypeDefStore;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.type.AtlasTypeRegistry;
+import org.testng.Assert;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class ImportServiceTestUtils {
+
+    public static void verifyImportedEntities(List<String> creationOrder, 
List<String> processedEntities) {
+        Set<String> lhs = Sets.newHashSet(creationOrder);
+        Set<String> rhs = Sets.newHashSet(processedEntities);
+        Set<String> difference = Sets.difference(lhs, rhs);
+
+        Assert.assertNotNull(difference);
+        Assert.assertEquals(difference.size(), 0);
+    }
+
+    public static void verifyImportedMetrics(AtlasExportResult exportResult, AtlasImportResult importResult) {
+        Map<String, Integer> metricsForCompare = getImportMetricsForCompare(importResult);
+        for (Map.Entry<String, Integer> entry : exportResult.getMetrics().entrySet()) {
+            if (!entry.getKey().startsWith("entity") ||
+                    entry.getKey().contains("withExtInfo") ||
+                    entry.getKey().contains("Column") ||
+                    entry.getKey().contains("StorageDesc")) continue;
+
+            Assert.assertTrue(metricsForCompare.containsKey(entry.getKey()));
+            Assert.assertEquals(entry.getValue(), metricsForCompare.get(entry.getKey()));
+        }
+    }
+
+    private static Map<String,Integer> 
getImportMetricsForCompare(AtlasImportResult result) {
+        Map<String, Integer> r = new HashMap<>();
+        for (Map.Entry<String, Integer> entry : 
result.getMetrics().entrySet()) {
+            r.put(entry.getKey().replace(":updated", "").replace(":created", 
""), entry.getValue());
+        }
+
+        return r;
+    }
+
+
+    public static void loadModelFromJson(String fileName, AtlasTypeDefStore 
typeDefStore, AtlasTypeRegistry typeRegistry) throws IOException, 
AtlasBaseException {
+        AtlasTypesDef typesFromJson = getAtlasTypesDefFromFile(fileName);
+        createTypesAsNeeded(typesFromJson, typeDefStore, typeRegistry);
+    }
+
+    private static void createTypesAsNeeded(AtlasTypesDef typesFromJson, 
AtlasTypeDefStore typeDefStore, AtlasTypeRegistry typeRegistry) throws 
AtlasBaseException {
+        AtlasTypesDef typesToCreate = 
AtlasTypeDefStoreInitializer.getTypesToCreate(typesFromJson, typeRegistry);
+
+        if (!typesToCreate.isEmpty()) {
+            typeDefStore.createTypesDef(typesToCreate);
+        }
+    }
+
+    private static AtlasTypesDef getAtlasTypesDefFromFile(String fileName) 
throws IOException {
+        String sampleTypes = ZipFileResourceTestUtils.getModelJson(fileName);
+        return AtlasType.fromJson(sampleTypes, AtlasTypesDef.class);
+    }
+
+    public static AtlasImportRequest getDefaultImportRequest() {
+        return new AtlasImportRequest();
+    }
+
+
+    public static AtlasImportResult runImportWithParameters(ImportService 
importService, AtlasImportRequest request, ZipSource source) throws 
AtlasBaseException, IOException {
+        final String requestingIP = "1.0.0.0";
+        final String hostName = "localhost";
+        final String userName = "admin";
+
+        AtlasImportResult result = importService.run(source, request, 
userName, hostName, requestingIP);
+        Assert.assertEquals(result.getOperationStatus(), 
AtlasImportResult.OperationStatus.SUCCESS);
+        return result;
+    }
+
+    public static void runAndVerifyQuickStart_v1_Import(ImportService 
importService, ZipSource zipSource) throws AtlasBaseException, IOException {
+        AtlasExportResult exportResult = zipSource.getExportResult();
+        List<String> creationOrder = zipSource.getCreationOrder();
+
+        AtlasImportRequest request = getDefaultImportRequest();
+        AtlasImportResult result = runImportWithParameters(importService, 
request, zipSource);
+
+        Assert.assertNotNull(result);
+        verifyImportedMetrics(exportResult, result);
+        verifyImportedEntities(creationOrder, result.getProcessedEntities());
+    }
+}

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformerJSONTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformerJSONTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformerJSONTest.java
new file mode 100644
index 0000000..7044e71
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformerJSONTest.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+
+import static org.testng.Assert.*;
+
+public class ImportTransformerJSONTest {
+
+
+    @Test
+    public void createAtlasImportTransformFromJson() throws Exception {
+        String hiveTableType  = "hive_table";
+        String qualifiedName  = "qualifiedName";
+        String jsonTransforms = "{ \"hive_table\": { \"qualifiedName\":[ \"lowercase\", \"replace:@cl1:@cl2\" ] } }";
+
+        ImportTransforms transforms = ImportTransforms.fromJson(jsonTransforms);
+
+        assertNotNull(transforms);
+        assertEquals(transforms.getTransforms().entrySet().size(), 1);
+        assertEquals(transforms.getTransforms().get(hiveTableType).entrySet().size(), 1);
+        assertEquals(transforms.getTransforms().get(hiveTableType).get(qualifiedName).size(), 2);
+        Assert.assertEquals(transforms.getTransforms().get(hiveTableType).get(qualifiedName).get(0).getTransformType(), "lowercase");
+        assertEquals(transforms.getTransforms().get(hiveTableType).get(qualifiedName).get(1).getTransformType(), "replace");
+        assertTrue(transforms.getTransforms().get(hiveTableType).get(qualifiedName).get(1) instanceof ImportTransformer.Replace);
+        assertEquals(((ImportTransformer.Replace)transforms.getTransforms().get(hiveTableType).get(qualifiedName).get(1)).getToFindStr(), "@cl1");
+        assertEquals(((ImportTransformer.Replace)transforms.getTransforms().get(hiveTableType).get(qualifiedName).get(1)).getReplaceStr(), "@cl2");
+    }
+}

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformerTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformerTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformerTest.java
new file mode 100644
index 0000000..7ce34c8
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformerTest.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import org.apache.atlas.exception.AtlasBaseException;
+import org.testng.annotations.Test;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+public class ImportTransformerTest {
+
+    @Test
+    public void createWithCorrectParameters() throws AtlasBaseException, IllegalAccessException {
+        String param1 = "@cl1";
+        String param2 = "@cl2";
+
+        ImportTransformer e = ImportTransformer.getTransformer(String.format("%s:%s:%s", "replace", param1, param2));
+
+        assertTrue(e instanceof ImportTransformer.Replace);
+        assertEquals(((ImportTransformer.Replace)e).getToFindStr(), param1);
+        assertEquals(((ImportTransformer.Replace)e).getReplaceStr(), param2);
+    }
+
+    @Test
+    public void createSeveralWithCorrectParameters() throws AtlasBaseException, IllegalAccessException {
+        String param1 = "@cl1";
+        String param2 = "@cl2";
+
+        ImportTransformer e1 = ImportTransformer.getTransformer(String.format("%s:%s:%s", "replace", param1, param2));
+        ImportTransformer e2 = ImportTransformer.getTransformer("replace:tt1:tt2");
+
+        assertTrue(e1 instanceof ImportTransformer.Replace);
+        assertEquals(((ImportTransformer.Replace)e1).getToFindStr(), param1);
+        assertEquals(((ImportTransformer.Replace)e1).getReplaceStr(), param2);
+
+        assertTrue(e2 instanceof ImportTransformer.Replace);
+        assertEquals(((ImportTransformer.Replace)e2).getToFindStr(), "tt1");
+        assertEquals(((ImportTransformer.Replace)e2).getReplaceStr(), "tt2");
+    }
+
+    @Test
+    public void createWithDefaultParameters() throws AtlasBaseException {
+        ImportTransformer e1 = 
ImportTransformer.getTransformer("replace:@cl1");
+        ImportTransformer e2 = ImportTransformer.getTransformer("replace");
+
+        assertTrue(e1 instanceof ImportTransformer.Replace);
+        assertEquals(((ImportTransformer.Replace)e1).getToFindStr(), "@cl1");
+        assertEquals(((ImportTransformer.Replace)e1).getReplaceStr(), "");
+
+        assertTrue(e2 instanceof ImportTransformer.Replace);
+        assertEquals(((ImportTransformer.Replace)e2).getToFindStr(), "");
+        assertEquals(((ImportTransformer.Replace)e2).getReplaceStr(), "");
+    }
+
+    @Test
+    public void applyLowercaseTransformer() throws AtlasBaseException {
+        ImportTransformer e = ImportTransformer.getTransformer("lowercase");
+
+        assertEquals(e.apply("@CL1"), "@cl1");
+        assertEquals(e.apply("@cl1"), "@cl1");
+        assertEquals(e.apply(""), ""); // empty string
+        assertEquals(e.apply(null), null); // null value: no change
+        assertEquals(e.apply(Integer.valueOf(5)), Integer.valueOf(5)); // 
non-string value: no change
+    }
+
+    @Test
+    public void applyUppercaseTransformer() throws AtlasBaseException {
+        ImportTransformer e = ImportTransformer.getTransformer("uppercase");
+
+        assertEquals(e.apply("@CL1"), "@CL1");
+        assertEquals(e.apply("@cl1"), "@CL1");
+        assertEquals(e.apply(""), ""); // empty string
+        assertEquals(e.apply(null), null); // null value: no change
+        assertEquals(e.apply(Integer.valueOf(5)), Integer.valueOf(5)); // 
non-string value: no change
+    }
+
+    @Test
+    public void applyReplaceTransformer1() throws AtlasBaseException {
+        ImportTransformer e = 
ImportTransformer.getTransformer("replace:@cl1:@cl2");
+
+        assertEquals(e.apply("@cl1"), "@cl2");
+        assertEquals(e.apply("default@cl1"), "default@cl2");
+        assertEquals(e.apply("@cl11"), "@cl21");
+        assertEquals(e.apply("@cl2"), "@cl2");
+        assertEquals(e.apply(""), ""); // empty string
+        assertEquals(e.apply(null), null); // null value
+        assertEquals(e.apply(Integer.valueOf(5)), Integer.valueOf(5)); // 
non-string value: no change
+    }
+
+    @Test
+    public void applyReplaceTransformer2() throws AtlasBaseException {
+        ImportTransformer e = ImportTransformer.getTransformer("replace:@cl1");
+
+        assertEquals(e.apply("@cl1"), "");
+        assertEquals(e.apply("default@cl1"), "default");
+        assertEquals(e.apply("@cl11"), "1");
+        assertEquals(e.apply("@cl2"), "@cl2");
+        assertEquals(e.apply(""), ""); // empty string
+        assertEquals(e.apply(null), null); // null value
+        assertEquals(e.apply(Integer.valueOf(5)), Integer.valueOf(5)); // 
non-string value: no change
+    }
+
+    @Test
+    public void applyReplaceTransformer3() throws AtlasBaseException {
+        ImportTransformer e = ImportTransformer.getTransformer("replace");
+
+        assertEquals(e.apply("@cl1"), "@cl1");
+        assertEquals(e.apply("default@cl1"), "default@cl1");
+        assertEquals(e.apply("@cl11"), "@cl11");
+        assertEquals(e.apply("@cl2"), "@cl2");
+        assertEquals(e.apply(""), ""); // empty string
+        assertEquals(e.apply(null), null); // null value
+        assertEquals(e.apply(Integer.valueOf(5)), Integer.valueOf(5)); // 
non-string value: no change
+    }
+}
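
Not part of this commit: a minimal sketch of the transform-spec grammar the tests above
exercise ("<name>[:<arg1>[:<arg2>]]"); the class name is made up and same-package
visibility of ImportTransformer is assumed:

    package org.apache.atlas.repository.impexp;

    import org.apache.atlas.exception.AtlasBaseException;

    public class TransformerUsageSketch {
        public static void main(String[] args) throws AtlasBaseException {
            ImportTransformer lower   = ImportTransformer.getTransformer("lowercase");
            ImportTransformer replace = ImportTransformer.getTransformer("replace:@cl1:@cl2");

            Object value = "DEFAULT@cl1";
            value = lower.apply(value);     // "default@cl1"
            value = replace.apply(value);   // "default@cl2"
            System.out.println(value);
        }
    }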

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformsTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformsTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformsTest.java
new file mode 100644
index 0000000..ccedeb1
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ImportTransformsTest.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+
+public class ImportTransformsTest {
+    private final String qualifiedName  = "qualifiedName";
+    private final String lowerCaseCL1   = "@cl1";
+    private final String lowerCaseCL2   = "@cl2";
+    private final String jsonTransforms = "{ \"hive_table\": { \"qualifiedName\":[ \"lowercase\", \"replace:@cl1:@cl2\" ] } }";
+
+    private ImportTransforms transform;
+
+    @BeforeTest
+    public void setup() throws AtlasBaseException {
+        transform = ImportTransforms.fromJson(jsonTransforms);
+    }
+
+    @Test
+    public void transformEntityWith2Transforms() throws AtlasBaseException {
+        AtlasEntity entity    = getHiveTableAtlasEntity();
+        String      attrValue = (String) entity.getAttribute(qualifiedName);
+
+        transform.apply(entity);
+
+        assertEquals(entity.getAttribute(qualifiedName), applyDefaultTransform(attrValue));
+    }
+
+    @Test
+    public void transformEntityWithExtInfo() throws AtlasBaseException {
+        addColumnTransform(transform);
+
+        AtlasEntityWithExtInfo entityWithExtInfo = getAtlasEntityWithExtInfo();
+        AtlasEntity            entity            = entityWithExtInfo.getEntity();
+        String                 attrValue         = (String) entity.getAttribute(qualifiedName);
+        String[]               expectedValues    = getExtEntityExpectedValues(entityWithExtInfo);
+
+        transform.apply(entityWithExtInfo);
+
+        assertEquals(entityWithExtInfo.getEntity().getAttribute(qualifiedName), applyDefaultTransform(attrValue));
+
+        for (int i = 0; i < expectedValues.length; i++) {
+            assertEquals(entityWithExtInfo.getReferredEntities().get(Integer.toString(i)).getAttribute(qualifiedName), expectedValues[i]);
+        }
+    }
+
+    @Test
+    public void transformEntityWithExtInfoNullCheck() throws 
AtlasBaseException {
+        addColumnTransform(transform);
+
+        AtlasEntityWithExtInfo entityWithExtInfo = getAtlasEntityWithExtInfo();
+
+        entityWithExtInfo.setReferredEntities(null);
+
+        AtlasEntityWithExtInfo transformedEntityWithExtInfo = transform.apply(entityWithExtInfo);
+
+        assertNotNull(transformedEntityWithExtInfo);
+        assertEquals(entityWithExtInfo.getEntity().getGuid(), transformedEntityWithExtInfo.getEntity().getGuid());
+    }
+
+    private String[] getExtEntityExpectedValues(AtlasEntityWithExtInfo 
entityWithExtInfo) {
+        String[] ret = new 
String[entityWithExtInfo.getReferredEntities().size()];
+
+        for (int i = 0; i < ret.length; i++) {
+            String attrValue = (String) 
entityWithExtInfo.getReferredEntities().get(Integer.toString(i)).getAttribute(qualifiedName);
+
+            ret[i] = attrValue.replace(lowerCaseCL1, lowerCaseCL2);
+        }
+
+        return ret;
+    }
+
+    private void addColumnTransform(ImportTransforms transform) throws 
AtlasBaseException {
+        Map<String, List<ImportTransformer>> tr     = new HashMap<>();
+        List<ImportTransformer>              trList = new ArrayList<>();
+
+        
trList.add(ImportTransformer.getTransformer(String.format("replace:%s:%s", 
lowerCaseCL1, lowerCaseCL2)));
+
+        tr.put(qualifiedName, trList);
+
+        transform.getTransforms().put("hive_column", tr);
+    }
+
+    private String applyDefaultTransform(String attrValue) {
+        return attrValue.toLowerCase().replace(lowerCaseCL1, lowerCaseCL2);
+    }
+
+    private AtlasEntity getHiveTableAtlasEntity() {
+        AtlasEntity entity = new AtlasEntity("hive_table");
+
+        Map<String, Object> attributes = new HashMap<>();
+        attributes.put(qualifiedName, "TABLE1.default" + lowerCaseCL1);
+        attributes.put("dbname", "someDB");
+        attributes.put("name", "somename");
+
+        entity.setAttributes(attributes);
+        return entity;
+    }
+
+    private AtlasEntity getHiveColumnAtlasEntity(int index) {
+        AtlasEntity entity = new AtlasEntity("hive_column");
+
+        Map<String, Object> attributes = new HashMap<>();
+        attributes.put(qualifiedName, String.format("col%s.TABLE1.default@cl1", index));
+        attributes.put("name", "col" + index);
+
+        entity.setAttributes(attributes);
+        return entity;
+    }
+
+    private AtlasEntityWithExtInfo getAtlasEntityWithExtInfo() {
+        AtlasEntityWithExtInfo ret = new AtlasEntityWithExtInfo(getHiveTableAtlasEntity());
+
+        Map<String, AtlasEntity> referredEntities = new HashMap<>();
+        referredEntities.put("0", getHiveColumnAtlasEntity(1));
+        referredEntities.put("1", getHiveColumnAtlasEntity(2));
+        referredEntities.put("2", getHiveColumnAtlasEntity(3));
+
+        ret.setReferredEntities(referredEntities);
+
+        return ret;
+    }
+}
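Note: the restored ImportTransformsTest above is the authoritative reference for the ImportTransforms API. As a quick orientation only, here is a minimal stand-alone sketch of the same fromJson/apply calls; the class name ImportTransformsSketch, its main method and the sample attribute value are made up for illustration and are not part of this commit:

    import org.apache.atlas.exception.AtlasBaseException;
    import org.apache.atlas.model.instance.AtlasEntity;
    import org.apache.atlas.repository.impexp.ImportTransforms;

    import java.util.HashMap;
    import java.util.Map;

    public class ImportTransformsSketch {
        public static void main(String[] args) throws AtlasBaseException {
            // Same transform JSON used by the test: lowercase qualifiedName, then replace @cl1 with @cl2.
            ImportTransforms transforms = ImportTransforms.fromJson(
                    "{ \"hive_table\": { \"qualifiedName\": [ \"lowercase\", \"replace:@cl1:@cl2\" ] } }");

            // Hypothetical entity; mirrors getHiveTableAtlasEntity() in the test above.
            AtlasEntity table = new AtlasEntity("hive_table");
            Map<String, Object> attributes = new HashMap<>();
            attributes.put("qualifiedName", "TABLE1.default@cl1");
            table.setAttributes(attributes);

            transforms.apply(table);

            // Expected to print "table1.default@cl2", per transformEntityWith2Transforms above.
            System.out.println(table.getAttribute("qualifiedName"));
        }
    }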

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/TypeAttributeDifferenceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/TypeAttributeDifferenceTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/TypeAttributeDifferenceTest.java
new file mode 100644
index 0000000..87cb1d4
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/TypeAttributeDifferenceTest.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.model.typedef.AtlasEntityDef;
+import org.apache.atlas.model.typedef.AtlasEnumDef;
+import org.apache.atlas.model.typedef.AtlasStructDef;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class TypeAttributeDifferenceTest {
+    private TypeAttributeDifference typeAttributeDifference;
+
+    @BeforeClass
+    public void setup() {
+        typeAttributeDifference = new TypeAttributeDifference(null, null);
+    }
+
+    private List<AtlasEnumDef.AtlasEnumElementDef> getEnumElementDefs(int startIndex, String... names) {
+        int i = startIndex;
+        List<AtlasEnumDef.AtlasEnumElementDef> list = new ArrayList<>();
+        for (String s: names) {
+            list.add(new AtlasEnumDef.AtlasEnumElementDef(s, s, i++));
+        }
+
+        return list;
+    }
+
+    private List<AtlasStructDef.AtlasAttributeDef> getAtlasAttributeDefs(String... names) {
+        List<AtlasStructDef.AtlasAttributeDef> list = new ArrayList<>();
+        for (String s : names) {
+            list.add(new AtlasStructDef.AtlasAttributeDef(s, AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+        }
+
+        return list;
+    }
+
+    private AtlasEntityDef getAtlasEntityDefWithAttributes(String... attributeNames) {
+        AtlasEntityDef e = new AtlasEntityDef();
+        for (AtlasStructDef.AtlasAttributeDef a : getAtlasAttributeDefs(attributeNames)) {
+            e.addAttribute(a);
+        }
+
+        return e;
+    }
+
+    @Test
+    public void entityDefWithNoAttributes() throws Exception {
+        AtlasEntityDef existing = new AtlasEntityDef();
+        AtlasEntityDef incoming = new AtlasEntityDef();
+        List<AtlasStructDef.AtlasAttributeDef> expectedAttributes = new ArrayList<>();
+        List<AtlasStructDef.AtlasAttributeDef> actualAttributes = invokeGetAttributesAbsentInExisting(existing, incoming);
+
+        Assert.assertEquals(actualAttributes, expectedAttributes);
+    }
+
+    private List<AtlasStructDef.AtlasAttributeDef> invokeGetAttributesAbsentInExisting(AtlasStructDef existing, AtlasStructDef incoming) throws Exception {
+        return typeAttributeDifference.getElementsAbsentInExisting(existing, incoming);
+    }
+
+    private List<AtlasEnumDef.AtlasEnumElementDef> invokeGetAttributesAbsentInExisting(AtlasEnumDef existing, AtlasEnumDef incoming) throws Exception {
+        return typeAttributeDifference.getElementsAbsentInExisting(existing, incoming);
+    }
+
+    private AtlasEnumDef getAtlasEnumWithAttributes(String... elements) {
+        AtlasEnumDef enumDef = new AtlasEnumDef();
+        for (AtlasEnumDef.AtlasEnumElementDef ed : getEnumElementDefs(0, elements)) {
+            enumDef.addElement(ed);
+        }
+
+        return enumDef;
+    }
+
+    @Test
+    public void bothSame_DifferenceIsEmptyList() throws Exception {
+        AtlasEntityDef existing = getAtlasEntityDefWithAttributes("name", "qualifiedName");
+        AtlasEntityDef incoming = getAtlasEntityDefWithAttributes("name", "qualifiedName");
+
+        List<AtlasStructDef.AtlasAttributeDef> expectedAttributes = getAtlasAttributeDefs();
+        List<AtlasStructDef.AtlasAttributeDef> actualAttributes = invokeGetAttributesAbsentInExisting(existing, incoming);
+
+        Assert.assertEquals(actualAttributes, expectedAttributes);
+    }
+
+    @Test
+    public void different_ReturnsDifference() throws Exception {
+        AtlasEntityDef existing = getAtlasEntityDefWithAttributes("name");
+        AtlasEntityDef incoming = getAtlasEntityDefWithAttributes("name", "qualifiedName");
+        List<AtlasStructDef.AtlasAttributeDef> expectedAttributes = getAtlasAttributeDefs("qualifiedName");
+
+        List<AtlasStructDef.AtlasAttributeDef> actualAttributes = invokeGetAttributesAbsentInExisting(existing, incoming);
+        Assert.assertEquals(actualAttributes, expectedAttributes);
+    }
+
+    @Test
+    public void differentSubset_ReturnsDifference() throws Exception {
+        AtlasEntityDef existing = getAtlasEntityDefWithAttributes("name", "qualifiedName");
+        AtlasEntityDef incoming = getAtlasEntityDefWithAttributes("name");
+        List<AtlasStructDef.AtlasAttributeDef> actualAttributes = invokeGetAttributesAbsentInExisting(existing, incoming);
+
+        List<AtlasStructDef.AtlasAttributeDef> expectedAttributes = getAtlasAttributeDefs();
+        Assert.assertEquals(actualAttributes, expectedAttributes);
+    }
+
+    @Test
+    public void differentEnumDef_ReturnsDifference() throws Exception {
+        AtlasEnumDef existing = getAtlasEnumWithAttributes("Alpha", "Bravo");
+        AtlasEnumDef incoming = getAtlasEnumWithAttributes("Alpha", "Bravo", "Delta", "Echo");
+        List<AtlasEnumDef.AtlasEnumElementDef> actualAttributes = invokeGetAttributesAbsentInExisting(existing, incoming);
+
+        List<AtlasEnumDef.AtlasEnumElementDef> expectedAttributes = getEnumElementDefs(2, "Delta", "Echo");
+        Assert.assertEquals(actualAttributes, expectedAttributes);
+    }
+
+    @Test
+    public void differentEnumDefs_ReturnsDifference() throws Exception {
+        AtlasEnumDef existing = getAtlasEnumWithAttributes("Alpha", "Bravo");
+        AtlasEnumDef incoming = getAtlasEnumWithAttributes("Alpha", "Bravo", "Delta", "Echo");
+        boolean ret = invokeUpdate(existing, incoming);
+
+        List<AtlasEnumDef.AtlasEnumElementDef> expectedAttributes = getEnumElementDefs(0, "Alpha", "Bravo", "Delta", "Echo");
+
+        Assert.assertTrue(ret, "Update took place");
+        Assert.assertEquals(existing.getElementDefs(), expectedAttributes);
+    }
+
+    private boolean invokeUpdate(AtlasEnumDef existing, AtlasEnumDef incoming) throws Exception {
+        return typeAttributeDifference.addElements(existing, incoming);
+    }
+}

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/UniqueListTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/UniqueListTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/UniqueListTest.java
new file mode 100644
index 0000000..93aa518
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/UniqueListTest.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import static org.testng.Assert.assertEquals;
+
+public class UniqueListTest {
+    private final String firstElement = "firstElement";
+    private ExportService.UniqueList<String> uniqueList;
+
+    @BeforeClass
+    public void setup() {
+        uniqueList = new ExportService.UniqueList();
+        uniqueList.add(firstElement);
+        uniqueList.add("def");
+        uniqueList.add("firstElement");
+        uniqueList.add("ghi");
+    }
+
+    @Test
+    public void add3Elements_ListHas2() {
+        assertEquals(3, uniqueList.size());
+    }
+
+    @Test
+    public void addAllList_ListHas2() {
+        ExportService.UniqueList<String> uniqueList2 = new ExportService.UniqueList<>();
+        uniqueList2.addAll(uniqueList);
+
+        assertEquals(3, uniqueList2.size());
+    }
+
+    @Test
+    public void attemptClear_SizeIsZero() {
+        ExportService.UniqueList<String> uniqueList2 = new ExportService.UniqueList<>();
+        uniqueList2.addAll(uniqueList);
+        uniqueList2.clear();
+
+        assertEquals(0, uniqueList2.size());
+    }
+
+    @Test
+    public void attemptOneRemove_SizeIsReduced() {
+        ExportService.UniqueList<String> uniqueList2 = new ExportService.UniqueList<>();
+        uniqueList2.addAll(uniqueList);
+        String removedElement = uniqueList2.remove(0);
+
+        assertEquals(2, uniqueList2.size());
+        assertEquals(firstElement, removedElement);
+    }
+}
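The UniqueListTest above pins down the contract of ExportService.UniqueList: duplicates are dropped and insertion order is kept. A tiny illustrative snippet, using made-up values and only the add/remove/clear/size calls exercised by the test:

    ExportService.UniqueList<String> guids = new ExportService.UniqueList<>();
    guids.add("guid-1");
    guids.add("guid-2");
    guids.add("guid-1");              // duplicate: ignored, size() stays 2
    String first = guids.remove(0);   // returns "guid-1", the first element added
    guids.clear();                    // size() is now 0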

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/ZipFileResourceTestUtils.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ZipFileResourceTestUtils.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ZipFileResourceTestUtils.java
new file mode 100644
index 0000000..5ab8c01
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ZipFileResourceTestUtils.java
@@ -0,0 +1,220 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+import com.google.common.collect.Sets;
+import org.apache.atlas.RequestContextV1;
+import org.apache.atlas.TestUtilsV2;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasExportResult;
+import org.apache.atlas.model.impexp.AtlasImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
+import org.apache.atlas.store.AtlasTypeDefStore;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.atlas.utils.TestResourceFileUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertTrue;
+
+public class ZipFileResourceTestUtils {
+    public static final Logger LOG = LoggerFactory.getLogger(ZipFileResourceTestUtils.class);
+
+    public static FileInputStream getFileInputStream(String fileName) {
+        return TestResourceFileUtils.getFileInputStream(fileName);
+    }
+
+    public static String getModelJson(String fileName) throws IOException {
+        String  ret                 = null;
+        File   topModelsDir         = new File(System.getProperty("user.dir") + "/../addons/models");
+        File[] topModelsDirContents = topModelsDir.exists() ? topModelsDir.listFiles() : null;
+
+        assertTrue(topModelsDirContents != null, topModelsDir.getAbsolutePath() + ": unable to find/read directory");
+        if(topModelsDirContents != null) {
+            Arrays.sort(topModelsDirContents);
+            for (File modelDir : topModelsDirContents) {
+                if (modelDir.exists() && modelDir.isDirectory()) {
+                    ret = getFileContents(modelDir, fileName);
+
+                    if (ret != null) {
+                        break;
+                    }
+                }
+            }
+
+            if (ret == null) {
+                ret = getFileContents(topModelsDir, fileName);
+            }
+
+            assertTrue(ret != null, fileName + ": unable to find model file");
+        } else {
+            throw new IOException("Unable to retrieve model contents.");
+        }
+
+        return ret;
+    }
+
+    public static String getFileContents(File dir, String fileName) throws IOException {
+        if (dir.exists() && dir.isDirectory()) {
+            File file = new File(dir, fileName);
+
+            if (file.exists() && file.isFile()) {
+                return FileUtils.readFileToString(file);
+            }
+        }
+
+        return null;
+    }
+
+    public static String getModelJsonFromResources(String fileName) throws IOException {
+        String filePath = TestResourceFileUtils.getTestFilePath(fileName);
+        File f = new File(filePath);
+        String s = FileUtils.readFileToString(f);
+        assertFalse(StringUtils.isEmpty(s), "Model file read correctly from resources!");
+
+        return s;
+    }
+
+    public static Object[][] getZipSource(String fileName) throws IOException {
+        FileInputStream fs = ZipFileResourceTestUtils.getFileInputStream(fileName);
+
+        return new Object[][]{{new ZipSource(fs)}};
+    }
+
+
+    public static void verifyImportedEntities(List<String> creationOrder, List<String> processedEntities) {
+        Set<String> lhs = com.google.common.collect.Sets.newHashSet(creationOrder);
+        Set<String> rhs = com.google.common.collect.Sets.newHashSet(processedEntities);
+        Set<String> difference = Sets.difference(lhs, rhs);
+
+        assertNotNull(difference);
+        assertEquals(difference.size(), 0);
+    }
+
+    public static void verifyImportedMetrics(AtlasExportResult exportResult, AtlasImportResult importResult) {
+        Map<String, Integer> metricsForCompare = getImportMetricsForCompare(importResult);
+        for (Map.Entry<String, Integer> entry : exportResult.getMetrics().entrySet()) {
+            if(entry.getKey().startsWith("entity") == false ||
+                    entry.getKey().contains("withExtInfo") ||
+                    entry.getKey().contains("Column") ||
+                    entry.getKey().contains("StorageDesc")) continue;
+
+            assertTrue(metricsForCompare.containsKey(entry.getKey()), entry.getKey());
+            assertEquals(entry.getValue(), metricsForCompare.get(entry.getKey()), entry.getKey());
+        }
+    }
+
+    private static Map<String, Integer> getImportMetricsForCompare(AtlasImportResult result) {
+        Map<String, Integer> r = new HashMap<>();
+        for (Map.Entry<String, Integer> entry : result.getMetrics().entrySet()) {
+            r.put(entry.getKey().replace(":updated", "").replace(":created", ""), entry.getValue());
+        }
+
+        return r;
+    }
+
+
+    public static void loadModelFromJson(String fileName, AtlasTypeDefStore typeDefStore, AtlasTypeRegistry typeRegistry) throws IOException, AtlasBaseException {
+        AtlasTypesDef typesFromJson = getAtlasTypesDefFromFile(fileName);
+        createTypesAsNeeded(typesFromJson, typeDefStore, typeRegistry);
+    }
+
+    public static void loadModelFromResourcesJson(String fileName, AtlasTypeDefStore typeDefStore, AtlasTypeRegistry typeRegistry) throws IOException, AtlasBaseException {
+        AtlasTypesDef typesFromJson = getAtlasTypesDefFromResourceFile(fileName);
+        createTypesAsNeeded(typesFromJson, typeDefStore, typeRegistry);
+    }
+
+    private static void createTypesAsNeeded(AtlasTypesDef typesFromJson, AtlasTypeDefStore typeDefStore, AtlasTypeRegistry typeRegistry) throws AtlasBaseException {
+        if(typesFromJson == null) {
+            return;
+        }
+
+        AtlasTypesDef typesToCreate = AtlasTypeDefStoreInitializer.getTypesToCreate(typesFromJson, typeRegistry);
+        if (typesToCreate != null && !typesToCreate.isEmpty()) {
+            typeDefStore.createTypesDef(typesToCreate);
+        }
+    }
+
+    private static AtlasTypesDef getAtlasTypesDefFromFile(String fileName) throws IOException {
+        String sampleTypes = ZipFileResourceTestUtils.getModelJson(fileName);
+        if(sampleTypes == null) return null;
+        return AtlasType.fromJson(sampleTypes, AtlasTypesDef.class);
+    }
+
+    private static AtlasTypesDef getAtlasTypesDefFromResourceFile(String fileName) throws IOException {
+        String sampleTypes = getModelJsonFromResources(fileName);
+        return AtlasType.fromJson(sampleTypes, AtlasTypesDef.class);
+    }
+
+    public static AtlasImportRequest getDefaultImportRequest() {
+        return new AtlasImportRequest();
+    }
+
+
+    public static AtlasImportResult runImportWithParameters(ImportService importService, AtlasImportRequest request, ZipSource source) throws AtlasBaseException, IOException {
+        final String requestingIP = "1.0.0.0";
+        final String hostName = "localhost";
+        final String userName = "admin";
+
+        AtlasImportResult result = importService.run(source, request, userName, hostName, requestingIP);
+        assertEquals(result.getOperationStatus(), AtlasImportResult.OperationStatus.SUCCESS);
+        return result;
+    }
+
+    public static AtlasImportResult runImportWithNoParameters(ImportService importService, ZipSource source) throws AtlasBaseException, IOException {
+        final String requestingIP = "1.0.0.0";
+        final String hostName = "localhost";
+        final String userName = "admin";
+
+        AtlasImportResult result = importService.run(source, userName, hostName, requestingIP);
+        assertEquals(result.getOperationStatus(), AtlasImportResult.OperationStatus.SUCCESS);
+        return result;
+    }
+
+    public static void runAndVerifyQuickStart_v1_Import(ImportService importService, ZipSource zipSource) throws AtlasBaseException, IOException {
+        AtlasExportResult exportResult = zipSource.getExportResult();
+        List<String> creationOrder = zipSource.getCreationOrder();
+
+        RequestContextV1.clear();
+        RequestContextV1.get().setUser(TestUtilsV2.TEST_USER);
+
+        AtlasImportRequest request = getDefaultImportRequest();
+        AtlasImportResult result = runImportWithParameters(importService, request, zipSource);
+
+        assertNotNull(result);
+        verifyImportedMetrics(exportResult, result);
+        verifyImportedEntities(creationOrder, result.getProcessedEntities());
+    }
+}

http://git-wip-us.apache.org/repos/asf/atlas/blob/042fc557/repository/src/test/java/org/apache/atlas/repository/impexp/ZipSinkTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ZipSinkTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ZipSinkTest.java
new file mode 100644
index 0000000..e8bbeb5
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ZipSinkTest.java
@@ -0,0 +1,210 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.repository.impexp;
+
+
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.impexp.AtlasExportRequest;
+import org.apache.atlas.model.impexp.AtlasExportResult;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasObjectId;
+import org.apache.atlas.type.AtlasType;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+import static org.testng.Assert.*;
+
+public class ZipSinkTest {
+    private ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+    private ZipSink zipSink;
+    private List<String> defaultExportOrder = new ArrayList<>(Arrays.asList("a", "b", "c", "d"));
+    private AtlasExportResult defaultExportResult;
+    private String knownEntityGuidFormat = "111-222-333-%s";
+
+
+    private void initZipSinkWithExportOrder() throws AtlasBaseException {
+        zipSink = new ZipSink(byteArrayOutputStream);
+        zipSink.setExportOrder(defaultExportOrder);
+        zipSink.close();
+    }
+
+    private AtlasExportResult getDefaultExportResult() {
+        AtlasExportRequest request = new AtlasExportRequest();
+
+        List<AtlasObjectId> itemsToExport = new ArrayList<>();
+        itemsToExport.add(new AtlasObjectId("hive_db", "qualifiedName", "default"));
+        request.setItemsToExport(itemsToExport);
+
+        defaultExportResult = new AtlasExportResult(request, "admin", "1.0.0.0", "root", 100);
+        return defaultExportResult;
+    }
+
+    private ZipInputStream getZipInputStreamForDefaultExportOrder() throws AtlasBaseException {
+        initZipSinkWithExportOrder();
+
+        ByteArrayInputStream bis = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());
+        return new ZipInputStream(bis);
+    }
+
+    private String getZipEntryAsStream(ZipInputStream zis) throws IOException {
+        byte[] buf = new byte[1024];
+        int n = 0;
+        ByteArrayOutputStream bos = new ByteArrayOutputStream();
+        while ((n = zis.read(buf, 0, 1024)) > -1) {
+            bos.write(buf, 0, n);
+        }
+
+        Assert.assertNotNull(bos);
+        return bos.toString();
+    }
+
+    @Test
+    public void correctInit_succeeds() throws AtlasBaseException {
+        initZipSinkWithExportOrder();
+        assertTrue(true);
+        Assert.assertNotNull(zipSink);
+    }
+
+    @Test
+    public void zipWithExactlyOneEntry_succeeds() {
+
+        try {
+            ZipInputStream zis = getZipInputStreamForDefaultExportOrder();
+
+            try {
+                Assert.assertNotNull(zis.getNextEntry());
+                Assert.assertNull(zis.getNextEntry());
+            } catch (IOException e) {
+
+                assertTrue(false);
+            }
+        } catch (AtlasBaseException e) {
+
+            assertTrue(false, "No exception should be thrown.");
+        }
+    }
+
+    @Test
+    public void verifyExportOrderEntryName_verifies() throws AtlasBaseException, IOException {
+
+        ZipInputStream zis = getZipInputStreamForDefaultExportOrder();
+        ZipEntry ze = zis.getNextEntry();
+
+        assertEquals(ze.getName().replace(".json", ""), ZipExportFileNames.ATLAS_EXPORT_ORDER_NAME.toString());
+    }
+
+    @Test
+    public void zipWithExactlyOneEntry_ContentsVerified() throws AtlasBaseException, IOException {
+
+        ZipInputStream zis = getZipInputStreamForDefaultExportOrder();
+        zis.getNextEntry();
+
+        assertEquals(getZipEntryAsStream(zis).replace("\"", "'"), "['a','b','c','d']");
+    }
+
+    @Test
+    public void zipWithExactlyTwoEntries_ContentsVerified() throws AtlasBaseException, IOException {
+
+        ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
+        useZipSinkToCreateEntries(byteOutputStream);
+
+        ByteArrayInputStream bis = new ByteArrayInputStream(byteOutputStream.toByteArray());
+        ZipInputStream zipStream = new ZipInputStream(bis);
+        ZipEntry entry = zipStream.getNextEntry();
+
+        assertEquals(getZipEntryAsStream(zipStream), "[\"a\",\"b\",\"c\",\"d\"]");
+        assertEquals(entry.getName().replace(".json", ""), ZipExportFileNames.ATLAS_EXPORT_ORDER_NAME.toString());
+
+        entry = zipStream.getNextEntry();
+        assertEquals(entry.getName().replace(".json", ""), ZipExportFileNames.ATLAS_EXPORT_INFO_NAME.toString());
+        assertTrue(compareJsonWithObject(getZipEntryAsStream(zipStream), defaultExportResult));
+    }
+
+    @Test
+    public void recordsEntityEntries() throws AtlasBaseException {
+        ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
+        ZipSink zs = new ZipSink(byteOutputStream);
+
+        AtlasEntity entity = new AtlasEntity();
+        entity.setGuid(String.format(knownEntityGuidFormat, 0));
+
+        zs.add(entity);
+        assertTrue(zs.hasEntity(String.format(knownEntityGuidFormat, 0)));
+
+        zs.close();
+    }
+
+    @Test
+    public void recordsEntityWithExtInfoEntries() throws AtlasBaseException {
+        final int max_entries = 3;
+        ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
+        ZipSink zs = new ZipSink(byteOutputStream);
+
+        AtlasEntity entity = new AtlasEntity();
+        entity.setGuid(String.format(knownEntityGuidFormat, 0));
+
+        AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = new AtlasEntity.AtlasEntityWithExtInfo(entity);
+        addReferredEntities(entityWithExtInfo, max_entries);
+
+        zs.add(entityWithExtInfo);
+        for (int i = 0; i <= max_entries; i++) {
+            String g = String.format(knownEntityGuidFormat, i);
+            assertTrue(zs.hasEntity(g));
+        }
+
+        zs.close();
+    }
+
+    private void addReferredEntities(AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo, int maxEntries) {
+
+        for (int i = 1; i <= maxEntries; i++) {
+            AtlasEntity entity1 = new AtlasEntity();
+            entity1.setGuid(String.format(knownEntityGuidFormat, i));
+            entityWithExtInfo.addReferredEntity(entity1);
+        }
+    }
+
+    @Test
+    public void recordsDoesNotRecordEntityEntries() throws AtlasBaseException {
+        initZipSinkWithExportOrder();
+
+        assertNotNull(zipSink);
+        assertFalse(zipSink.hasEntity(ZipExportFileNames.ATLAS_EXPORT_ORDER_NAME.toString()));
+    }
+
+    private void useZipSinkToCreateEntries(ByteArrayOutputStream byteOutputStream) throws AtlasBaseException {
+        ZipSink zs = new ZipSink(byteOutputStream);
+        zs.setExportOrder(defaultExportOrder);
+        zs.setResult(getDefaultExportResult());
+        zs.close();
+    }
+
+    private boolean compareJsonWithObject(String s, AtlasExportResult defaultExportResult) {
+        String json = AtlasType.toJson(defaultExportResult);
+        return json.equals(s);
+    }
+}
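Taken together, the ZipSinkTest cases above cover the full ZipSink write path. A condensed sketch of that usage follows; the export-result construction mirrors getDefaultExportResult in the test, while the variable names and imports are illustrative only:

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ZipSink sink = new ZipSink(out);

    sink.setExportOrder(Arrays.asList("a", "b", "c", "d"));  // first zip entry: the export order
    AtlasExportRequest request = new AtlasExportRequest();
    request.setItemsToExport(Collections.singletonList(new AtlasObjectId("hive_db", "qualifiedName", "default")));
    sink.setResult(new AtlasExportResult(request, "admin", "1.0.0.0", "root", 100));  // second zip entry: export info
    sink.close();                                            // the finished zip bytes are now in 'out'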
