Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 a72dbc40a -> 8dbdbf66d


http://git-wip-us.apache.org/repos/asf/ambari/blob/8dbdbf66/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
new file mode 100644
index 0000000..08e8b81
--- /dev/null
+++ 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/AggregatorTest.java
@@ -0,0 +1,396 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.resources.jobs;
+
+import org.apache.ambari.view.hive.persistence.utils.FilteringStrategy;
+import org.apache.ambari.view.hive.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.hive.resources.IResourceManager;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.HiveQueryId;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.IATSParser;
+import org.apache.ambari.view.hive.resources.jobs.atsJobs.TezDagId;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.Job;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
+import org.apache.hive.service.cli.thrift.TOperationHandle;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.*;
+
+public class AggregatorTest {
+
+  public static final String SOME_QUERY = "some query";
+
+  @Test
+  public void testReadJobOutsideOfHS2() throws Exception {
+    HiveQueryId hiveQueryId = getSampleHiveQueryId("ENTITY-NAME");
+    ensureOperationIdUnset(hiveQueryId);
+
+    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
+
+
+    Aggregator aggregator = new Aggregator(getEmptyJobResourceManager(),
+        getEmptyOperationHandleResourceManager(),
+        atsParser);
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(1, aggregated.size());
+    Job job = aggregated.get(0);
+    Assert.assertEquals("ENTITY-NAME", job.getId());
+    Assert.assertEquals(SOME_QUERY, job.getTitle());
+  }
+
+  @Test
+  public void testReadJobWithHS2OutsideOfView() throws Exception {
+    HiveQueryId hiveQueryId = getSampleHiveQueryId("ENTITY-NAME");
+    ensureOperationIdSet(hiveQueryId);
+
+    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
+    Aggregator aggregator = new Aggregator(getEmptyJobResourceManager(),
+        getEmptyOperationHandleResourceManager(),
+        atsParser);
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(1, aggregated.size());
+    Job job = aggregated.get(0);
+    Assert.assertEquals("ENTITY-NAME", job.getId());
+    Assert.assertEquals(SOME_QUERY, job.getTitle());
+  }
+
+  @Test
+  public void testJobWithoutOperationIdShouldBeIgnored() throws Exception {
+    MockJobResourceManager jobResourceManager = 
getJobResourceManagerWithJobs(getSampleViewJob("1"));
+
+    Aggregator aggregator = new Aggregator(jobResourceManager,
+        getEmptyOperationHandleResourceManager(),
+        getEmptyATSParser());
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(0, aggregated.size());
+  }
+
+  @Test
+  public void testReadJobOnlyInView() throws Exception {
+    MockJobResourceManager jobResourceManager = 
getJobResourceManagerWithJobs(getSampleViewJob("1"));
+
+    StoredOperationHandle operationHandle = getSampleOperationHandle("5", "1");
+    MockOperationHandleResourceManager operationHandleResourceManager = 
getOperationHandleRMWithEntities(operationHandle);
+
+    Aggregator aggregator = new Aggregator(jobResourceManager,
+        operationHandleResourceManager,
+        getEmptyATSParser());
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(1, aggregated.size());
+    Job job = aggregated.get(0);
+    Assert.assertEquals("1", job.getId());
+  }
+
+  private MockOperationHandleResourceManager 
getOperationHandleRMWithEntities(StoredOperationHandle... operationHandles) {
+    MockOperationHandleResourceManager operationHandleResourceManager = 
getEmptyOperationHandleResourceManager();
+    HashMap<String, StoredOperationHandle> storage = new HashMap<String, 
StoredOperationHandle>();
+    for (StoredOperationHandle handle : operationHandles) {
+      storage.put(handle.getJobId(), handle);
+    }
+    operationHandleResourceManager.setStorage(storage);
+    return operationHandleResourceManager;
+  }
+
+  @Test
+  public void testReadJobBothATSAndView() throws Exception {
+    HiveQueryId hiveQueryId = getSampleHiveQueryId("ENTITY-NAME");
+    hiveQueryId.operationId = Aggregator.hexStringToUrlSafeBase64("1b2b");
+    MockATSParser atsParser = getMockATSWithQueries(hiveQueryId);
+
+    MockJobResourceManager jobResourceManager = 
getJobResourceManagerWithJobs(getSampleViewJob("1"));
+
+    StoredOperationHandle operationHandle = getSampleOperationHandle("5", "1");
+    operationHandle.setGuid("1b2b");
+    MockOperationHandleResourceManager operationHandleResourceManager = 
getOperationHandleRMWithEntities(operationHandle);
+
+    Aggregator aggregator = new Aggregator(jobResourceManager,
+        operationHandleResourceManager,
+        atsParser);
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(1, aggregated.size());
+    Job job = aggregated.get(0);
+    Assert.assertEquals("1", job.getId());
+  }
+
+  @Test
+  public void testReadJobComplex() throws Exception {
+    //job both on ATS and View
+    HiveQueryId hiveQueryId1 = getSampleHiveQueryId("ENTITY-NAME");
+    hiveQueryId1.operationId = Aggregator.hexStringToUrlSafeBase64("1a1b");
+    Job job1 = getSampleViewJob("1");
+    StoredOperationHandle operationHandle1 = getSampleOperationHandle("5", 
"1");
+    operationHandle1.setGuid("1a1b");
+
+    //job only on ATS
+    HiveQueryId hiveQueryId2 = getSampleHiveQueryId("ENTITY-NAME2");
+    hiveQueryId2.operationId = Aggregator.hexStringToUrlSafeBase64("2a2a");
+
+    //job only in View
+    Job job3 = getSampleViewJob("3");
+    StoredOperationHandle operationHandle3 = getSampleOperationHandle("6", 
"3");
+    operationHandle3.setGuid("3c3d");
+
+
+    MockATSParser atsParser = getMockATSWithQueries(
+        hiveQueryId1, hiveQueryId2);
+    MockJobResourceManager jobResourceManager = getJobResourceManagerWithJobs(
+        job1, job3);
+    MockOperationHandleResourceManager operationHandleRM = 
getOperationHandleRMWithEntities(
+        operationHandle1, operationHandle3);
+
+    Aggregator aggregator = new Aggregator(jobResourceManager,
+        operationHandleRM,
+        atsParser);
+
+    List<Job> aggregated = aggregator.readAll("luke");
+
+    Assert.assertEquals(3, aggregated.size());
+  }
+
+  private MockJobResourceManager getJobResourceManagerWithJobs(Job... jobs) {
+    MockJobResourceManager jobResourceManager = getEmptyJobResourceManager();
+    jobResourceManager.setJobs(Arrays.asList(jobs));
+    return jobResourceManager;
+  }
+
+  private MockATSParser getEmptyATSParser() {
+    return new MockATSParser();
+  }
+
+  private void ensureOperationIdUnset(HiveQueryId hiveQueryId) {
+    hiveQueryId.operationId = null;
+  }
+
+  private void ensureOperationIdSet(HiveQueryId hiveQueryId) {
+    hiveQueryId.operationId = "operation-id";
+  }
+
+  private MockOperationHandleResourceManager 
getEmptyOperationHandleResourceManager() {
+    return new MockOperationHandleResourceManager();
+  }
+
+  private MockJobResourceManager getEmptyJobResourceManager() {
+    return new MockJobResourceManager();
+  }
+
+  private MockATSParser getMockATSWithQueries(HiveQueryId... hiveQueryIds) {
+    MockATSParser atsParser = getEmptyATSParser();
+    atsParser.setHiveQueryIds(Arrays.asList(hiveQueryIds));
+    return atsParser;
+  }
+
+  private JobImpl getSampleViewJob(String id) {
+    JobImpl job = new JobImpl();
+    job.setTitle("Test");
+    job.setId(id);
+    job.setOwner("luke");
+    return job;
+  }
+
+  private StoredOperationHandle getSampleOperationHandle(String id, String 
jobId) {
+    StoredOperationHandle opHandle = new StoredOperationHandle();
+    opHandle.setId(id);
+    opHandle.setJobId(jobId);
+    opHandle.setGuid("1b2b");
+    return opHandle;
+  }
+
+  private HiveQueryId getSampleHiveQueryId(String id) {
+    HiveQueryId hiveQueryId = new HiveQueryId();
+    hiveQueryId.entity = id;
+    hiveQueryId.query = SOME_QUERY;
+    hiveQueryId.user = "luke";
+    hiveQueryId.operationId = "fUjdt-VMRYuKRPCDTUr_rg";
+    hiveQueryId.dagNames = new LinkedList<String>();
+    return hiveQueryId;
+  }
+
+  @Test
+  public void testGetJobByOperationId() throws Exception {
+    // TODO: implement — empty placeholder test, currently passes without asserting anything
+  }
+
+  @Test
+  public void testHexStringToUrlSafeBase64() throws Exception {
+    String urlSafe = Aggregator.hexStringToUrlSafeBase64("1a1b");
+    Assert.assertEquals("Ghs", urlSafe);
+  }
+
+  @Test
+  public void testUrlSafeBase64ToHexString() throws Exception {
+    String hex = Aggregator.urlSafeBase64ToHexString("Ghs");
+    Assert.assertEquals("1a1b", hex);
+  }
+
+  public static class MockJobResourceManager implements IResourceManager<Job> {
+
+    private List<Job> jobs = new LinkedList<Job>();
+
+    @Override
+    public Job create(Job object) {
+      return null;
+    }
+
+    @Override
+    public Job read(Object id) throws ItemNotFound {
+      for(Job job : jobs) {
+        if (job.getId().equals(id))
+          return job;
+      }
+      throw new ItemNotFound();
+    }
+
+    @Override
+    public List<Job> readAll(FilteringStrategy filteringStrategy) {
+      return jobs;
+    }
+
+    @Override
+    public Job update(Job newObject, String id) throws ItemNotFound {
+      return null;
+    }
+
+    @Override
+    public void delete(Object resourceId) throws ItemNotFound {
+
+    }
+
+    public List<Job> getJobs() {
+      return jobs;
+    }
+
+    public void setJobs(List<Job> jobs) {
+      this.jobs = jobs;
+    }
+  }
+
+  public static class MockOperationHandleResourceManager implements 
IOperationHandleResourceManager {
+    private HashMap<String, StoredOperationHandle> storage = new 
HashMap<String, StoredOperationHandle>();
+
+    public MockOperationHandleResourceManager() {
+
+    }
+
+    @Override
+    public List<StoredOperationHandle> readJobRelatedHandles(Job job) {
+      LinkedList<StoredOperationHandle> storedOperationHandles = new 
LinkedList<StoredOperationHandle>();
+      StoredOperationHandle operationHandle = storage.get(job.getId());
+      if (operationHandle != null)
+        storedOperationHandles.add(operationHandle);
+      return storedOperationHandles;
+    }
+
+    @Override
+    public void putHandleForJob(TOperationHandle h, Job job) {
+
+    }
+
+    @Override
+    public boolean containsHandleForJob(Job job) {
+      return false;
+    }
+
+    @Override
+    public TOperationHandle getHandleForJob(Job job) throws ItemNotFound {
+      List<StoredOperationHandle> handles = readJobRelatedHandles(job);
+      if (handles.size() == 0)
+        throw new ItemNotFound();
+      return handles.get(0).toTOperationHandle();
+    }
+
+    @Override
+    public StoredOperationHandle create(StoredOperationHandle object) {
+      return null;
+    }
+
+    @Override
+    public StoredOperationHandle read(Object id) throws ItemNotFound {
+      return null;
+    }
+
+    @Override
+    public List<StoredOperationHandle> readAll(FilteringStrategy 
filteringStrategy) {
+      LinkedList<StoredOperationHandle> storedOperationHandles = new 
LinkedList<StoredOperationHandle>();
+      for (StoredOperationHandle handle : storage.values()) {
+        if (filteringStrategy.isConform(handle))
+          storedOperationHandles.add(handle);
+      }
+      return storedOperationHandles;
+    }
+
+    @Override
+    public StoredOperationHandle update(StoredOperationHandle newObject, 
String id) throws ItemNotFound {
+      return null;
+    }
+
+    @Override
+    public void delete(Object resourceId) throws ItemNotFound {
+
+    }
+
+    public HashMap<String, StoredOperationHandle> getStorage() {
+      return storage;
+    }
+
+    public void setStorage(HashMap<String, StoredOperationHandle> storage) {
+      this.storage = storage;
+    }
+  }
+
+  public static class MockATSParser implements IATSParser {
+
+    private List<HiveQueryId> hiveQueryIds = new LinkedList<HiveQueryId>();
+
+    public MockATSParser() {
+    }
+
+    @Override
+    public List<HiveQueryId> getHiveQuieryIdsList(String username) {
+      return hiveQueryIds;
+    }
+
+    @Override
+    public HiveQueryId getHiveQuieryIdByOperationId(byte[] guid) {
+      return null;
+    }
+
+    @Override
+    public TezDagId getTezDAGByName(String name) {
+      return null;
+    }
+
+    public List<HiveQueryId> getHiveQueryIds() {
+      return hiveQueryIds;
+    }
+
+    public void setHiveQueryIds(List<HiveQueryId> hiveQueryIds) {
+      this.hiveQueryIds = hiveQueryIds;
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8dbdbf66/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
index 78b6f1f..1c3444e 100644
--- 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
+++ 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobServiceTest.java
@@ -20,9 +20,9 @@ package org.apache.ambari.view.hive.resources.jobs;
 
 import org.apache.ambari.view.hive.ServiceTestUtils;
 import org.apache.ambari.view.hive.BaseHiveTest;
+import org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl;
 import org.apache.ambari.view.hive.utils.HdfsApiMock;
 import org.apache.ambari.view.hive.client.Connection;
-import org.apache.ambari.view.hive.client.ConnectionPool;
 import org.apache.ambari.view.hive.client.HiveClientException;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQuery;
 import org.apache.ambari.view.hive.resources.savedQueries.SavedQueryService;
@@ -53,7 +53,12 @@ public class JobServiceTest extends BaseHiveTest {
   @AfterClass
   public static void shutDown() throws Exception {
     BaseHiveTest.shutDown(); // super
-    HdfsApi.dropAllConnections(); //cleanup API connection
+  }
+
+  @Override
+  @After
+  public void tearDown() throws Exception {
+    jobService.getSharedObjectsFactory().clear(HdfsApi.class);
   }
 
   @Override
@@ -65,7 +70,7 @@ public class JobServiceTest extends BaseHiveTest {
 
     Connection hiveConnection = configureHiveConnectionMock();
 
-    ConnectionPool.setInstance(context, hiveConnection);
+    jobService.getSharedObjectsFactory().setInstance(Connection.class, 
hiveConnection);
   }
 
   @Test
@@ -77,7 +82,6 @@ public class JobServiceTest extends BaseHiveTest {
     jobCreationRequest.job = new JobImpl();
     jobCreationRequest.job.setQueryId(savedQueryForJob.getId());
 
-
     Response response = jobService.create(jobCreationRequest,
         ServiceTestUtils.getResponseWithLocation(), 
ServiceTestUtils.getDefaultUriInfo());
     ServiceTestUtils.assertHTTPResponseCreated(response);
@@ -113,7 +117,7 @@ public class JobServiceTest extends BaseHiveTest {
   public void createJobNoSource() throws IOException, InterruptedException {
     HdfsApi hdfsApi = createNiceMock(HdfsApi.class);
     expect(hdfsApi.mkdir(anyString())).andReturn(true).anyTimes();
-    HdfsApi.setInstance(context, hdfsApi);
+    jobService.getSharedObjectsFactory().setInstance(HdfsApi.class, hdfsApi);
     replay(hdfsApi);
 
     JobService.JobRequest request = new JobService.JobRequest();
@@ -196,7 +200,7 @@ public class JobServiceTest extends BaseHiveTest {
   private HdfsApiMock setupHdfsApiMock() throws IOException, 
InterruptedException {
     HdfsApiMock hdfsApiMock = new HdfsApiMock("select * from Z");
     HdfsApi hdfsApi = hdfsApiMock.getHdfsApi();
-    HdfsApi.setInstance(context, hdfsApi);
+    jobService.getSharedObjectsFactory().setInstance(HdfsApi.class, hdfsApi);
     replay(hdfsApi);
     return hdfsApiMock;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8dbdbf66/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
index 0c060ed..1e04dd7 100644
--- 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
+++ 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/LogParserTest.java
@@ -45,8 +45,8 @@ public class LogParserTest {
             "INFO : Ended Job = job_1421248330903_0003";
 
         LogParser p = LogParser.parseLog(log);
-        Assert.assertEquals(1, p.getJobsList().size());
-        
Assert.assertEquals("application_1421248330903_0003",(((LogParser.JobId) 
(p.getJobsList().toArray())[0])
+        Assert.assertEquals(1, p.getAppsList().size());
+        
Assert.assertEquals("application_1421248330903_0003",(((LogParser.AppId) 
(p.getAppsList().toArray())[0])
                                                             .getIdentifier()));
     }
 
@@ -66,8 +66,8 @@ public class LogParserTest {
             "INFO : Map 1: 1/1 Reducer 2: 1/1 ";
 
         LogParser p = LogParser.parseLog(log);
-        Assert.assertEquals(1, p.getJobsList().size());
-        
Assert.assertEquals("application_1423156117563_0003",(((LogParser.JobId) 
(p.getJobsList().toArray())[0])
+        Assert.assertEquals(1, p.getAppsList().size());
+        
Assert.assertEquals("application_1423156117563_0003",(((LogParser.AppId) 
(p.getAppsList().toArray())[0])
             .getIdentifier()));
     }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8dbdbf66/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
index 06d5269..026acc3 100644
--- 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
+++ 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/resources/FileResourceServiceTest.java
@@ -86,7 +86,7 @@ public class FileResourceServiceTest extends BaseHiveTest {
   @Test
   public void updateFileResourceItem() {
     Response createdFileResourceItem = doCreateFileResourceItem();
-    Integer createdUdfId = ((FileResourceItem) ((JSONObject) 
createdFileResourceItem.getEntity()).get("fileResource")).getId();
+    Object createdUdfId = ((FileResourceItem) ((JSONObject) 
createdFileResourceItem.getEntity()).get("fileResource")).getId();
 
     FileResourceService.ResourceRequest request = new 
FileResourceService.ResourceRequest();
     request.fileResource = new FileResourceItem();
@@ -108,7 +108,7 @@ public class FileResourceServiceTest extends BaseHiveTest {
   @Test
   public void deleteFileResourceItem() {
     Response createdFileResourceItem = doCreateFileResourceItem();
-    Integer createdUdfId = ((FileResourceItem) ((JSONObject) 
createdFileResourceItem.getEntity()).get("fileResource")).getId();
+    Object createdUdfId = ((FileResourceItem) ((JSONObject) 
createdFileResourceItem.getEntity()).get("fileResource")).getId();
 
     Response response = resourceService.delete(String.valueOf(createdUdfId));
     Assert.assertEquals(204, response.getStatus());

http://git-wip-us.apache.org/repos/asf/ambari/blob/8dbdbf66/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
index d369bb2..9b26a5b 100644
--- 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
+++ 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/savedQueries/SavedQueryServiceTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.view.hive.resources.savedQueries;
 import org.apache.ambari.view.hive.HDFSTest;
 import org.apache.ambari.view.hive.utils.HdfsApi;
 import org.apache.ambari.view.hive.utils.NotFoundFormattedException;
+import org.apache.ambari.view.hive.utils.SharedObjectsFactory;
 import org.json.simple.JSONObject;
 import org.junit.*;
 import org.junit.rules.ExpectedException;
@@ -43,13 +44,12 @@ public class SavedQueryServiceTest extends HDFSTest {
 
   @BeforeClass
   public static void startUp() throws Exception {
-      HDFSTest.startUp(); // super
+    HDFSTest.startUp(); // super
   }
 
   @AfterClass
   public static void shutDown() throws Exception {
     HDFSTest.shutDown(); // super
-    HdfsApi.dropAllConnections(); //cleanup API connection
   }
 
   @Override
@@ -57,7 +57,13 @@ public class SavedQueryServiceTest extends HDFSTest {
   public void setUp() throws Exception {
     super.setUp();
     savedQueryService = getService(SavedQueryService.class, handler, context);
-    SavedQueryResourceManager.getViewSingletonObjects().clear();
+    savedQueryService.getSharedObjectsFactory().clear();
+  }
+
+  @Override
+  @After
+  public void tearDown() throws Exception {
+    super.tearDown();
   }
 
   @Override
@@ -124,7 +130,7 @@ public class SavedQueryServiceTest extends HDFSTest {
   @Test
   public void update() {
     Response created = doCreateSavedQuery();
-    Integer createdId = ((SavedQuery) ((JSONObject) 
created.getEntity()).get("savedQuery")).getId();
+    Object createdId = ((SavedQuery) ((JSONObject) 
created.getEntity()).get("savedQuery")).getId();
 
     SavedQueryService.SavedQueryRequest request = new 
SavedQueryService.SavedQueryRequest();
     request.savedQuery = new SavedQuery();
@@ -144,7 +150,7 @@ public class SavedQueryServiceTest extends HDFSTest {
   @Test
   public void delete() {
     Response created = doCreateSavedQuery();
-    Integer createdId = ((SavedQuery) ((JSONObject) 
created.getEntity()).get("savedQuery")).getId();
+    Object createdId = ((SavedQuery) ((JSONObject) 
created.getEntity()).get("savedQuery")).getId();
 
     Response response = savedQueryService.delete(String.valueOf(createdId));
     Assert.assertEquals(204, response.getStatus());

http://git-wip-us.apache.org/repos/asf/ambari/blob/8dbdbf66/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java
 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java
index 9271e97..c8b70a8 100644
--- 
a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java
+++ 
b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/udfs/UDFServiceTest.java
@@ -86,7 +86,7 @@ public class UDFServiceTest extends BaseHiveTest {
   @Test
   public void updateUDF() {
     Response createdUDF = doCreateUDF();
-    Integer createdUdfId = ((UDF) ((JSONObject) 
createdUDF.getEntity()).get("udf")).getId();
+    Object createdUdfId = ((UDF) ((JSONObject) 
createdUDF.getEntity()).get("udf")).getId();
 
     UDFService.UDFRequest request = new UDFService.UDFRequest();
     request.udf = new UDF();
@@ -108,7 +108,7 @@ public class UDFServiceTest extends BaseHiveTest {
   @Test
   public void deleteUDF() {
     Response createdUDF = doCreateUDF();
-    Integer createdUdfId = ((UDF) ((JSONObject) 
createdUDF.getEntity()).get("udf")).getId();
+    Object createdUdfId = ((UDF) ((JSONObject) 
createdUDF.getEntity()).get("udf")).getId();
 
     Response response = udfService.delete(String.valueOf(createdUdfId));
     Assert.assertEquals(204, response.getStatus());

Reply via email to