Repository: incubator-griffin
Updated Branches:
  refs/heads/master bc6eab942 -> 8fe05d251


merge new service

Author: Chen <[email protected]>

Closes #30 from justACT/merge1.


Project: http://git-wip-us.apache.org/repos/asf/incubator-griffin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-griffin/commit/8fe05d25
Tree: http://git-wip-us.apache.org/repos/asf/incubator-griffin/tree/8fe05d25
Diff: http://git-wip-us.apache.org/repos/asf/incubator-griffin/diff/8fe05d25

Branch: refs/heads/master
Commit: 8fe05d251b19f13157cabdde3ec8f9243546579d
Parents: bc6eab9
Author: Chen <[email protected]>
Authored: Wed May 17 16:46:36 2017 +0800
Committer: Liu <[email protected]>
Committed: Wed May 17 16:46:36 2017 +0800

----------------------------------------------------------------------
 .gitignore                                      |   2 +-
 .../griffin/core/GriffinWebApplication.java     |   8 +-
 .../core/measure/repo/ConnectorConfigRepo.java  |   0
 .../core/metastore/HiveMetastoreService.java    |   2 -
 .../griffin/core/metric/MetricController.java   |   4 +-
 .../griffin/core/schedule/SparkSubmitJob.java   |   6 +-
 .../griffin/core/service/GriffinController.java |  46 +++++++-
 .../src/main/resources/application.properties   |   6 +-
 service/src/main/resources/sparkJob.properties  |  21 ++--
 .../metastore/HiveMetastoreControllerTest.java  | 100 ++++++++++++++++
 .../metastore/HiveMetastoreServiceTest.java     |  82 ++++++++++++-
 .../metastore/KafkaSchemaControllerTest.java    |  91 ++++++++++++++
 .../core/metastore/KafkaSchemaServiceTest.java  |  25 ++--
 .../core/schedule/SparkSubmitJobTest.java       |  41 ++++---
 .../core/service/GriffinControllerTest.java     | 118 +++++++++++++++++++
 ui/js/services/services.js                      |  13 +-
 16 files changed, 504 insertions(+), 61 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 9090598..146c012 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,7 +12,7 @@ target/**
 *.war
 *.ear
 target
-service/src/main/resource/public/**
+service/src/main/resources/public/
 
 .project
 .settings/

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
----------------------------------------------------------------------
diff --git 
a/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java 
b/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
index 0c2f557..035e4a2 100644
--- a/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
+++ b/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
@@ -37,8 +37,6 @@ public class GriffinWebApplication implements 
CommandLineRunner{
     @Autowired
     DataConnectorRepo connectorRepo;
 
-
-
     public void run(String... strings) throws Exception {
         HashMap<String,String> configMap1=new HashMap<>();
         configMap1.put("database","default");
@@ -56,19 +54,19 @@ public class GriffinWebApplication implements 
CommandLineRunner{
 
         EvaluateRule eRule = new EvaluateRule(1,rules);
 
-        Measure measure = new Measure("bevssoj","bevssoj description", 
Measure.MearuseType.accuracy, "bullyeye", source, target, eRule,"test1");
+        Measure measure = new Measure("viewitem_hourly","bevssoj description", 
Measure.MearuseType.accuracy, "bullyeye", source, target, eRule,"test1");
         measureRepo.save(measure);
 
         DataConnector source2 = new DataConnector(ConnectorType.HIVE, "1.2", 
configJson1);
         DataConnector target2 = new DataConnector(ConnectorType.HIVE, "1.2", 
configJson2);
         EvaluateRule eRule2 = new EvaluateRule(1,rules);
-        Measure measure2 = new Measure("test","test description", 
Measure.MearuseType.accuracy, "bullyeye", source2, target2, eRule2,"test1");
+        Measure measure2 = new Measure("search_hourly","test description", 
Measure.MearuseType.accuracy, "bullyeye", source2, target2, eRule2,"test1");
         measureRepo.save(measure2);
 
         DataConnector source3 = new DataConnector(ConnectorType.HIVE, "1.2", 
configJson1);
         DataConnector target3 = new DataConnector(ConnectorType.HIVE, "1.2", 
configJson2);
         EvaluateRule eRule3 = new EvaluateRule(1,rules);
-        Measure measure3 = new Measure("just_inthere","test_just_inthere 
description", Measure.MearuseType.accuracy, "hadoop", source3, target3, 
eRule3,"test1");
+        Measure measure3 = new Measure("buy_hourly","test_just_inthere 
description", Measure.MearuseType.accuracy, "hadoop", source3, target3, 
eRule3,"test1");
         measureRepo.save(measure3);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/main/java/org/apache/griffin/core/measure/repo/ConnectorConfigRepo.java
----------------------------------------------------------------------
diff --git 
a/service/src/main/java/org/apache/griffin/core/measure/repo/ConnectorConfigRepo.java
 
b/service/src/main/java/org/apache/griffin/core/measure/repo/ConnectorConfigRepo.java
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/main/java/org/apache/griffin/core/metastore/HiveMetastoreService.java
----------------------------------------------------------------------
diff --git 
a/service/src/main/java/org/apache/griffin/core/metastore/HiveMetastoreService.java
 
b/service/src/main/java/org/apache/griffin/core/metastore/HiveMetastoreService.java
index ba7ade4..856959b 100644
--- 
a/service/src/main/java/org/apache/griffin/core/metastore/HiveMetastoreService.java
+++ 
b/service/src/main/java/org/apache/griffin/core/metastore/HiveMetastoreService.java
@@ -1,7 +1,5 @@
 package org.apache.griffin.core.metastore;
 
-
-import org.apache.avro.generic.GenericData;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
----------------------------------------------------------------------
diff --git 
a/service/src/main/java/org/apache/griffin/core/metric/MetricController.java 
b/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
index 5b9f610..c8bce3f 100644
--- a/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
+++ b/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
@@ -20,9 +20,11 @@ public class MetricController {
 private static final Logger log = 
LoggerFactory.getLogger(MetricController.class);
     @Autowired
     MeasureRepo measureRepo;
-    @RequestMapping("/org/{measureName}")
+    @RequestMapping("/{measureName}/org")
     public String getOrgByMeasureName(@PathVariable("measureName") String 
measureName){
         return measureRepo.findOrgByName(measureName);
     }
 
+
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/main/java/org/apache/griffin/core/schedule/SparkSubmitJob.java
----------------------------------------------------------------------
diff --git 
a/service/src/main/java/org/apache/griffin/core/schedule/SparkSubmitJob.java 
b/service/src/main/java/org/apache/griffin/core/schedule/SparkSubmitJob.java
index a8ba755..c0aca15 100644
--- a/service/src/main/java/org/apache/griffin/core/schedule/SparkSubmitJob.java
+++ b/service/src/main/java/org/apache/griffin/core/schedule/SparkSubmitJob.java
@@ -74,6 +74,10 @@ public class SparkSubmitJob implements Job {
         JobDetail jd = context.getJobDetail();
         String measureName = jd.getJobDataMap().getString("measure");
         measure = measureRepo.findByName(measureName);
+        if (measure==null) {
+            logger.info(measureName + " is not find!");
+            return;
+        }
         sourcePattern = jd.getJobDataMap().getString("sourcePat");
         targetPattern = jd.getJobDataMap().getString("targetPat");
         dataStartTimestamp = 
jd.getJobDataMap().getString("dataStartTimestamp");
@@ -191,7 +195,7 @@ public class SparkSubmitJob implements Job {
         sparkJobDO.setJars(jars);
 
         List<String> files = new ArrayList<>();
-        files.add(props.getProperty("sparkJob.files_1"));
+//        files.add(props.getProperty("sparkJob.files_1"));
         sparkJobDO.setFiles(files);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/main/java/org/apache/griffin/core/service/GriffinController.java
----------------------------------------------------------------------
diff --git 
a/service/src/main/java/org/apache/griffin/core/service/GriffinController.java 
b/service/src/main/java/org/apache/griffin/core/service/GriffinController.java
index 889b441..e8a771f 100644
--- 
a/service/src/main/java/org/apache/griffin/core/service/GriffinController.java
+++ 
b/service/src/main/java/org/apache/griffin/core/service/GriffinController.java
@@ -1,6 +1,7 @@
 package org.apache.griffin.core.service;
 
 
+import org.apache.griffin.core.measure.Measure;
 import org.apache.griffin.core.measure.repo.MeasureRepo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -9,7 +10,7 @@ import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
 
-import java.util.List;
+import java.util.*;
 
 
 @RestController
@@ -25,7 +26,7 @@ public class GriffinController {
     MeasureRepo measureRepo;
 
     @RequestMapping("/org")
-    public List<String> getOrg(){
+    public List<String> getOrgs(){
         return measureRepo.findOrganizations();
     }
 
@@ -33,5 +34,46 @@ public class GriffinController {
     public List<String> getMetricNameListByOrg(@PathVariable("org") String 
org){
         return measureRepo.findNameByOrganization(org);
     }
+
+    @RequestMapping("/orgWithMetrics")
+    public Map<String,List<String>> getOrgsWithMetrics(){
+        Map<String,List<String>> orgWithMetricsMap=new HashMap<>();
+        List<String> orgList=measureRepo.findOrganizations();
+        for (String org:orgList){
+            if(org!=null){
+                
orgWithMetricsMap.put(org,measureRepo.findNameByOrganization(org));
+            }
+        }
+        return orgWithMetricsMap;
+    }
+
+    @RequestMapping("/dataAssetsWithMetrics")
+    public Map<String,List<String>> getDataAssetsWithMetrics(){
+        Map<String,List<String>> daWithMetricsMap=new HashMap<>();
+        Iterable<Measure> measureList=measureRepo.findAll();
+        for (Measure m:measureList){
+            switch (m.getType()){
+                case accuracy:
+                    String[] 
tableNames={m.getSource().getConfig().get("table.name"),m.getTarget().getConfig().get("table.name")};
+                    for (String taName:tableNames){
+                        if(taName!=null) {
+                            if(daWithMetricsMap.get(taName)==null){
+                                daWithMetricsMap.put(taName, new 
ArrayList<>(Arrays.asList(m.getName())));
+                            }else{
+                                List<String> 
measureNameList=daWithMetricsMap.get(taName);
+                                measureNameList.add(m.getName());
+                                daWithMetricsMap.put(taName, measureNameList);
+                            }
+                        }
+                    }
+                    break;
+                default:
+                    log.info("invalid measure type!");
+            }
+
+        }
+        return daWithMetricsMap;
+    }
+
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/main/resources/application.properties
----------------------------------------------------------------------
diff --git a/service/src/main/resources/application.properties 
b/service/src/main/resources/application.properties
index d0fea6e..bd7fab1 100644
--- a/service/src/main/resources/application.properties
+++ b/service/src/main/resources/application.properties
@@ -1,4 +1,4 @@
-spring.datasource.url= jdbc:mysql://localhost:3306/metastore
+spring.datasource.url= 
jdbc:mysql://localhost:3306/metastore?autoReconnect=true&useSSL=false
 spring.datasource.username =griffin
 spring.datasource.password =123456
 
@@ -15,8 +15,8 @@ 
spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.MySQL5Dialect
 spring.jpa.hibernate.naming-strategy = org.hibernate.cfg.ImprovedNamingStrategy
 
 # hive metastore
-hive.metastore.uris = thrift://localhost:9083
+hive.metastore.uris = thrift://10.9.246.187:9083
 hive.metastore.dbname = default
 
 # kafka schema registry
-kafka.schema.registry.url = http://localhost:8081
\ No newline at end of file
+kafka.schema.registry.url = http://10.65.159.119:8081
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/main/resources/sparkJob.properties
----------------------------------------------------------------------
diff --git a/service/src/main/resources/sparkJob.properties 
b/service/src/main/resources/sparkJob.properties
index b6f9932..e3eddcc 100644
--- a/service/src/main/resources/sparkJob.properties
+++ b/service/src/main/resources/sparkJob.properties
@@ -1,16 +1,17 @@
-sparkJob.file=hdfs:///griffin/griffin-measure-batch.jar
+sparkJob.file=/exe/griffin-measure-batch-0.0.1-SNAPSHOT.jar
 sparkJob.className=org.apache.griffin.measure.batch.Application
-sparkJob.args_1=hdfs:///griffin/json/env.json
+sparkJob.args_1=/benchmark/test/env.json
 sparkJob.args_3=hdfs,raw
-sparkJob.name=griffin
+sparkJob.name=griffin-livy-rrr
 sparkJob.queue=default
 sparkJob.numExecutors=2
-sparkJob.executorCores=1
-sparkJob.driverMemory=1g
-sparkJob.executorMemory=1g
+sparkJob.executorCores=4
+sparkJob.driverMemory=2g
+sparkJob.executorMemory=2g
 sparkJob.spark.jars.packages=com.databricks:spark-avro_2.10:2.0.1
-sparkJob.jars_1=hdfs:///livy/datanucleus-api-jdo-3.2.6.jar
-sparkJob.jars_2=hdfs:///livy/datanucleus-core-3.2.10.jar
-sparkJob.jars_3=hdfs:///livy/datanucleus-rdbms-3.2.9.jar
+sparkJob.jars_1=/livy/datanucleus-api-jdo-3.2.6.jar
+sparkJob.jars_2=/livy/datanucleus-core-3.2.10.jar
+sparkJob.jars_3=/livy/datanucleus-rdbms-3.2.9.jar
+sparkJob.files_1=/livy/hive-site.xml
 sparkJob.dateAndHour=dt,hour
-sparkJob.uri=http://localhost:8998/batches
\ No newline at end of file
+sparkJob.uri=http://10.9.246.187:8998/batches
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/test/java/org/apache/griffin/core/metastore/HiveMetastoreControllerTest.java
----------------------------------------------------------------------
diff --git 
a/service/src/test/java/org/apache/griffin/core/metastore/HiveMetastoreControllerTest.java
 
b/service/src/test/java/org/apache/griffin/core/metastore/HiveMetastoreControllerTest.java
new file mode 100644
index 0000000..48fae62
--- /dev/null
+++ 
b/service/src/test/java/org/apache/griffin/core/metastore/HiveMetastoreControllerTest.java
@@ -0,0 +1,100 @@
+package org.apache.griffin.core.metastore;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.context.web.WebAppConfiguration;
+import org.springframework.test.web.servlet.MockMvc;
+import org.springframework.test.web.servlet.setup.MockMvcBuilders;
+
+import static org.mockito.Mockito.verify;
+import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.mockito.Mockito.when;
+import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
+/**
+ * Created by xiangrchen on 5/16/17.
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@WebAppConfiguration
+public class HiveMetastoreControllerTest {
+    private MockMvc mockMvc;
+
+    @Mock
+    HiveMetastoreService hiveMetastoreService;
+
+    @InjectMocks
+    private HiveMetastoreController hiveMetastoreController;
+
+    @Before
+    public void setup(){
+        MockitoAnnotations.initMocks(this);
+        this.mockMvc = 
MockMvcBuilders.standaloneSetup(hiveMetastoreController).build();
+    }
+
+    @Test
+    public void test_getAllDatabases() throws Exception {
+        when(hiveMetastoreService.getAllDatabases()).thenReturn(null);
+        mockMvc.perform(get("/metadata/hive/db"))
+                .andExpect(status().isOk());
+        verify(hiveMetastoreService).getAllDatabases();
+    }
+
+    @Test
+    public void test_getDefAllTables() throws Exception{
+        when(hiveMetastoreService.getAllTableNames("")).thenReturn(null);
+        mockMvc.perform(get("/metadata/hive/table"))
+                .andExpect(status().isOk());
+        verify(hiveMetastoreService).getAllTableNames("");
+    }
+
+    @Test
+    public void test_getAllTableNamess() throws Exception {
+        String db="default";
+        when(hiveMetastoreService.getAllTableNames(db)).thenReturn(null);
+        mockMvc.perform(get("/metadata/hive/{db}/table",db))
+                .andExpect(status().isOk());
+        verify(hiveMetastoreService).getAllTableNames(db);
+    }
+
+    @Test
+    public void test_getAllTables() throws Exception {
+        String db="default";
+        when(hiveMetastoreService.getAllTable(db)).thenReturn(null);
+        mockMvc.perform(get("/metadata/hive/{db}/alltables",db))
+                .andExpect(status().isOk());
+        verify(hiveMetastoreService).getAllTable(db);
+    }
+
+    @Test
+    public void test_getAllTables2() throws Exception {
+        when(hiveMetastoreService.getAllTable()).thenReturn(null);
+        mockMvc.perform(get("/metadata/hive/alltables"))
+                .andExpect(status().isOk());
+        verify(hiveMetastoreService).getAllTable();
+    }
+
+    @Test
+    public void test_getDefTable() throws Exception {
+        String dbName="";
+        String tableName="cout";
+        when(hiveMetastoreService.getTable(dbName,tableName)).thenReturn(null);
+        mockMvc.perform(get("/metadata/hive/table/{table}",tableName))
+                .andExpect(status().isOk());
+        verify(hiveMetastoreService).getTable(dbName,tableName);
+    }
+
+    @Test
+    public void test_getTable() throws Exception{
+        String db="default";
+        String table="cout";
+        when(hiveMetastoreService.getTable(db,table)).thenReturn(null);
+        mockMvc.perform(get("/metadata/hive/{db}/table/{table}",db,table))
+                .andExpect(status().isOk());
+        verify(hiveMetastoreService).getTable(db,table);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/test/java/org/apache/griffin/core/metastore/HiveMetastoreServiceTest.java
----------------------------------------------------------------------
diff --git 
a/service/src/test/java/org/apache/griffin/core/metastore/HiveMetastoreServiceTest.java
 
b/service/src/test/java/org/apache/griffin/core/metastore/HiveMetastoreServiceTest.java
index e5223bc..2df93fe 100644
--- 
a/service/src/test/java/org/apache/griffin/core/metastore/HiveMetastoreServiceTest.java
+++ 
b/service/src/test/java/org/apache/griffin/core/metastore/HiveMetastoreServiceTest.java
@@ -2,12 +2,18 @@ package org.apache.griffin.core.metastore;
 
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.thrift.TException;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 import org.powermock.reflect.Whitebox;
 
-import java.lang.reflect.Field;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import static org.mockito.Mockito.*;
@@ -16,15 +22,20 @@ import static 
org.springframework.test.util.AssertionErrors.assertEquals;
 /**
  * Created by xiangrchen on 5/10/17.
  */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({HiveMetastoreService.class})
 public class HiveMetastoreServiceTest {
 
-    HiveMetastoreService hiveMetastoreService;
+    @InjectMocks
+    private HiveMetastoreService hiveMetastoreService;
+
     @Before
     public void setup() throws NoSuchFieldException, IllegalAccessException {
-        Field defaultDbName = 
HiveMetastoreService.class.getDeclaredField("defaultDbName");
-        defaultDbName.setAccessible(true);
-        hiveMetastoreService=new HiveMetastoreService();
-        defaultDbName.set(hiveMetastoreService,"default");
+//        Field defaultDbName = 
HiveMetastoreService.class.getDeclaredField("defaultDbName");
+//        defaultDbName.setAccessible(true);
+//        hiveMetastoreService=new HiveMetastoreService();
+//        defaultDbName.set(hiveMetastoreService,"default");
+        
Whitebox.setInternalState(hiveMetastoreService,"defaultDbName","default");
         hiveMetastoreService.client=mock(HiveMetaStoreClient.class);
     }
 
@@ -49,4 +60,63 @@ public class HiveMetastoreServiceTest {
         when(hiveMetastoreService.client.getAllDatabases()).thenThrow(new 
MetaException());
         hiveMetastoreService.getAllDatabases();
     }
+
+    @Test
+    public void test_getAllTableNames() throws Exception{
+        String dbName="dfs";
+        List<String> res= new ArrayList<>();
+        when(hiveMetastoreService.client.getAllTables(dbName)).thenReturn(res);
+        hiveMetastoreService.getAllTableNames(dbName);
+        verify(hiveMetastoreService.client).getAllTables(dbName);
+
+        when(hiveMetastoreService.client.getAllTables(dbName)).thenThrow(new 
MetaException());
+        hiveMetastoreService.getAllTableNames(dbName);
+    }
+
+    @Test
+    public void test_getAllTable() throws Exception{
+        String db="dff";
+        List<String> tables= new ArrayList<>(Arrays.asList("cout","cout1"));
+        when(hiveMetastoreService.client.getAllTables(db)).thenReturn(tables);
+        for (String table:tables){
+            
when(hiveMetastoreService.client.getTable(db,table)).thenReturn(new Table());
+        }
+        hiveMetastoreService.getAllTable(db);
+        verify(hiveMetastoreService.client).getAllTables(db);
+        verify(hiveMetastoreService.client).getTable(db,tables.get(0));
+
+        when(hiveMetastoreService.client.getAllTables(db)).thenThrow(new 
MetaException());
+        hiveMetastoreService.getAllTable(db);
+    }
+
+    @Test
+    public void test_getAllTable2() throws TException {
+        List<String> dbs=new ArrayList<>(Arrays.asList("dff","dgg"));
+        List<String> tables= new ArrayList<>(Arrays.asList("cout","cout1"));
+        when(hiveMetastoreService.getAllDatabases()).thenReturn(dbs);
+        for (String db:dbs){
+            
when(hiveMetastoreService.client.getAllTables(db)).thenReturn(tables);
+            for (String table:tables){
+                
when(hiveMetastoreService.client.getTable(db,table)).thenReturn(new Table());
+            }
+        }
+        hiveMetastoreService.getAllTable();
+        verify(hiveMetastoreService.client).getAllTables(dbs.get(0));
+        verify(hiveMetastoreService.client).getTable(dbs.get(0),tables.get(0));
+
+        
when(hiveMetastoreService.client.getAllTables(dbs.get(0))).thenThrow(new 
MetaException());
+        hiveMetastoreService.getAllTable();
+    }
+
+    @Test
+    public void test_getTable() throws Exception{
+        String dbName="aaa";
+        String tableName="ccc";
+        
when(hiveMetastoreService.client.getTable(dbName,tableName)).thenReturn(new 
Table());
+        hiveMetastoreService.getTable(dbName,tableName);
+        verify(hiveMetastoreService.client).getTable(dbName,tableName);
+
+        
when(hiveMetastoreService.client.getTable(dbName,tableName)).thenThrow(new 
MetaException());
+        hiveMetastoreService.getTable(dbName,tableName);
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/test/java/org/apache/griffin/core/metastore/KafkaSchemaControllerTest.java
----------------------------------------------------------------------
diff --git 
a/service/src/test/java/org/apache/griffin/core/metastore/KafkaSchemaControllerTest.java
 
b/service/src/test/java/org/apache/griffin/core/metastore/KafkaSchemaControllerTest.java
new file mode 100644
index 0000000..2bead50
--- /dev/null
+++ 
b/service/src/test/java/org/apache/griffin/core/metastore/KafkaSchemaControllerTest.java
@@ -0,0 +1,91 @@
+package org.apache.griffin.core.metastore;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.context.web.WebAppConfiguration;
+import org.springframework.test.web.servlet.MockMvc;
+import org.springframework.test.web.servlet.setup.MockMvcBuilders;
+
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
+/**
+ * Created by xiangrchen on 5/16/17.
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@WebAppConfiguration
+public class KafkaSchemaControllerTest {
+    private MockMvc mockMvc;
+
+    @Mock
+    KafkaSchemaService kafkaSchemaService;
+
+    @InjectMocks
+    private KafkaSchemaController kafkaSchemaController;
+
+    @Before
+    public void setup(){
+        MockitoAnnotations.initMocks(this);
+        this.mockMvc = 
MockMvcBuilders.standaloneSetup(kafkaSchemaController).build();
+    }
+
+    @Test
+    public void test_getSubjects() throws Exception {
+        int id=1;
+        when(kafkaSchemaService.getSchemaString(id)).thenReturn(null);
+        mockMvc.perform(get("/metadata/kafka/schema/{id}",id))
+                .andExpect(status().isOk());
+        verify(kafkaSchemaService).getSchemaString(id);
+    }
+
+    @Test
+    public void test_getSchemaString() throws Exception {
+        when(kafkaSchemaService.getSubjects()).thenReturn(null);
+        mockMvc.perform(get("/metadata/kafka/subject"))
+                .andExpect(status().isOk());
+        verify(kafkaSchemaService).getSubjects();
+    }
+
+    @Test
+    public void test_getSubjectVersions() throws Exception {
+        String subject="sss";
+        when(kafkaSchemaService.getSubjectVersions(subject)).thenReturn(null);
+        
mockMvc.perform(get("/metadata/kafka/subject/{subject}/version",subject))
+                .andExpect(status().isOk());
+        verify(kafkaSchemaService).getSubjectVersions(subject);
+    }
+
+    @Test
+    public void test_getSubjectSchema() throws Exception {
+        String subject="ss.s";
+        String version="ss";
+        when(kafkaSchemaService.getSubjectSchema(subject, 
version)).thenReturn(null);
+        
mockMvc.perform(get("/metadata/kafka/subject/{subject}/version/{version}",subject,version))
+                .andExpect(status().isOk());
+        verify(kafkaSchemaService).getSubjectSchema(subject, version);
+    }
+
+    @Test
+    public void test_getTopLevelConfig() throws Exception {
+        when(kafkaSchemaService.getTopLevelConfig()).thenReturn(null);
+        mockMvc.perform(get("/metadata/kafka/config"))
+                .andExpect(status().isOk());
+        verify(kafkaSchemaService).getTopLevelConfig();
+    }
+
+    @Test
+    public void test_getSubjectLevelConfig() throws Exception {
+        String subject="sss";
+        
when(kafkaSchemaService.getSubjectLevelConfig(subject)).thenReturn(null);
+        mockMvc.perform(get("/metadata/kafka/config/{subject}",subject))
+                .andExpect(status().isOk());
+        verify(kafkaSchemaService).getSubjectLevelConfig(subject);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/test/java/org/apache/griffin/core/metastore/KafkaSchemaServiceTest.java
----------------------------------------------------------------------
diff --git 
a/service/src/test/java/org/apache/griffin/core/metastore/KafkaSchemaServiceTest.java
 
b/service/src/test/java/org/apache/griffin/core/metastore/KafkaSchemaServiceTest.java
index 103de3c..5bd4461 100644
--- 
a/service/src/test/java/org/apache/griffin/core/metastore/KafkaSchemaServiceTest.java
+++ 
b/service/src/test/java/org/apache/griffin/core/metastore/KafkaSchemaServiceTest.java
@@ -2,12 +2,14 @@ package org.apache.griffin.core.metastore;
 
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 import org.powermock.reflect.Whitebox;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.reflect.Field;
-
 import static org.springframework.test.util.AssertionErrors.assertEquals;
 
 /**
@@ -15,20 +17,23 @@ import static 
org.springframework.test.util.AssertionErrors.assertEquals;
  */
 //@RunWith(SpringJUnit4ClassRunner.class)
 //@ContextConfiguration
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({HiveMetastoreService.class})
 public class KafkaSchemaServiceTest {
 
     private static final Logger log = 
LoggerFactory.getLogger(KafkaSchemaServiceTest.class);
-//    @Value("${kafka.schema.registry.url}")
-//    private String url;
 
+    @InjectMocks
     private KafkaSchemaService kafkaSchemaService;
 
     @Before
     public void setup() throws NoSuchFieldException, IllegalAccessException {
-        Field url = KafkaSchemaService.class.getDeclaredField("url");
-        url.setAccessible(true);
-        kafkaSchemaService=new KafkaSchemaService();
-        url.set(kafkaSchemaService, "http://localhost:8080";);
+//        Field url = KafkaSchemaService.class.getDeclaredField("url");
+//        url.setAccessible(true);
+//        kafkaSchemaService=new KafkaSchemaService();
+//        url.set(kafkaSchemaService, "http://localhost:8080";);
+        
Whitebox.setInternalState(kafkaSchemaService,"url","http://localhost:8080";);
+//        kafkaSchemaService.client=mock(HiveMetaStoreClient.class);
     }
 
     @Test
@@ -45,6 +50,10 @@ public class KafkaSchemaServiceTest {
         result = Whitebox.invokeMethod(kafkaSchemaService, "registryUrl", 
path);
         assertEquals("success",result,path);
     }
+    @Test
+    public void test_getSchemaString(){
+
+    }
 
 //    @Test
 //    public void test_getSchemaString(){

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/test/java/org/apache/griffin/core/schedule/SparkSubmitJobTest.java
----------------------------------------------------------------------
diff --git 
a/service/src/test/java/org/apache/griffin/core/schedule/SparkSubmitJobTest.java
 
b/service/src/test/java/org/apache/griffin/core/schedule/SparkSubmitJobTest.java
index ea9c50d..a381c80 100644
--- 
a/service/src/test/java/org/apache/griffin/core/schedule/SparkSubmitJobTest.java
+++ 
b/service/src/test/java/org/apache/griffin/core/schedule/SparkSubmitJobTest.java
@@ -67,14 +67,13 @@ public class SparkSubmitJobTest {
         DataConnector target = new 
DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
         String rules = "$source.uage > 100 AND $source.uid = $target.uid AND 
$source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes 
+ 1 + 1";
         EvaluateRule eRule = new EvaluateRule(1,rules);
-        Measure measure = new Measure("bevssoj","bevssoj description", 
Measure.MearuseType.accuracy, "bullyeye", source, target, eRule,"test1");
+        Measure measure = new Measure("viewitem_hourly","bevssoj description", 
Measure.MearuseType.accuracy, "bullyeye", source, target, eRule,"test1");
 
         when(ssj.measureRepo.findByName("bevssoj")).thenReturn(measure);
-//        ssj.execute(context);
+        ssj.execute(context);
 
         RestTemplate restTemplate =mock(RestTemplate.class);
-//        String uri="http://10.9.246.187:8998/batches";
-        String uri="";
+        String uri="http://10.9.246.187:8998/batches";
         SparkJobDO sparkJobDO=mock(SparkJobDO.class);
         when(restTemplate.postForObject(uri, sparkJobDO, 
String.class)).thenReturn(null);
 
@@ -82,8 +81,8 @@ public class SparkSubmitJobTest {
         long currentSystemTimestamp=System.currentTimeMillis();
         long currentTimstamp = ssj.setCurrentTimestamp(currentSystemTimestamp);
 
-//        verify(ssj.measureRepo).findByName("bevssoj");
-//        verify(jdmap,atLeast(2)).put("lastTime",currentTimstamp+"");
+        verify(ssj.measureRepo).findByName("bevssoj");
+        verify(jdmap,atLeast(2)).put("lastTime",currentTimstamp+"");
     }
 
     @Test
@@ -98,22 +97,22 @@ public class SparkSubmitJobTest {
         assertEquals(verifyMap,par);
     }
 
-   @Test
-   public void test_setDataConnectorPartitions(){
-       DataConnector dc=mock(DataConnector.class);
-       String[] patternItemSet={"YYYYMMDD","HH"};
-       String[] partitionItemSet={"date","hour"};
-       long timestamp=1460174400000l;
-       
ssj.setDataConnectorPartitions(dc,patternItemSet,partitionItemSet,timestamp);
+    @Test
+    public void test_setDataConnectorPartitions(){
+        DataConnector dc=mock(DataConnector.class);
+        String[] patternItemSet={"YYYYMMDD","HH"};
+        String[] partitionItemSet={"date","hour"};
+        long timestamp=1460174400000l;
+        
ssj.setDataConnectorPartitions(dc,patternItemSet,partitionItemSet,timestamp);
 //       
doNothing().when(ssj).setDataConnectorPartitions(dataConnector,patternItemSet,partitionItemSet,timestamp);
-       Map<String,String> map=new HashMap<>();
-       map.put("partitions","date=20160409, hour=12");
-       try {
-           verify(dc).setConfig(map);
-       } catch (JsonProcessingException e) {
-           e.printStackTrace();
-       }
-   }
+        Map<String,String> map=new HashMap<>();
+        map.put("partitions","date=20160409, hour=12");
+        try {
+            verify(dc).setConfig(map);
+        } catch (JsonProcessingException e) {
+            e.printStackTrace();
+        }
+    }
 
     @Test
     public void test_setCurrentTimestamp(){

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/service/src/test/java/org/apache/griffin/core/service/GriffinControllerTest.java
----------------------------------------------------------------------
diff --git 
a/service/src/test/java/org/apache/griffin/core/service/GriffinControllerTest.java
 
b/service/src/test/java/org/apache/griffin/core/service/GriffinControllerTest.java
new file mode 100644
index 0000000..d1a039f
--- /dev/null
+++ 
b/service/src/test/java/org/apache/griffin/core/service/GriffinControllerTest.java
@@ -0,0 +1,118 @@
+package org.apache.griffin.core.service;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.griffin.core.measure.DataConnector;
+import org.apache.griffin.core.measure.EvaluateRule;
+import org.apache.griffin.core.measure.Measure;
+import org.apache.griffin.core.measure.repo.MeasureRepo;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+import org.springframework.test.context.web.WebAppConfiguration;
+import org.springframework.test.web.servlet.MockMvc;
+import org.springframework.test.web.servlet.setup.MockMvcBuilders;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static 
org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
+import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
+/**
+ * Created by xiangrchen on 5/16/17.
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@WebAppConfiguration
+public class GriffinControllerTest {
+    // NOTE(review): class runs with SpringJUnit4ClassRunner + @WebAppConfiguration but no
+    // @ContextConfiguration; the tests below use only Mockito and a standalone MockMvc, so
+    // MockitoJUnitRunner would likely suffice — confirm before relying on Spring wiring here.
+
+    // Standalone MockMvc bound directly to griffinController (built in setup()); no web app context.
+    private MockMvc mockMvc;
+
+    // Mocked repository injected into the controller under test.
+    @Mock
+    MeasureRepo measureRepo;
+
+    @InjectMocks
+    private GriffinController griffinController;
+
+    // Initializes the @Mock/@InjectMocks fields and builds the standalone MockMvc around the controller.
+    @Before
+    public void setup(){
+        MockitoAnnotations.initMocks(this);
+        this.mockMvc = 
MockMvcBuilders.standaloneSetup(griffinController).build();
+    }
+
+    // GET /version should respond 200 with the hard-coded version string "0.1.0".
+    @Test
+    public void test_greeting() throws Exception {
+        mockMvc.perform(get("/version"))
+                .andExpect(status().isOk())
+                .andExpect(content().string(is("0.1.0")));
+    }
+
+    // GET /org responds 200 when the repo reports no organizations; verifies the repo call happened.
+    @Test
+    public void test_getOrgs() throws Exception {
+        when(measureRepo.findOrganizations()).thenReturn(new 
ArrayList<String>());
+        mockMvc.perform(get("/org"))
+                .andExpect(status().isOk());
+        verify(measureRepo).findOrganizations();
+    }
+
+    // GET /org/{org} with an org that has no metrics should return an empty JSON array "[]".
+    @Test
+    public void test_getMetricNameListByOrg() throws Exception{
+        String org="hadoop";
+        when(measureRepo.findNameByOrganization(org)).thenReturn(new 
ArrayList<String>());
+        mockMvc.perform(get("/org/{org}",org))
+                .andExpect(status().isOk())
+                .andExpect(content().string(is("[]")));
+        verify(measureRepo).findNameByOrganization(org);
+    }
+
+    // GET /orgWithMetrics: one organization ("hadoop") mapped to one metric name;
+    // only checks the 200 status and that both repo lookups were made.
+    @Test
+    public void test_getOrgsWithMetrics() throws Exception{
+        String org="hadoop";
+        List<String> orgList=new ArrayList<>(Arrays.asList(org));
+        when(measureRepo.findOrganizations()).thenReturn(orgList);
+
+        
when(measureRepo.findNameByOrganization(org)).thenReturn(Arrays.asList("viewitem_hourly"));
+        mockMvc.perform(get("/orgWithMetrics"))
+                .andExpect(status().isOk());
+        verify(measureRepo).findOrganizations();
+        verify(measureRepo).findNameByOrganization(org);
+    }
+
+    // GET /dataAssetsWithMetrics: repo returns two measures that share the same HIVE
+    // source/target tables; asserts 200, dumps the response (andDo(print())) for inspection,
+    // and verifies findAll() was called. Response body content is not asserted.
+    @Test
+    public void test_getMeasureNameByDataAssets() throws Exception{
+        // Two HIVE connector configs pointing at default.test_data_src / default.test_data_tgt.
+        HashMap<String,String> configMap1=new HashMap<>();
+        configMap1.put("database","default");
+        configMap1.put("table.name","test_data_src");
+        HashMap<String,String> configMap2=new HashMap<>();
+        configMap2.put("database","default");
+        configMap2.put("table.name","test_data_tgt");
+        String configJson1 = new ObjectMapper().writeValueAsString(configMap1);
+        String configJson2 = new ObjectMapper().writeValueAsString(configMap2);
+        DataConnector source = new 
DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
+        DataConnector target = new 
DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
+        String rules = "$source.uage > 100 AND $source.uid = $target.uid AND 
$source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes 
+ 1 + 1";
+        EvaluateRule eRule = new EvaluateRule(1,rules);
+        Measure measure = new Measure("viewitem_hourly","bevssoj description", 
Measure.MearuseType.accuracy, "bullyeye", source, target, eRule,"test1");
+
+        // Second measure over the same data assets — exercises grouping of metrics per asset.
+        DataConnector source2 = new 
DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
+        DataConnector target2 = new 
DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
+        EvaluateRule eRule2 = new EvaluateRule(1,rules);
+        Measure measure2 = new Measure("search_hourly","test description", 
Measure.MearuseType.accuracy, "bullyeye", source2, target2, eRule2,"test1");
+
+        
when(measureRepo.findAll()).thenReturn(Arrays.asList(measure,measure2));
+        mockMvc.perform(get("/dataAssetsWithMetrics"))
+                .andExpect(status().isOk())
+                .andDo(print());
+        verify(measureRepo).findAll();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/8fe05d25/ui/js/services/services.js
----------------------------------------------------------------------
diff --git a/ui/js/services/services.js b/ui/js/services/services.js
index bed9f3e..f06632f 100644
--- a/ui/js/services/services.js
+++ b/ui/js/services/services.js
@@ -20,9 +20,18 @@ define(['./module'], function (services) {
     services.factory('$config', function(){
 
 
+//    var BACKEND_SERVER = '';
+      var BACKEND_SERVER = 'http://localhost:8080';
+    //   var BACKEND_SERVER = 'http://localhost:8080'; //dev env
+
+
+//    var BACKEND_SERVER = 'http://10.249.74.51';
+
+
 //    var BACKEND_SERVER = 'http://10.149.247.156:38080';
 //      var BACKEND_SERVER = 'http://localhost:8080';
-      var BACKEND_SERVER = '';
+//      var BACKEND_SERVER = '';
+
       var API_ROOT_PATH = '/api/v1';
       var ES_SERVER = 'http://10.149.247.156:39200';
 
@@ -51,6 +60,8 @@ define(['./module'], function (services) {
               heatmap: BACKEND_SERVER + API_ROOT_PATH + '/metrics/heatmap' ,
               metricdetail: BACKEND_SERVER + API_ROOT_PATH + 
'/metrics/complete',
               rulemetric: BACKEND_SERVER + API_ROOT_PATH + '/metrics/brief',
+//              dashboard: BACKEND_SERVER + API_ROOT_PATH + 
'/metrics/dashboard' ,
+              organization:'http://10.249.75.109:8080/org',
 //              organization:BACKEND_SERVER+'/org',
               orgmap: BACKEND_SERVER+'/metrics/org',
 


Reply via email to