http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeSystemTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeSystemTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeSystemTest.java
index 4f43ad0..5c63ec6 100755
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeSystemTest.java
+++ b/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeSystemTest.java
@@ -49,29 +49,22 @@ public class TypeSystemTest extends BaseTest {
     @Test
     public void testGetTypeNames() throws Exception {
-        getTypeSystem().defineEnumType("enum_test",
-                new EnumValue("0", 0),
-                new EnumValue("1", 1),
-                new EnumValue("2", 2),
+        getTypeSystem().defineEnumType("enum_test", new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
                 new EnumValue("3", 3));
         Assert.assertTrue(getTypeSystem().getTypeNames().contains("enum_test"));
     }
     @Test
     public void testIsRegistered() throws Exception {
-        getTypeSystem().defineEnumType("enum_test",
-                new EnumValue("0", 0),
-                new EnumValue("1", 1),
-                new EnumValue("2", 2),
+        getTypeSystem().defineEnumType("enum_test", new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
                 new EnumValue("3", 3));
         Assert.assertTrue(getTypeSystem().isRegistered("enum_test"));
     }
     @Test
     public void testGetTraitsNames() throws Exception {
-        HierarchicalTypeDefinition<TraitType> classificationTraitDefinition =
-                TypesUtil.createTraitTypeDef("Classification",
-                        ImmutableList.<String>of(),
+        HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil
+                .createTraitTypeDef("Classification", ImmutableList.<String>of(),
                         TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
         HierarchicalTypeDefinition<TraitType> piiTrait =
                 TypesUtil.createTraitTypeDef("PII", ImmutableList.<String>of());
@@ -86,23 +79,13 @@ public class TypeSystemTest extends BaseTest {
         HierarchicalTypeDefinition<TraitType> financeTrait =
                 TypesUtil.createTraitTypeDef("Finance", ImmutableList.<String>of());
-        getTypeSystem().defineTypes(
-                ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait,
-                        soxTrait, secTrait, financeTrait),
-                ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
+        getTypeSystem().defineTypes(ImmutableList.<StructTypeDefinition>of(), ImmutableList
+                .of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait, soxTrait, secTrait,
+                        financeTrait), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
         final ImmutableList<String> traitsNames = getTypeSystem().getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
         Assert.assertEquals(traitsNames.size(), 7);
-        List traits = Arrays.asList(new String[]{
-                "Classification",
-                "PII",
-                "PHI",
-                "PCI",
-                "SOX",
-                "SEC",
-                "Finance",
-        });
+        List traits = Arrays.asList(new String[]{"Classification", "PII", "PHI", "PCI", "SOX", "SEC", "Finance",});
         Assert.assertFalse(Collections.disjoint(traitsNames, traits));
     }
@@ -122,17 +105,16 @@ public class TypeSystemTest extends BaseTest {
         String structName = random();
         String attrType = random();
-        StructTypeDefinition structType = createStructTypeDef(structName,
-                createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
+        StructTypeDefinition structType =
+                createStructTypeDef(structName, createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
         String className = random();
-        HierarchicalTypeDefinition<ClassType> classType =
-                createClassTypeDef(className, ImmutableList.<String>of(),
-                        createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
+        HierarchicalTypeDefinition<ClassType> classType = createClassTypeDef(className, ImmutableList.<String>of(),
+                createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
         String traitName = random();
-        HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName,
-                ImmutableList.<String>of(), createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
+        HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName, ImmutableList.<String>of(),
+                createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
         ts.defineTypes(ImmutableList.of(structType), ImmutableList.of(traitType), ImmutableList.of(classType));
     }
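For readers skimming the commit, the two enum tests above boil down to one registration pattern: define a type on the TypeSystem, then assert it is visible via getTypeNames()/isRegistered(). Below is a minimal standalone sketch of that pattern, using only the calls shown in the hunks above; TypeSystem.getInstance() and the import locations are assumptions, not taken from the diff, and the tests themselves obtain the instance through BaseTest.getTypeSystem().

    import org.apache.atlas.typesystem.types.EnumValue;
    import org.apache.atlas.typesystem.types.TypeSystem;
    import org.testng.Assert;

    public class EnumRegistrationSketch {
        public static void main(String[] args) throws Exception {
            // Assumed singleton accessor; the tests use getTypeSystem() from BaseTest instead.
            TypeSystem ts = TypeSystem.getInstance();

            // Register an enum type exactly as the reformatted tests do ...
            ts.defineEnumType("enum_test", new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
                    new EnumValue("3", 3));

            // ... then verify it is visible to both lookup paths exercised by the tests.
            Assert.assertTrue(ts.getTypeNames().contains("enum_test"));
            Assert.assertTrue(ts.isRegistered("enum_test"));
        }
    }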
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/typesystem/src/test/java/org/apache/atlas/typesystem/types/ValidationTest.java ---------------------------------------------------------------------- diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/ValidationTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/ValidationTest.java index 29638de..67641cc 100644 --- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/ValidationTest.java +++ b/typesystem/src/test/java/org/apache/atlas/typesystem/types/ValidationTest.java @@ -25,11 +25,10 @@ import org.testng.annotations.Test; public class ValidationTest { @DataProvider(name = "attributeData") private Object[][] createAttributeData() { - return new String[][]{ - {null, "type"}, {"", "type"}, {"name", null}, {"name", ""}}; + return new String[][]{{null, "type"}, {"", "type"}, {"name", null}, {"name", ""}}; } - @Test (dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class}) + @Test(dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class}) public void testAttributes(String name, String type) { TypesUtil.createRequiredAttrDef(name, type); } @@ -39,7 +38,7 @@ public class ValidationTest { return new String[][]{{null}, {""}}; } - @Test (dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class}) + @Test(dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class}) public void testEnumValue(String name) { new EnumValue(name, 1); } @@ -50,7 +49,7 @@ public class ValidationTest { return new Object[][]{{null, value}, {"", value}, {"name"}}; } - @Test (dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class}) + @Test(dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class}) public void testEnumType(String name, EnumValue... values) { new EnumTypeDefinition(name, values); } @@ -61,7 +60,7 @@ public class ValidationTest { return new Object[][]{{null, value}, {"", value}, {"name"}}; } - @Test (dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class}) + @Test(dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class}) public void testStructType(String name, AttributeDefinition... 
values) { new StructTypeDefinition(name, values); } @@ -71,15 +70,17 @@ public class ValidationTest { return new Object[][]{{null}, {""}}; } - @Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class}) + @Test(dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class}) public void testClassType(String name) { - AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");; + AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type"); + ; TypesUtil.createClassTypeDef(name, ImmutableList.of("super"), value); } - @Test (dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class}) + @Test(dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class}) public void testTraitType(String name) { - AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");; + AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type"); + ; TypesUtil.createTraitTypeDef(name, ImmutableList.of("super"), value); } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/pom.xml ---------------------------------------------------------------------- diff --git a/webapp/pom.xml b/webapp/pom.xml index e200b69..cc8bf48 100755 --- a/webapp/pom.xml +++ b/webapp/pom.xml @@ -307,7 +307,7 @@ </systemProperty> <systemProperty> <name>truststore.file</name> - <value>${project.build.directory}/../../webapp/target/atlas.keystore </value> + <value>${project.build.directory}/../../webapp/target/atlas.keystore</value> </systemProperty> <systemProperty> <name>atlas.home</name> http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/Main.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/Main.java b/webapp/src/main/java/org/apache/atlas/Main.java index 3545794..ab2cb43 100755 --- a/webapp/src/main/java/org/apache/atlas/Main.java +++ b/webapp/src/main/java/org/apache/atlas/Main.java @@ -64,8 +64,7 @@ public final class Main { public static void main(String[] args) throws Exception { CommandLine cmd = parseArgs(args); - PropertiesConfiguration buildConfiguration = - new PropertiesConfiguration("atlas-buildinfo.properties"); + PropertiesConfiguration buildConfiguration = new PropertiesConfiguration("atlas-buildinfo.properties"); String appPath = "webapp/target/atlas-webapp-" + getProjectVersion(buildConfiguration); if (cmd.hasOption(APP_PATH)) { @@ -103,21 +102,18 @@ public final class Main { appPort = Integer.valueOf(cmd.getOptionValue(APP_PORT)); } else { // default : atlas.enableTLS is true - appPort = StringUtils.isEmpty(enableTLSFlag) - || enableTLSFlag.equals("true") ? 21443 : 21000; + appPort = StringUtils.isEmpty(enableTLSFlag) || enableTLSFlag.equals("true") ? 21443 : 21000; } return appPort; } private static boolean isTLSEnabled(String enableTLSFlag, int appPort) { - return Boolean.valueOf(StringUtils.isEmpty(enableTLSFlag) - ? System.getProperty("atlas.enableTLS", (appPort % 1000) == 443 ? "true" : "false") - : enableTLSFlag); + return Boolean.valueOf(StringUtils.isEmpty(enableTLSFlag) ? + System.getProperty("atlas.enableTLS", (appPort % 1000) == 443 ? 
"true" : "false") : enableTLSFlag); } - private static void showStartupInfo(PropertiesConfiguration buildConfiguration, - boolean enableTLS, int appPort) { + private static void showStartupInfo(PropertiesConfiguration buildConfiguration, boolean enableTLS, int appPort) { StringBuilder buffer = new StringBuilder(); buffer.append("\n############################################"); buffer.append("############################################"); http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java index 88b14c9..3da7b04 100755 --- a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java +++ b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java @@ -79,10 +79,9 @@ public class QuickStart { private static final String LOAD_PROCESS_TYPE = "LoadProcess"; private static final String STORAGE_DESC_TYPE = "StorageDesc"; - private static final String[] TYPES = { - DATABASE_TYPE, TABLE_TYPE, STORAGE_DESC_TYPE, COLUMN_TYPE, LOAD_PROCESS_TYPE, VIEW_TYPE, - "JdbcAccess", "ETL", "Metric", "PII", "Fact", "Dimension" - }; + private static final String[] TYPES = + {DATABASE_TYPE, TABLE_TYPE, STORAGE_DESC_TYPE, COLUMN_TYPE, LOAD_PROCESS_TYPE, VIEW_TYPE, "JdbcAccess", + "ETL", "Metric", "PII", "Fact", "Dimension"}; private final AtlasClient metadataServiceClient; @@ -102,97 +101,62 @@ public class QuickStart { } TypesDef createTypeDefinitions() throws Exception { - HierarchicalTypeDefinition<ClassType> dbClsDef - = TypesUtil.createClassTypeDef(DATABASE_TYPE, null, - attrDef("name", DataTypes.STRING_TYPE), - attrDef("description", DataTypes.STRING_TYPE), - attrDef("locationUri", DataTypes.STRING_TYPE), - attrDef("owner", DataTypes.STRING_TYPE), - attrDef("createTime", DataTypes.INT_TYPE) - ); - - HierarchicalTypeDefinition<ClassType> storageDescClsDef = - TypesUtil.createClassTypeDef(STORAGE_DESC_TYPE, null, - attrDef("location", DataTypes.STRING_TYPE), - attrDef("inputFormat", DataTypes.STRING_TYPE), - attrDef("outputFormat", DataTypes.STRING_TYPE), - attrDef("compressed", DataTypes.STRING_TYPE, - Multiplicity.REQUIRED, false, null) - ); - - HierarchicalTypeDefinition<ClassType> columnClsDef = - TypesUtil.createClassTypeDef(COLUMN_TYPE, null, - attrDef("name", DataTypes.STRING_TYPE), - attrDef("dataType", DataTypes.STRING_TYPE), - attrDef("comment", DataTypes.STRING_TYPE) - ); - - HierarchicalTypeDefinition<ClassType> tblClsDef = - TypesUtil.createClassTypeDef(TABLE_TYPE, ImmutableList.of("DataSet"), - new AttributeDefinition("db", DATABASE_TYPE, - Multiplicity.REQUIRED, false, null), - new AttributeDefinition("sd", STORAGE_DESC_TYPE, - Multiplicity.REQUIRED, true, null), - attrDef("owner", DataTypes.STRING_TYPE), - attrDef("createTime", DataTypes.INT_TYPE), - attrDef("lastAccessTime", DataTypes.INT_TYPE), - attrDef("retention", DataTypes.INT_TYPE), + HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil + .createClassTypeDef(DATABASE_TYPE, null, attrDef("name", DataTypes.STRING_TYPE), + attrDef("description", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE), + attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.INT_TYPE)); + + HierarchicalTypeDefinition<ClassType> storageDescClsDef = TypesUtil + .createClassTypeDef(STORAGE_DESC_TYPE, null, attrDef("location", 
DataTypes.STRING_TYPE), + attrDef("inputFormat", DataTypes.STRING_TYPE), attrDef("outputFormat", DataTypes.STRING_TYPE), + attrDef("compressed", DataTypes.STRING_TYPE, Multiplicity.REQUIRED, false, null)); + + HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil + .createClassTypeDef(COLUMN_TYPE, null, attrDef("name", DataTypes.STRING_TYPE), + attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE)); + + HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil + .createClassTypeDef(TABLE_TYPE, ImmutableList.of("DataSet"), + new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null), + new AttributeDefinition("sd", STORAGE_DESC_TYPE, Multiplicity.REQUIRED, true, null), + attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.INT_TYPE), + attrDef("lastAccessTime", DataTypes.INT_TYPE), attrDef("retention", DataTypes.INT_TYPE), attrDef("viewOriginalText", DataTypes.STRING_TYPE), - attrDef("viewExpandedText", DataTypes.STRING_TYPE), - attrDef("tableType", DataTypes.STRING_TYPE), + attrDef("viewExpandedText", DataTypes.STRING_TYPE), attrDef("tableType", DataTypes.STRING_TYPE), attrDef("temporary", DataTypes.BOOLEAN_TYPE), - new AttributeDefinition("columns", - DataTypes.arrayTypeName(COLUMN_TYPE), - Multiplicity.COLLECTION, true, null) - ); - - HierarchicalTypeDefinition<ClassType> loadProcessClsDef = - TypesUtil.createClassTypeDef(LOAD_PROCESS_TYPE, ImmutableList.of("Process"), - attrDef("userName", DataTypes.STRING_TYPE), - attrDef("startTime", DataTypes.INT_TYPE), + new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE), + Multiplicity.COLLECTION, true, null)); + + HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil + .createClassTypeDef(LOAD_PROCESS_TYPE, ImmutableList.of("Process"), + attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.INT_TYPE), attrDef("endTime", DataTypes.INT_TYPE), attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED), attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED), attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED), - attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED) - ); - - HierarchicalTypeDefinition<ClassType> viewClsDef = - TypesUtil.createClassTypeDef(VIEW_TYPE, null, - attrDef("name", DataTypes.STRING_TYPE), - new AttributeDefinition("db", DATABASE_TYPE, - Multiplicity.REQUIRED, false, null), - new AttributeDefinition("inputTables", - DataTypes.arrayTypeName(TABLE_TYPE), - Multiplicity.COLLECTION, false, null) - ); - - HierarchicalTypeDefinition<TraitType> dimTraitDef = - TypesUtil.createTraitTypeDef("Dimension", null); - - HierarchicalTypeDefinition<TraitType> factTraitDef = - TypesUtil.createTraitTypeDef("Fact", null); - - HierarchicalTypeDefinition<TraitType> piiTraitDef = - TypesUtil.createTraitTypeDef("PII", null); - - HierarchicalTypeDefinition<TraitType> metricTraitDef = - TypesUtil.createTraitTypeDef("Metric", null); - - HierarchicalTypeDefinition<TraitType> etlTraitDef = - TypesUtil.createTraitTypeDef("ETL", null); - - HierarchicalTypeDefinition<TraitType> jdbcTraitDef = - TypesUtil.createTraitTypeDef("JdbcAccess", null); - - return TypeUtils.getTypesDef( - ImmutableList.<EnumTypeDefinition>of(), - ImmutableList.<StructTypeDefinition>of(), - ImmutableList.of(dimTraitDef, factTraitDef, - piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef), - ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef, - tblClsDef, loadProcessClsDef, viewClsDef) - ); + 
attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED)); + + HierarchicalTypeDefinition<ClassType> viewClsDef = TypesUtil + .createClassTypeDef(VIEW_TYPE, null, attrDef("name", DataTypes.STRING_TYPE), + new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null), + new AttributeDefinition("inputTables", DataTypes.arrayTypeName(TABLE_TYPE), + Multiplicity.COLLECTION, false, null)); + + HierarchicalTypeDefinition<TraitType> dimTraitDef = TypesUtil.createTraitTypeDef("Dimension", null); + + HierarchicalTypeDefinition<TraitType> factTraitDef = TypesUtil.createTraitTypeDef("Fact", null); + + HierarchicalTypeDefinition<TraitType> piiTraitDef = TypesUtil.createTraitTypeDef("PII", null); + + HierarchicalTypeDefinition<TraitType> metricTraitDef = TypesUtil.createTraitTypeDef("Metric", null); + + HierarchicalTypeDefinition<TraitType> etlTraitDef = TypesUtil.createTraitTypeDef("ETL", null); + + HierarchicalTypeDefinition<TraitType> jdbcTraitDef = TypesUtil.createTraitTypeDef("JdbcAccess", null); + + return TypeUtils.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(), + ImmutableList.of(dimTraitDef, factTraitDef, piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef), + ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef, tblClsDef, loadProcessClsDef, viewClsDef)); } AttributeDefinition attrDef(String name, IDataType dT) { @@ -203,86 +167,73 @@ public class QuickStart { return attrDef(name, dT, m, false, null); } - AttributeDefinition attrDef(String name, IDataType dT, - Multiplicity m, boolean isComposite, String reverseAttributeName) { + AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, boolean isComposite, + String reverseAttributeName) { Preconditions.checkNotNull(name); Preconditions.checkNotNull(dT); return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName); } void createEntities() throws Exception { - Id salesDB = database( - "Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales"); + Id salesDB = database("Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales"); - Referenceable sd = rawStorageDescriptor("hdfs://host:8000/apps/warehouse/sales", - "TextInputFormat", "TextOutputFormat", true); + Referenceable sd = + rawStorageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", + true); - List<Referenceable> salesFactColumns = ImmutableList.of( - rawColumn("time_id", "int", "time id"), - rawColumn("product_id", "int", "product id"), - rawColumn("customer_id", "int", "customer id", "PII"), - rawColumn("sales", "double", "product id", "Metric") - ); + List<Referenceable> salesFactColumns = ImmutableList + .of(rawColumn("time_id", "int", "time id"), rawColumn("product_id", "int", "product id"), + rawColumn("customer_id", "int", "customer id", "PII"), + rawColumn("sales", "double", "product id", "Metric")); - Id salesFact = table("sales_fact", "sales fact table", - salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact"); + Id salesFact = table("sales_fact", "sales fact table", salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact"); - List<Referenceable> productDimColumns = ImmutableList.of( - rawColumn("product_id", "int", "product id"), - rawColumn("product_name", "string", "product name"), - rawColumn("brand_name", "int", "brand name") - ); + List<Referenceable> productDimColumns = ImmutableList + .of(rawColumn("product_id", "int", "product id"), rawColumn("product_name", 
"string", "product name"), + rawColumn("brand_name", "int", "brand name")); - Id productDim = table("product_dim", "product dimension table", - salesDB, sd, "John Doe", "Managed", productDimColumns, "Dimension"); + Id productDim = + table("product_dim", "product dimension table", salesDB, sd, "John Doe", "Managed", productDimColumns, + "Dimension"); - List<Referenceable> timeDimColumns = ImmutableList.of( - rawColumn("time_id", "int", "time id"), - rawColumn("dayOfYear", "int", "day Of Year"), - rawColumn("weekDay", "int", "week Day") - ); + List<Referenceable> timeDimColumns = ImmutableList + .of(rawColumn("time_id", "int", "time id"), rawColumn("dayOfYear", "int", "day Of Year"), + rawColumn("weekDay", "int", "week Day")); - Id timeDim = table("time_dim", "time dimension table", - salesDB, sd, "John Doe", "External", timeDimColumns, "Dimension"); + Id timeDim = table("time_dim", "time dimension table", salesDB, sd, "John Doe", "External", timeDimColumns, + "Dimension"); - List<Referenceable> customerDimColumns = ImmutableList.of( - rawColumn("customer_id", "int", "customer id", "PII"), - rawColumn("name", "string", "customer name", "PII"), - rawColumn("address", "string", "customer address", "PII") - ); + List<Referenceable> customerDimColumns = ImmutableList.of(rawColumn("customer_id", "int", "customer id", "PII"), + rawColumn("name", "string", "customer name", "PII"), + rawColumn("address", "string", "customer address", "PII")); - Id customerDim = table("customer_dim", "customer dimension table", - salesDB, sd, "fetl", "External", customerDimColumns, "Dimension"); + Id customerDim = + table("customer_dim", "customer dimension table", salesDB, sd, "fetl", "External", customerDimColumns, + "Dimension"); - Id reportingDB = database("Reporting", "reporting database", "Jane BI", - "hdfs://host:8000/apps/warehouse/reporting"); + Id reportingDB = + database("Reporting", "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting"); - Id salesFactDaily = table("sales_fact_daily_mv", - "sales fact daily materialized view", reportingDB, sd, - "Joe BI", "Managed", salesFactColumns, "Metric"); + Id salesFactDaily = + table("sales_fact_daily_mv", "sales fact daily materialized view", reportingDB, sd, "Joe BI", "Managed", + salesFactColumns, "Metric"); - loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL", - ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily), - "create table as select ", "plan", "id", "graph", - "ETL"); + loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL", ImmutableList.of(salesFact, timeDim), + ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL"); - view("product_dim_view", reportingDB, - ImmutableList.of(productDim), "Dimension", "JdbcAccess"); + view("product_dim_view", reportingDB, ImmutableList.of(productDim), "Dimension", "JdbcAccess"); - view("customer_dim_view", reportingDB, - ImmutableList.of(customerDim), "Dimension", "JdbcAccess"); + view("customer_dim_view", reportingDB, ImmutableList.of(customerDim), "Dimension", "JdbcAccess"); - Id salesFactMonthly = table("sales_fact_monthly_mv", - "sales fact monthly materialized view", - reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric"); + Id salesFactMonthly = + table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, sd, "Jane BI", + "Managed", salesFactColumns, "Metric"); - loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", - 
ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly), - "create table as select ", "plan", "id", "graph", - "ETL"); + loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", ImmutableList.of(salesFactDaily), + ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL"); } private Id createInstance(Referenceable referenceable) throws Exception { @@ -298,9 +249,8 @@ public class QuickStart { return new Id(guid, referenceable.getId().getVersion(), referenceable.getTypeName()); } - Id database(String name, String description, - String owner, String locationUri, - String... traitNames) throws Exception { + Id database(String name, String description, String owner, String locationUri, String... traitNames) + throws Exception { Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames); referenceable.set("name", name); referenceable.set("description", description); @@ -311,9 +261,8 @@ public class QuickStart { return createInstance(referenceable); } - Referenceable rawStorageDescriptor(String location, String inputFormat, - String outputFormat, - boolean compressed) throws Exception { + Referenceable rawStorageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed) + throws Exception { Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE); referenceable.set("location", location); referenceable.set("inputFormat", inputFormat); @@ -323,8 +272,7 @@ public class QuickStart { return referenceable; } - Referenceable rawColumn(String name, String dataType, String comment, - String... traitNames) throws Exception { + Referenceable rawColumn(String name, String dataType, String comment, String... traitNames) throws Exception { Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames); referenceable.set("name", name); referenceable.set("dataType", dataType); @@ -333,11 +281,8 @@ public class QuickStart { return referenceable; } - Id table(String name, String description, - Id dbId, Referenceable sd, - String owner, String tableType, - List<Referenceable> columns, - String... traitNames) throws Exception { + Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType, + List<Referenceable> columns, String... traitNames) throws Exception { Referenceable referenceable = new Referenceable(TABLE_TYPE, traitNames); referenceable.set("name", name); referenceable.set("description", description); @@ -353,12 +298,9 @@ public class QuickStart { return createInstance(referenceable); } - Id loadProcess(String name, String description, String user, - List<Id> inputTables, - List<Id> outputTables, - String queryText, String queryPlan, - String queryId, String queryGraph, - String... traitNames) throws Exception { + Id loadProcess(String name, String description, String user, List<Id> inputTables, List<Id> outputTables, + String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames) + throws Exception { Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames); // super type attributes referenceable.set("name", name); @@ -378,9 +320,7 @@ public class QuickStart { return createInstance(referenceable); } - Id view(String name, Id dbId, - List<Id> inputTables, - String... traitNames) throws Exception { + Id view(String name, Id dbId, List<Id> inputTables, String... 
traitNames) throws Exception { Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames); referenceable.set("name", name); referenceable.set("db", dbId); @@ -398,69 +338,51 @@ public class QuickStart { } private String[] getDSLQueries() { - return new String[]{ - "from DB", - "DB", - "DB where name=\"Reporting\"", - "DB where DB.name=\"Reporting\"", - "DB name = \"Reporting\"", - "DB DB.name = \"Reporting\"", - "DB where name=\"Reporting\" select name, owner", - "DB where DB.name=\"Reporting\" select name, owner", - "DB has name", - "DB where DB has name", - "DB, Table", - "DB is JdbcAccess", + return new String[]{"from DB", "DB", "DB where name=\"Reporting\"", "DB where DB.name=\"Reporting\"", + "DB name = \"Reporting\"", "DB DB.name = \"Reporting\"", + "DB where name=\"Reporting\" select name, owner", "DB where DB.name=\"Reporting\" select name, owner", + "DB has name", "DB where DB has name", "DB, Table", "DB is JdbcAccess", /* "DB, hive_process has name", "DB as db1, Table where db1.name = \"Reporting\"", "DB where DB.name=\"Reporting\" and DB.createTime < " + System.currentTimeMillis()}, */ - "from Table", - "Table", - "Table is Dimension", - "Column where Column isa PII", - "View is Dimension", + "from Table", "Table", "Table is Dimension", "Column where Column isa PII", "View is Dimension", /*"Column where Column isa PII select Column.name",*/ - "Column select Column.name", - "Column select name", - "Column where Column.name=\"customer_id\"", - "from Table select Table.name", - "DB where (name = \"Reporting\")", - "DB where (name = \"Reporting\") select name as _col_0, owner as _col_1", - "DB where DB is JdbcAccess", - "DB where DB has name", - "DB Table", - "DB where DB has name", - "DB as db1 Table where (db1.name = \"Reporting\")", - "DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", + "Column select Column.name", "Column select name", "Column where Column.name=\"customer_id\"", + "from Table select Table.name", "DB where (name = \"Reporting\")", + "DB where (name = \"Reporting\") select name as _col_0, owner as _col_1", "DB where DB is JdbcAccess", + "DB where DB has name", "DB Table", "DB where DB has name", + "DB as db1 Table where (db1.name = \"Reporting\")", + "DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", /* todo: does not work "DB where (name = \"Reporting\") and ((createTime + 1) > 0)", - "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName", - "DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName", - "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName", - "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName", + "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name + as dbName, tab.name as tabName", + "DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as + dbName, tab.name as tabName", + "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner + select db1.name as dbName, tab.name as tabName", + "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or 
db1 has owner + select db1.name as dbName, tab.name as tabName", */ - // trait searches - "Dimension", + // trait searches + "Dimension", /*"Fact", - todo: does not work*/ - "JdbcAccess", - "ETL", - "Metric", - "PII", + "JdbcAccess", "ETL", "Metric", "PII", /* // Lineage - todo - fix this, its not working "Table hive_process outputTables", "Table loop (hive_process outputTables)", "Table as _loop0 loop (hive_process outputTables) withPath", - "Table as src loop (hive_process outputTables) as dest select src.name as srcTable, dest.name as destTable withPath", + "Table as src loop (hive_process outputTables) as dest select src.name as srcTable, dest.name as + destTable withPath", */ - "Table where name=\"sales_fact\", columns", - "Table where name=\"sales_fact\", columns as column select column.name, column.dataType, column.comment", - "from DataSet", - "from Process", - }; + "Table where name=\"sales_fact\", columns", + "Table where name=\"sales_fact\", columns as column select column.name, column.dataType, column" + + ".comment", + "from DataSet", "from Process",}; } private void search() throws Exception { http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java b/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java index 1a512a1..b714888 100755 --- a/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java +++ b/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java @@ -32,19 +32,20 @@ import static org.apache.atlas.security.SecurityProperties.SERVER_CERT_PASSWORD_ import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY; /** - * A utility class for generating a credential provider containing the entries required for supporting the SSL implementation + * A utility class for generating a credential provider containing the entries required for supporting the SSL + * implementation * of the DGC server. */ public class CredentialProviderUtility { - private static final String[] KEYS = new String[] {KEYSTORE_PASSWORD_KEY, - TRUSTSTORE_PASSWORD_KEY, SERVER_CERT_PASSWORD_KEY}; + private static final String[] KEYS = + new String[]{KEYSTORE_PASSWORD_KEY, TRUSTSTORE_PASSWORD_KEY, SERVER_CERT_PASSWORD_KEY}; public static abstract class TextDevice { public abstract void printf(String fmt, Object... params); - public abstract String readLine(String fmt, Object ... args); + public abstract String readLine(String fmt, Object... args); - public abstract char[] readPassword(String fmt, Object ... args); + public abstract char[] readPassword(String fmt, Object... args); } @@ -57,12 +58,12 @@ public class CredentialProviderUtility { } @Override - public String readLine(String fmt, Object ... args) { + public String readLine(String fmt, Object... args) { return console.readLine(fmt, args); } @Override - public char[] readPassword(String fmt, Object ... args) { + public char[] readPassword(String fmt, Object... args) { return console.readPassword(fmt, args); } }; @@ -101,7 +102,7 @@ public class CredentialProviderUtility { * Retrieves a password from the command line. * @param textDevice the system console. * @param key the password key/alias. - * @return the password. + * @return the password. 
*/ private static char[] getPassword(TextDevice textDevice, String key) { boolean noMatch; @@ -113,7 +114,9 @@ public class CredentialProviderUtility { passwd2 = textDevice.readPassword("Please enter the password value for %s again:", key); noMatch = !Arrays.equals(passwd1, passwd2); if (noMatch) { - if (passwd1 != null) Arrays.fill(passwd1, ' '); + if (passwd1 != null) { + Arrays.fill(passwd1, ' '); + } textDevice.printf("Password entries don't match. Please try again.\n"); } else { if (passwd1.length == 0) { @@ -123,7 +126,9 @@ public class CredentialProviderUtility { cred = passwd1; } } - if (passwd2 != null) Arrays.fill(passwd2, ' '); + if (passwd2 != null) { + Arrays.fill(passwd2, ' '); + } } while (noMatch); return cred; } @@ -131,15 +136,17 @@ public class CredentialProviderUtility { /**\ * Returns a credential provider for the entered JKS path. * @param textDevice the system console. - * @return the Credential provider + * @return the Credential provider * @throws IOException */ private static CredentialProvider getCredentialProvider(TextDevice textDevice) throws IOException { String providerPath = textDevice.readLine("Please enter the full path to the credential provider:"); File file = new File(providerPath); if (file.exists()) { - textDevice.printf("%s already exists. You will need to specify whether existing entries should be overwritten " + - "(default is 'yes')\n", providerPath); + textDevice + .printf("%s already exists. You will need to specify whether existing entries should be " + + "overwritten " + + "(default is 'yes')\n", providerPath); } String providerURI = JavaKeyStoreProvider.SCHEME_NAME + "://file" + providerPath; Configuration conf = new Configuration(false); http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/errors/LoggingExceptionMapper.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/web/errors/LoggingExceptionMapper.java b/webapp/src/main/java/org/apache/atlas/web/errors/LoggingExceptionMapper.java index bdf84a6..9830269 100755 --- a/webapp/src/main/java/org/apache/atlas/web/errors/LoggingExceptionMapper.java +++ b/webapp/src/main/java/org/apache/atlas/web/errors/LoggingExceptionMapper.java @@ -41,15 +41,12 @@ public class LoggingExceptionMapper<E extends Throwable> implements ExceptionMap final long id = ThreadLocalRandom.current().nextLong(); logException(id, exception); - return Response.serverError() - .entity(formatErrorMessage(id, exception)) - .build(); + return Response.serverError().entity(formatErrorMessage(id, exception)).build(); } @SuppressWarnings("UnusedParameters") protected String formatErrorMessage(long id, E exception) { - return String.format( - "There was an error processing your request. It has been logged (ID %016x).", id); + return String.format("There was an error processing your request. 
It has been logged (ID %016x).", id); } protected void logException(long id, E exception) { http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java b/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java index 7187ba4..703f4ee 100755 --- a/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java +++ b/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java @@ -53,9 +53,8 @@ public class AuditFilter implements Filter { } @Override - public void doFilter(ServletRequest request, - ServletResponse response, - FilterChain filterChain) throws IOException, ServletException { + public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain) + throws IOException, ServletException { final String requestTimeISO9601 = DateTimeHelper.formatDateUTC(new Date()); final HttpServletRequest httpRequest = (HttpServletRequest) request; final String requestId = UUID.randomUUID().toString(); @@ -85,8 +84,8 @@ public class AuditFilter implements Filter { final String whatURL = Servlets.getRequestURL(httpRequest); final String whatAddrs = httpRequest.getLocalAddr(); - LOG.debug("Audit: {}/{} performed request {} {} ({}) at time {}", - who, fromAddress, whatRequest, whatURL, whatAddrs, whenISO9601); + LOG.debug("Audit: {}/{} performed request {} {} ({}) at time {}", who, fromAddress, whatRequest, whatURL, + whatAddrs, whenISO9601); audit(who, fromAddress, fromHost, whatURL, whatAddrs, whenISO9601); } @@ -96,10 +95,10 @@ public class AuditFilter implements Filter { return userFromRequest == null ? 
"UNKNOWN" : userFromRequest; } - private void audit(String who, String fromAddress, String fromHost, String whatURL, - String whatAddrs, String whenISO9601) { - AUDIT_LOG.info("Audit: {}/{}-{} performed request {} ({}) at time {}", - who, fromAddress, fromHost, whatURL, whatAddrs, whenISO9601); + private void audit(String who, String fromAddress, String fromHost, String whatURL, String whatAddrs, + String whenISO9601) { + AUDIT_LOG.info("Audit: {}/{}-{} performed request {} ({}) at time {}", who, fromAddress, fromHost, whatURL, + whatAddrs, whenISO9601); } @Override http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java b/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java index 926287b..18ce17c 100755 --- a/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java +++ b/webapp/src/main/java/org/apache/atlas/web/listeners/GuiceServletConfig.java @@ -24,12 +24,12 @@ import com.google.inject.servlet.GuiceServletContextListener; import com.sun.jersey.api.core.PackagesResourceConfig; import com.sun.jersey.guice.JerseyServletModule; import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; -import org.apache.atlas.AtlasException; import org.apache.atlas.AtlasClient; +import org.apache.atlas.AtlasException; import org.apache.atlas.PropertiesUtil; import org.apache.atlas.RepositoryMetadataModule; -import org.apache.atlas.web.filters.AuditFilter; import org.apache.atlas.web.filters.AtlasAuthenticationFilter; +import org.apache.atlas.web.filters.AuditFilter; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.slf4j.Logger; @@ -57,9 +57,7 @@ public class GuiceServletConfig extends GuiceServletContextListener { * .html */ if (injector == null) { - injector = Guice.createInjector( - new RepositoryMetadataModule(), - new JerseyServletModule() { + injector = Guice.createInjector(new RepositoryMetadataModule(), new JerseyServletModule() { @Override protected void configureServlets() { filter("/*").through(AuditFilter.class); http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/listeners/LoginProcessor.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/web/listeners/LoginProcessor.java b/webapp/src/main/java/org/apache/atlas/web/listeners/LoginProcessor.java index 3c437b0..5f180f5 100644 --- a/webapp/src/main/java/org/apache/atlas/web/listeners/LoginProcessor.java +++ b/webapp/src/main/java/org/apache/atlas/web/listeners/LoginProcessor.java @@ -35,10 +35,9 @@ import java.net.UnknownHostException; /** * A class capable of performing a simple or kerberos login. 
*/ -public class LoginProcessor { +public class LoginProcessor { - private static final Logger LOG = LoggerFactory - .getLogger(LoginProcessor.class); + private static final Logger LOG = LoggerFactory.getLogger(LoginProcessor.class); public static final String METADATA_AUTHENTICATION_PREFIX = "atlas.authentication."; public static final String AUTHENTICATION_METHOD = METADATA_AUTHENTICATION_PREFIX + "method"; public static final String AUTHENTICATION_PRINCIPAL = METADATA_AUTHENTICATION_PREFIX + "principal"; @@ -107,9 +106,9 @@ public class LoginProcessor { LOG.info("No authentication method configured. Defaulting to simple authentication"); authMethod = "simple"; } - SecurityUtil.setAuthenticationMethod( - UserGroupInformation.AuthenticationMethod.valueOf(authMethod.toUpperCase()), - hadoopConfig); + SecurityUtil + .setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.valueOf(authMethod.toUpperCase()), + hadoopConfig); } /** @@ -146,7 +145,7 @@ public class LoginProcessor { /** * Uses a hadoop shell to discern whether a hadoop cluster is available/configured. - * @return true if a hadoop cluster is detected. + * @return true if a hadoop cluster is detected. */ protected boolean isHadoopCluster() { boolean isHadoopCluster = false; http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java b/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java index f5b555e..9cfdb76 100755 --- a/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java +++ b/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java @@ -35,8 +35,7 @@ public abstract class AbstractParam<T> { * * @param input an input value from a client request */ - @SuppressWarnings({"AbstractMethodCallInConstructor", - "OverriddenMethodCallDuringObjectConstruction"}) + @SuppressWarnings({"AbstractMethodCallInConstructor", "OverriddenMethodCallDuringObjectConstruction"}) protected AbstractParam(String input) { try { this.value = parse(input); @@ -57,10 +56,7 @@ public abstract class AbstractParam<T> { * @return the {@link javax.ws.rs.core.Response} to be sent to the client */ protected Response error(String input, Exception e) { - return Response.status(getErrorStatus()) - .entity(errorMessage(input, e)) - .type(mediaType()) - .build(); + return Response.status(getErrorStatus()).entity(errorMessage(input, e)).type(mediaType()).build(); } /** http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java index 4ad27c8..38a4938 100755 --- a/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java +++ b/webapp/src/main/java/org/apache/atlas/web/resources/AdminResource.java @@ -80,13 +80,11 @@ public class AdminResource { public Response getVersion() { if (version == null) { try { - PropertiesConfiguration configProperties = - new PropertiesConfiguration("atlas-buildinfo.properties"); + PropertiesConfiguration configProperties = new PropertiesConfiguration("atlas-buildinfo.properties"); JSONObject response = new JSONObject(); response.put("Version", 
configProperties.getString("build.version", "UNKNOWN")); - response.put("Name", - configProperties.getString("project.name", "apache-atlas")); + response.put("Name", configProperties.getString("project.name", "apache-atlas")); response.put("Description", configProperties.getString("project.description", "Metadata Management and Data Governance Platform over Hadoop")); @@ -94,8 +92,7 @@ public class AdminResource { // response.put("Hadoop", VersionInfo.getVersion() + "-r" + VersionInfo.getRevision()); version = Response.ok(response).build(); } catch (JSONException | ConfigurationException e) { - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); } } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java index 2058c67..dcb460d 100755 --- a/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java +++ b/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java @@ -19,8 +19,8 @@ package org.apache.atlas.web.resources; import com.google.common.base.Preconditions; -import org.apache.atlas.AtlasException; import org.apache.atlas.AtlasClient; +import org.apache.atlas.AtlasException; import org.apache.atlas.ParamChecker; import org.apache.atlas.TypeNotFoundException; import org.apache.atlas.repository.EntityNotFoundException; @@ -101,23 +101,19 @@ public class EntityResource { JSONObject response = new JSONObject(); response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId()); response.put(AtlasClient.GUID, guid); - response.put(AtlasClient.DEFINITION, - metadataService.getEntityDefinition(guid)); + response.put(AtlasClient.DEFINITION, metadataService.getEntityDefinition(guid)); return Response.created(locationURI).entity(response).build(); - } catch(ValueConversionException ve) { + } catch (ValueConversionException ve) { LOG.error("Unable to persist entity instance due to a desrialization error ", ve); - throw new WebApplicationException( - Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST)); } catch (AtlasException | IllegalArgumentException e) { LOG.error("Unable to persist entity instance", e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); } catch (Throwable e) { LOG.error("Unable to persist entity instance", e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); } } @@ -144,24 +140,21 @@ public class EntityResource { response.put(AtlasClient.DEFINITION, entityDefinition); status = Response.Status.OK; } else { - response.put(AtlasClient.ERROR, Servlets.escapeJsonString( - String.format("An entity with GUID={%s} does not exist", guid))); + response.put(AtlasClient.ERROR, + Servlets.escapeJsonString(String.format("An entity with GUID={%s} does not exist", guid))); } 
return Response.status(status).entity(response).build(); } catch (EntityNotFoundException e) { LOG.error("An entity with GUID={} does not exist", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); } catch (AtlasException | IllegalArgumentException e) { LOG.error("Bad GUID={}", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); } catch (Throwable e) { LOG.error("Unable to get instance definition for GUID {}", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); } } @@ -188,16 +181,13 @@ public class EntityResource { return Response.ok(response).build(); } catch (NullPointerException e) { LOG.error("Entity type cannot be null", e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); } catch (AtlasException | IllegalArgumentException e) { LOG.error("Unable to get entity list for type {}", entityType, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); } catch (Throwable e) { LOG.error("Unable to get entity list for type {}", entityType, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); } } @@ -212,9 +202,8 @@ public class EntityResource { @Path("{guid}") @Consumes(Servlets.JSON_MEDIA_TYPE) @Produces(Servlets.JSON_MEDIA_TYPE) - public Response update(@PathParam("guid") String guid, - @QueryParam("property") String property, - @QueryParam("value") String value) { + public Response update(@PathParam("guid") String guid, @QueryParam("property") String property, + @QueryParam("value") String value) { try { Preconditions.checkNotNull(property, "Entity property cannot be null"); Preconditions.checkNotNull(value, "Entity value cannot be null"); @@ -226,20 +215,18 @@ public class EntityResource { return Response.ok(response).build(); } catch (EntityNotFoundException e) { LOG.error("An entity with GUID={} does not exist", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); } catch (AtlasException | IllegalArgumentException e) { LOG.error("Unable to add property {} to entity id {}", property, guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); } catch (Throwable e) { LOG.error("Unable to add property {} to entity id {}", property, guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); } } // Trait management functions + /** 
* Gets the list of trait names for a given entity represented by a guid. * @@ -263,16 +250,13 @@ public class EntityResource { return Response.ok(response).build(); } catch (EntityNotFoundException e) { LOG.error("An entity with GUID={} does not exist", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); } catch (AtlasException | IllegalArgumentException e) { LOG.error("Unable to get trait names for entity {}", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); } catch (Throwable e) { LOG.error("Unable to get trait names for entity {}", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); } } @@ -285,8 +269,7 @@ public class EntityResource { @Path("{guid}/traits") @Consumes(Servlets.JSON_MEDIA_TYPE) @Produces(Servlets.JSON_MEDIA_TYPE) - public Response addTrait(@Context HttpServletRequest request, - @PathParam("guid") String guid) { + public Response addTrait(@Context HttpServletRequest request, @PathParam("guid") String guid) { try { final String traitDefinition = Servlets.getRequestPayload(request); LOG.debug("Adding trait={} for entity={} ", traitDefinition, guid); @@ -302,16 +285,13 @@ public class EntityResource { return Response.created(locationURI).entity(response).build(); } catch (EntityNotFoundException | TypeNotFoundException e) { LOG.error("An entity with GUID={} does not exist", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); } catch (AtlasException | IllegalArgumentException e) { LOG.error("Unable to add trait for entity={}", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); } catch (Throwable e) { LOG.error("Unable to add trait for entity={}", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); } } @@ -325,9 +305,8 @@ public class EntityResource { @Path("{guid}/traits/{traitName}") @Consumes(Servlets.JSON_MEDIA_TYPE) @Produces(Servlets.JSON_MEDIA_TYPE) - public Response deleteTrait(@Context HttpServletRequest request, - @PathParam("guid") String guid, - @PathParam(TRAIT_NAME) String traitName) { + public Response deleteTrait(@Context HttpServletRequest request, @PathParam("guid") String guid, + @PathParam(TRAIT_NAME) String traitName) { LOG.debug("Deleting trait={} from entity={} ", traitName, guid); try { metadataService.deleteTrait(guid, traitName); @@ -340,16 +319,13 @@ public class EntityResource { return Response.ok(response).build(); } catch (EntityNotFoundException | TypeNotFoundException e) { LOG.error("An entity with GUID={} does not exist", guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); + throw new WebApplicationException(Servlets.getErrorResponse(e, 
Response.Status.NOT_FOUND)); } catch (AtlasException | IllegalArgumentException e) { LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); } catch (Throwable e) { LOG.error("Unable to delete trait name={} for entity={}", traitName, guid, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); } } } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java index ad6b508..7bcaf6b 100644 --- a/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java +++ b/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java @@ -71,8 +71,7 @@ public class HiveLineageResource { @Path("table/{tableName}/inputs/graph") @Consumes(Servlets.JSON_MEDIA_TYPE) @Produces(Servlets.JSON_MEDIA_TYPE) - public Response inputsGraph(@Context HttpServletRequest request, - @PathParam("tableName") String tableName) { + public Response inputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) { LOG.info("Fetching lineage inputs graph for tableName={}", tableName); try { @@ -87,16 +86,13 @@ public class HiveLineageResource { return Response.ok(response).build(); } catch (EntityNotFoundException e) { LOG.error("table entity not found for {}", tableName, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); } catch (DiscoveryException | IllegalArgumentException e) { LOG.error("Unable to get lineage inputs graph for table {}", tableName, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST)); } catch (Throwable e) { LOG.error("Unable to get lineage inputs graph for table {}", tableName, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); + throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR)); } } @@ -109,8 +105,7 @@ public class HiveLineageResource { @Path("table/{tableName}/outputs/graph") @Consumes(Servlets.JSON_MEDIA_TYPE) @Produces(Servlets.JSON_MEDIA_TYPE) - public Response outputsGraph(@Context HttpServletRequest request, - @PathParam("tableName") String tableName) { + public Response outputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) { LOG.info("Fetching lineage outputs graph for tableName={}", tableName); try { @@ -125,16 +120,13 @@ public class HiveLineageResource { return Response.ok(response).build(); } catch (EntityNotFoundException e) { LOG.error("table entity not found for {}", tableName, e); - throw new WebApplicationException( - Servlets.getErrorResponse(e, Response.Status.NOT_FOUND)); + throw new 

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java
index ad6b508..7bcaf6b 100644
--- a/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/HiveLineageResource.java
@@ -71,8 +71,7 @@ public class HiveLineageResource {
     @Path("table/{tableName}/inputs/graph")
     @Consumes(Servlets.JSON_MEDIA_TYPE)
     @Produces(Servlets.JSON_MEDIA_TYPE)
-    public Response inputsGraph(@Context HttpServletRequest request,
-                                @PathParam("tableName") String tableName) {
+    public Response inputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
         LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
 
         try {
@@ -87,16 +86,13 @@ public class HiveLineageResource {
             return Response.ok(response).build();
         } catch (EntityNotFoundException e) {
             LOG.error("table entity not found for {}", tableName, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
         } catch (DiscoveryException | IllegalArgumentException e) {
             LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
         } catch (Throwable e) {
             LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
         }
     }
 
@@ -109,8 +105,7 @@ public class HiveLineageResource {
     @Path("table/{tableName}/outputs/graph")
     @Consumes(Servlets.JSON_MEDIA_TYPE)
     @Produces(Servlets.JSON_MEDIA_TYPE)
-    public Response outputsGraph(@Context HttpServletRequest request,
-                                 @PathParam("tableName") String tableName) {
+    public Response outputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
         LOG.info("Fetching lineage outputs graph for tableName={}", tableName);
 
         try {
@@ -125,16 +120,13 @@ public class HiveLineageResource {
             return Response.ok(response).build();
         } catch (EntityNotFoundException e) {
             LOG.error("table entity not found for {}", tableName, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
         } catch (DiscoveryException | IllegalArgumentException e) {
             LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
         } catch (Throwable e) {
             LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
         }
     }
 
@@ -147,8 +139,7 @@ public class HiveLineageResource {
     @Path("table/{tableName}/schema")
    @Consumes(Servlets.JSON_MEDIA_TYPE)
     @Produces(Servlets.JSON_MEDIA_TYPE)
-    public Response schema(@Context HttpServletRequest request,
-                           @PathParam("tableName") String tableName) {
+    public Response schema(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
         LOG.info("Fetching schema for tableName={}", tableName);
 
         try {
@@ -163,16 +154,13 @@ public class HiveLineageResource {
             return Response.ok(response).build();
         } catch (EntityNotFoundException e) {
             LOG.error("table entity not found for {}", tableName, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
         } catch (DiscoveryException | IllegalArgumentException e) {
             LOG.error("Unable to get schema for table {}", tableName, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
         } catch (Throwable e) {
             LOG.error("Unable to get schema for table {}", tableName, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
         }
     }
 }
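
As with the entity resource, a compact client-side sketch of the lineage endpoints reformatted above is shown below. The base path /api/atlas/lineage/hive and the host/port are assumptions about the deployment, and a JAX-RS 2.x client (for example Jersey 2) is presumed to be available on the client classpath; none of this is part of the change itself.

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.core.MediaType;

    public class LineageSketch {
        public static void main(String[] args) {
            String tableName = args[0];
            Client client = ClientBuilder.newClient();
            try {
                // Mirrors @Path("table/{tableName}/inputs/graph") above; the base URI is an assumption
                String inputsGraph = client
                        .target("http://localhost:21000/api/atlas/lineage/hive")
                        .path("table").path(tableName).path("inputs/graph")
                        .request(MediaType.APPLICATION_JSON)
                        .get(String.class); // a 404 from the resource surfaces here as javax.ws.rs.NotFoundException
                System.out.println(inputsGraph);
            } finally {
                client.close();
            }
        }
    }

The outputs/graph and schema endpoints follow the same shape; only the final path segments differ.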

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
index b76eb7f..77ac3df 100755
--- a/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
@@ -85,11 +85,9 @@ public class MetadataDiscoveryResource {
             if (query.startsWith("g.")) { // raw gremlin query
                 return searchUsingGremlinQuery(query);
             }
-
+
             final String jsonResultStr = discoveryService.searchByDSL(query);
-            response = new DSLJSONResponseBuilder().results(jsonResultStr)
-                    .query(query)
-                    .build();
+            response = new DSLJSONResponseBuilder().results(jsonResultStr).query(query).build();
 
         } catch (IllegalArgumentException e) {
             LOG.error("Unable to get entity list for empty query", e);
@@ -99,22 +97,18 @@ public class MetadataDiscoveryResource {
             try {
                 //fall back to full-text
                 final String jsonResultStr = discoveryService.searchByFullText(query);
-                response = new FullTextJSonResponseBuilder().results(jsonResultStr)
-                        .query(query)
-                        .build();
+                response = new FullTextJSonResponseBuilder().results(jsonResultStr).query(query).build();
             } catch (DiscoveryException | IllegalArgumentException e) {
                 LOG.error("Unable to get entity list for query {}", query, e);
                 throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
-            } catch(Throwable e) {
+            } catch (Throwable e) {
                 LOG.error("Unable to get entity list for query {}", query, e);
-                throw new WebApplicationException(
-                        Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+                throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
             }
         }
 
-        return Response.ok(response)
-                .build();
+        return Response.ok(response).build();
     }
 
@@ -133,20 +127,15 @@ public class MetadataDiscoveryResource {
             ParamChecker.notEmpty(dslQuery, "dslQuery cannot be null");
             final String jsonResultStr = discoveryService.searchByDSL(dslQuery);
-            JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr)
-                    .query(dslQuery)
-                    .build();
+            JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr).query(dslQuery).build();
 
-            return Response.ok(response)
-                    .build();
+            return Response.ok(response).build();
         } catch (DiscoveryException | IllegalArgumentException e) {
             LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
-        } catch(Throwable e) {
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
+        } catch (Throwable e) {
             LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
         }
     }
 
@@ -163,8 +152,7 @@ public class MetadataDiscoveryResource {
     public Response searchUsingGremlinQuery(@QueryParam("query") String gremlinQuery) {
         try {
             ParamChecker.notEmpty(gremlinQuery, "gremlinQuery cannot be null or empty");
-            final List<Map<String, String>> results = discoveryService
-                    .searchByGremlin(gremlinQuery);
+            final List<Map<String, String>> results = discoveryService.searchByGremlin(gremlinQuery);
 
             JSONObject response = new JSONObject();
             response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
@@ -178,16 +166,13 @@ public class MetadataDiscoveryResource {
             response.put(AtlasClient.RESULTS, list);
             response.put(AtlasClient.COUNT, list.length());
 
-            return Response.ok(response)
-                    .build();
+            return Response.ok(response).build();
         } catch (DiscoveryException | IllegalArgumentException e) {
             LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
-        } catch(Throwable e) {
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
+        } catch (Throwable e) {
             LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
-            throw new WebApplicationException(
-                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+            throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
         }
     }
 
@@ -207,15 +192,12 @@ public class MetadataDiscoveryResource {
             final String jsonResultStr = discoveryService.searchByFullText(query);
             JSONArray rowsJsonArr = new JSONArray(jsonResultStr);
 
-            JSONObject response = new FullTextJSonResponseBuilder().results(rowsJsonArr)
-                    .query(query)
-                    .build();
-            return Response.ok(response)
-                    .build();
+            JSONObject response = new FullTextJSonResponseBuilder().results(rowsJsonArr).query(query).build();
+            return Response.ok(response).build();
         } catch (DiscoveryException | IllegalArgumentException e) {
             LOG.error("Unable to get entity list for query {}", query, e);
             throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
-        } catch(Throwable e) {
+        } catch (Throwable e) {
             LOG.error("Unable to get entity list for query {}", query, e);
             throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
         }
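
A corresponding sketch for the discovery endpoints above follows. The /api/atlas/discovery/search/dsl path, the query-parameter name "query", the host/port, and the sample DSL string are all assumptions for illustration rather than something confirmed by these hunks; the client URL-encodes the query so the server-side ParamChecker sees a non-empty value.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;

    public class DslSearchSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical DSL query; any valid Atlas DSL expression would do
            String dsl = URLEncoder.encode("DB where name=\"Sales\"", "UTF-8");
            // Assumed deployment path for searchUsingQueryDSL
            URL url = new URL("http://localhost:21000/api/atlas/discovery/search/dsl?query=" + dsl);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestProperty("Accept", "application/json");
            // 400 corresponds to DiscoveryException/IllegalArgumentException in the resource's catch blocks
            System.out.println("HTTP " + conn.getResponseCode());
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            }
        }
    }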
