Repository: incubator-atlas

Updated Branches:
  refs/heads/master b7f5995aa -> e15629c2e
ATLAS-1033: fix for issues flagged by Coverity scan

Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/e15629c2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/e15629c2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/e15629c2

Branch: refs/heads/master
Commit: e15629c2eb10265676c8fae6b1444a07c005e1aa
Parents: b7f5995
Author: Madhan Neethiraj <[email protected]>
Authored: Tue Jul 19 17:32:12 2016 -0700
Committer: Suma Shivaprasad <[email protected]>
Committed: Wed Jul 20 10:39:41 2016 -0700

----------------------------------------------------------------------
 .../atlas/falcon/bridge/FalconBridge.java       | 31 +++++---
 .../org/apache/atlas/hive/hook/HiveHook.java    | 80 ++++++++++++--------
 .../atlas/authorize/simple/PolicyParser.java    | 13 +---
 .../authorize/simple/SimpleAtlasAuthorizer.java | 11 ++-
 .../main/java/org/apache/atlas/AtlasClient.java | 18 ++++-
 .../org/apache/atlas/AtlasServiceException.java |  4 +-
 .../org/apache/atlas/utils/ParamChecker.java    | 14 ----
 .../apache/atlas/hook/FailedMessagesLogger.java | 16 ++--
 release-log.txt                                 |  1 +
 .../atlas/discovery/DataSetLineageService.java  | 12 +--
 .../graph/GraphBackedDiscoveryService.java      |  5 ++
 .../atlas/repository/graph/DeleteHandler.java   | 43 ++++++-----
 .../graph/GraphBackedMetadataRepository.java    | 10 ++-
 .../atlas/repository/graph/GraphHelper.java     | 24 ++++--
 .../graph/TypedInstanceToGraphMapper.java       | 26 ++++---
 .../typestore/GraphBackedTypeStore.java         | 16 ++--
 .../atlas/services/DefaultMetadataService.java  | 63 ++++++++-------
 .../hbase/HBaseKeyColumnValueStore.java         |  7 +-
 .../apache/atlas/typesystem/persistence/Id.java |  6 +-
 .../typesystem/types/AbstractDataType.java      | 12 ++-
 .../typesystem/types/HierarchicalType.java      | 25 +++---
 .../typesystem/types/ObjectGraphTraversal.java  |  4 +-
 .../typesystem/types/ObjectGraphWalker.java     |  4 +-
 .../src/main/java/org/apache/atlas/Atlas.java   |  7 +-
 .../org/apache/atlas/examples/QuickStart.java   |  2 +-
 .../atlas/util/CredentialProviderUtility.java   | 61 ++++++++-------
 .../java/org/apache/atlas/web/dao/UserDao.java  | 14 +++-
 .../atlas/web/resources/EntityResource.java     | 10 +--
 .../resources/MetadataDiscoveryResource.java    | 12 +--
 .../security/AtlasAuthenticationProvider.java   | 29 ++++---
 30 files changed, 326 insertions(+), 254 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
index 1ed9619..919bd39 100644
--- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
+++ b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
@@ -28,6 +28,7 @@ import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
 import org.apache.atlas.hive.model.HiveDataModelGenerator;
 import org.apache.atlas.hive.model.HiveDataTypes;
 import org.apache.atlas.typesystem.Referenceable;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.falcon.entity.CatalogStorage;
 import org.apache.falcon.entity.FeedHelper;
@@ -284,18 +285,26 @@ public class
FalconBridge { Feed feed) throws Exception { org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName()); - final CatalogTable table = getTable(feedCluster, feed); - if (table != null) { - CatalogStorage storage = new CatalogStorage(cluster, table); - return createHiveTableInstance(cluster.getName(), storage.getDatabase().toLowerCase(), - storage.getTable().toLowerCase()); - } else { - List<Location> locations = FeedHelper.getLocations(feedCluster, feed); - Location dataLocation = FileSystemStorage.getLocation(locations, LocationType.DATA); - final String pathUri = normalize(dataLocation.getPath()); - LOG.info("Registering DFS Path {} ", pathUri); - return fillHDFSDataSet(pathUri, cluster.getName()); + if(feedCluster != null) { + final CatalogTable table = getTable(feedCluster, feed); + if (table != null) { + CatalogStorage storage = new CatalogStorage(cluster, table); + return createHiveTableInstance(cluster.getName(), storage.getDatabase().toLowerCase(), + storage.getTable().toLowerCase()); + } else { + List<Location> locations = FeedHelper.getLocations(feedCluster, feed); + if (CollectionUtils.isNotEmpty(locations)) { + Location dataLocation = FileSystemStorage.getLocation(locations, LocationType.DATA); + if (dataLocation != null) { + final String pathUri = normalize(dataLocation.getPath()); + LOG.info("Registering DFS Path {} ", pathUri); + return fillHDFSDataSet(pathUri, cluster.getName()); + } + } + } } + + return null; } private static CatalogTable getTable(org.apache.falcon.entity.v0.feed.Cluster cluster, Feed feed) { http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java ---------------------------------------------------------------------- diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java index 0e86333..cf2e865 100755 --- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java +++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java @@ -56,6 +56,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.MalformedURLException; +import java.net.URI; import java.util.ArrayList; import java.util.Comparator; import java.util.Date; @@ -491,37 +492,42 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { table = partition.getTable(); db = dgiBridge.hiveClient.getDatabase(table.getDbName()); break; + + default: + LOG.info("{}: entity-type not handled by Atlas hook. 
Ignored", entity.getType()); } - db = dgiBridge.hiveClient.getDatabase(db.getName()); - Referenceable dbEntity = dgiBridge.createDBInstance(db); + if (db != null) { + db = dgiBridge.hiveClient.getDatabase(db.getName()); + Referenceable dbEntity = dgiBridge.createDBInstance(db); - entities.add(dbEntity); - result.put(Type.DATABASE, dbEntity); + entities.add(dbEntity); + result.put(Type.DATABASE, dbEntity); - Referenceable tableEntity = null; + Referenceable tableEntity = null; - if (table != null) { - if (existTable != null) { - table = existTable; - } else { - table = dgiBridge.hiveClient.getTable(table.getDbName(), table.getTableName()); + if (table != null) { + if (existTable != null) { + table = existTable; + } else { + table = dgiBridge.hiveClient.getTable(table.getDbName(), table.getTableName()); + } + //If its an external table, even though the temp table skip flag is on, + // we create the table since we need the HDFS path to temp table lineage. + if (skipTempTables && + table.isTemporary() && + !TableType.EXTERNAL_TABLE.equals(table.getTableType())) { + LOG.debug("Skipping temporary table registration {} since it is not an external table {} ", table.getTableName(), table.getTableType().name()); + + } else { + tableEntity = dgiBridge.createTableInstance(dbEntity, table); + entities.add(tableEntity); + result.put(Type.TABLE, tableEntity); + } } - //If its an external table, even though the temp table skip flag is on, - // we create the table since we need the HDFS path to temp table lineage. - if (skipTempTables && - table.isTemporary() && - !TableType.EXTERNAL_TABLE.equals(table.getTableType())) { - LOG.debug("Skipping temporary table registration {} since it is not an external table {} ", table.getTableName(), table.getTableType().name()); - } else { - tableEntity = dgiBridge.createTableInstance(dbEntity, table); - entities.add(tableEntity); - result.put(Type.TABLE, tableEntity); - } + event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities)); } - - event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities)); return result; } @@ -620,13 +626,16 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { entities.addAll(result.values()); } } else if (entity.getType() == Type.DFS_DIR) { - final String pathUri = lower(new Path(entity.getLocation()).toString()); - LOG.debug("Registering DFS Path {} ", pathUri); - if (!dataSetsProcessed.contains(pathUri)) { - Referenceable hdfsPath = dgiBridge.fillHDFSDataSet(pathUri); - dataSets.put(entity, hdfsPath); - dataSetsProcessed.add(pathUri); - entities.add(hdfsPath); + URI location = entity.getLocation(); + if(location != null) { + final String pathUri = lower(new Path(location).toString()); + LOG.debug("Registering DFS Path {} ", pathUri); + if (!dataSetsProcessed.contains(pathUri)) { + Referenceable hdfsPath = dgiBridge.fillHDFSDataSet(pathUri); + dataSets.put(entity, hdfsPath); + dataSetsProcessed.add(pathUri); + entities.add(hdfsPath); + } } } } @@ -666,13 +675,17 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { private void handleExternalTables(final HiveMetaStoreBridge dgiBridge, final HiveEventContext event, final LinkedHashMap<Type, Referenceable> tables) throws HiveException, MalformedURLException { List<Referenceable> entities = new ArrayList<>(); final WriteEntity hiveEntity = (WriteEntity) getEntityByType(event.getOutputs(), Type.TABLE); - Table hiveTable = hiveEntity.getTable(); + + Table hiveTable = hiveEntity == null ? 
null : hiveEntity.getTable(); + //Refresh to get the correct location - hiveTable = dgiBridge.hiveClient.getTable(hiveTable.getDbName(), hiveTable.getTableName()); + if(hiveTable != null) { + hiveTable = dgiBridge.hiveClient.getTable(hiveTable.getDbName(), hiveTable.getTableName()); + } - final String location = lower(hiveTable.getDataLocation().toString()); if (hiveTable != null && TableType.EXTERNAL_TABLE.equals(hiveTable.getTableType())) { LOG.info("Registering external table process {} ", event.getQueryStr()); + final String location = lower(hiveTable.getDataLocation().toString()); final ReadEntity dfsEntity = new ReadEntity(); dfsEntity.setTyp(Type.DFS_DIR); dfsEntity.setName(location); @@ -702,6 +715,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext { entities.add(processReferenceable); event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities)); } + } private boolean isCreateOp(HiveEventContext hiveEvent) { http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/authorization/src/main/java/org/apache/atlas/authorize/simple/PolicyParser.java ---------------------------------------------------------------------- diff --git a/authorization/src/main/java/org/apache/atlas/authorize/simple/PolicyParser.java b/authorization/src/main/java/org/apache/atlas/authorize/simple/PolicyParser.java index 25692e1..fe81598 100644 --- a/authorization/src/main/java/org/apache/atlas/authorize/simple/PolicyParser.java +++ b/authorization/src/main/java/org/apache/atlas/authorize/simple/PolicyParser.java @@ -161,12 +161,7 @@ public class PolicyParser { if (def.getUsers() != null) { usersMap = def.getUsers(); } - List<AtlasActionTypes> userAutorities = usersMap.get(userAndRole[USERNAME]); - if (userAutorities == null) { - - userAutorities = new ArrayList<AtlasActionTypes>(); - } - userAutorities = getListOfAutorities(userAndRole[USER_AUTHORITIES]); + List<AtlasActionTypes> userAutorities = getListOfAutorities(userAndRole[USER_AUTHORITIES]); usersMap.put(userAndRole[USERNAME], userAutorities); def.setUsers(usersMap); } @@ -195,11 +190,7 @@ public class PolicyParser { if (def.getGroups() != null) { groupsMap = def.getGroups(); } - List<AtlasActionTypes> groupAutorities = groupsMap.get(groupAndRole[GROUPNAME]); - if (groupAutorities == null) { - groupAutorities = new ArrayList<AtlasActionTypes>(); - } - groupAutorities = getListOfAutorities(groupAndRole[GROUP_AUTHORITIES]); + List<AtlasActionTypes> groupAutorities = getListOfAutorities(groupAndRole[GROUP_AUTHORITIES]); groupsMap.put(groupAndRole[GROUPNAME], groupAutorities); def.setGroups(groupsMap); } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/authorization/src/main/java/org/apache/atlas/authorize/simple/SimpleAtlasAuthorizer.java ---------------------------------------------------------------------- diff --git a/authorization/src/main/java/org/apache/atlas/authorize/simple/SimpleAtlasAuthorizer.java b/authorization/src/main/java/org/apache/atlas/authorize/simple/SimpleAtlasAuthorizer.java index 23fc924..30e2067 100644 --- a/authorization/src/main/java/org/apache/atlas/authorize/simple/SimpleAtlasAuthorizer.java +++ b/authorization/src/main/java/org/apache/atlas/authorize/simple/SimpleAtlasAuthorizer.java @@ -32,6 +32,7 @@ import org.apache.atlas.authorize.AtlasAuthorizationException; import org.apache.atlas.authorize.AtlasAuthorizer; import org.apache.atlas.authorize.AtlasResourceTypes; import org.apache.atlas.utils.PropertiesUtil; +import 
org.apache.commons.collections.CollectionUtils; import org.apache.commons.configuration.Configuration; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOCase; @@ -224,10 +225,12 @@ public final class SimpleAtlasAuthorizer implements AtlasAuthorizer { LOG.debug("==> SimpleAtlasAuthorizer checkAccessForGroups"); } - for (String group : groups) { - isAccessAllowed = checkAccess(group, resourceType, resource, map); - if (isAccessAllowed) { - break; + if(CollectionUtils.isNotEmpty(groups)) { + for (String group : groups) { + isAccessAllowed = checkAccess(group, resourceType, resource, map); + if (isAccessAllowed) { + break; + } } } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/client/src/main/java/org/apache/atlas/AtlasClient.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/atlas/AtlasClient.java b/client/src/main/java/org/apache/atlas/AtlasClient.java index d7543f2..32e1bb0 100755 --- a/client/src/main/java/org/apache/atlas/AtlasClient.java +++ b/client/src/main/java/org/apache/atlas/AtlasClient.java @@ -144,8 +144,15 @@ public class AtlasClient { // New constuctor for Basic auth public AtlasClient(String[] baseUrl, String[] basicAuthUserNamepassword) { - this.basicAuthUser = basicAuthUserNamepassword[0]; - this.basicAuthPassword = basicAuthUserNamepassword[1]; + if (basicAuthUserNamepassword != null) { + if (basicAuthUserNamepassword.length > 0) { + this.basicAuthUser = basicAuthUserNamepassword[0]; + } + if (basicAuthUserNamepassword.length > 1) { + this.basicAuthPassword = basicAuthUserNamepassword[1]; + } + } + initializeState(baseUrl, null, null); } @@ -1119,7 +1126,8 @@ public class AtlasClient { private JSONObject callAPIWithResource(API api, WebResource resource, Object requestObject) throws AtlasServiceException { ClientResponse clientResponse = null; - for (int i = 0; i < getNumberOfRetries(); i++) { + int i = 0; + do { clientResponse = resource.accept(JSON_MEDIA_TYPE).type(JSON_MEDIA_TYPE) .method(api.getMethod(), ClientResponse.class, requestObject); @@ -1137,7 +1145,9 @@ public class AtlasClient { LOG.error("Got a service unavailable when calling: {}, will retry..", resource); sleepBetweenRetries(); } - } + + i++; + } while (i < getNumberOfRetries()); throw new AtlasServiceException(api, clientResponse); } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/client/src/main/java/org/apache/atlas/AtlasServiceException.java ---------------------------------------------------------------------- diff --git a/client/src/main/java/org/apache/atlas/AtlasServiceException.java b/client/src/main/java/org/apache/atlas/AtlasServiceException.java index 2117a6b..367d52d 100755 --- a/client/src/main/java/org/apache/atlas/AtlasServiceException.java +++ b/client/src/main/java/org/apache/atlas/AtlasServiceException.java @@ -37,8 +37,8 @@ public class AtlasServiceException extends Exception { } private AtlasServiceException(AtlasClient.API api, ClientResponse.Status status, String response) { - super("Metadata service API " + api + " failed with status " + status.getStatusCode() + "(" + - status.getReasonPhrase() + ") Response Body (" + response + ")"); + super("Metadata service API " + api + " failed with status " + (status != null ? 
status.getStatusCode() : -1) + + " (" + status + ") Response Body (" + response + ")"); this.status = status; } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/common/src/main/java/org/apache/atlas/utils/ParamChecker.java ---------------------------------------------------------------------- diff --git a/common/src/main/java/org/apache/atlas/utils/ParamChecker.java b/common/src/main/java/org/apache/atlas/utils/ParamChecker.java index 2b06f22..c8d7464 100644 --- a/common/src/main/java/org/apache/atlas/utils/ParamChecker.java +++ b/common/src/main/java/org/apache/atlas/utils/ParamChecker.java @@ -139,20 +139,6 @@ public final class ParamChecker { } /** - * Check that a list is not null and that none of its elements is null. If null or if the list has emtpy elements - * throws an IllegalArgumentException. - * @param list the list of strings. - * @param name parameter name for the exception message. - */ - public static Collection<String> notEmptyElements(Collection<String> list, String name) { - notEmpty(list, name); - for (String ele : list) { - notEmpty(ele, String.format("list %s element %s", name, ele)); - } - return list; - } - - /** * Checks that the given value is <= max value. * @param value * @param maxValue http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/notification/src/main/java/org/apache/atlas/hook/FailedMessagesLogger.java ---------------------------------------------------------------------- diff --git a/notification/src/main/java/org/apache/atlas/hook/FailedMessagesLogger.java b/notification/src/main/java/org/apache/atlas/hook/FailedMessagesLogger.java index 0b552d3..0b3723a 100644 --- a/notification/src/main/java/org/apache/atlas/hook/FailedMessagesLogger.java +++ b/notification/src/main/java/org/apache/atlas/hook/FailedMessagesLogger.java @@ -77,13 +77,15 @@ public class FailedMessagesLogger { org.apache.log4j.Logger rootLogger = org.apache.log4j.Logger.getRootLogger(); Enumeration allAppenders = rootLogger.getAllAppenders(); - while (allAppenders.hasMoreElements()) { - Appender appender = (Appender) allAppenders.nextElement(); - if (appender instanceof FileAppender) { - FileAppender fileAppender = (FileAppender) appender; - String rootLoggerFile = fileAppender.getFile(); - rootLoggerDirectory = new File(rootLoggerFile).getParent(); - break; + if (allAppenders != null) { + while (allAppenders.hasMoreElements()) { + Appender appender = (Appender) allAppenders.nextElement(); + if (appender instanceof FileAppender) { + FileAppender fileAppender = (FileAppender) appender; + String rootLoggerFile = fileAppender.getFile(); + rootLoggerDirectory = new File(rootLoggerFile).getParent(); + break; + } } } return rootLoggerDirectory; http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/release-log.txt ---------------------------------------------------------------------- diff --git a/release-log.txt b/release-log.txt index 7a854cd..1b5d899 100644 --- a/release-log.txt +++ b/release-log.txt @@ -6,6 +6,7 @@ INCOMPATIBLE CHANGES: ALL CHANGES: +ATLAS-1033 fix for issues flagged by Coverity scan (mneethiraj via sumasai) ATLAS-1036 Compilation error on java 1.8 - GraphBackedDiscoveryService (shwethags via sumasai) ATLAS-1034 Incorrect Falcon hook impl class name in Falcon hook shim (mneethiraj via shwethags) ATLAS-347 Atlas search APIs should allow pagination of results (shwethags) http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/repository/src/main/java/org/apache/atlas/discovery/DataSetLineageService.java 
---------------------------------------------------------------------- diff --git a/repository/src/main/java/org/apache/atlas/discovery/DataSetLineageService.java b/repository/src/main/java/org/apache/atlas/discovery/DataSetLineageService.java index 1ded435..8aa389c 100644 --- a/repository/src/main/java/org/apache/atlas/discovery/DataSetLineageService.java +++ b/repository/src/main/java/org/apache/atlas/discovery/DataSetLineageService.java @@ -101,7 +101,7 @@ public class DataSetLineageService implements LineageService { @GraphTransaction public String getOutputsGraph(String datasetName) throws AtlasException { LOG.info("Fetching lineage outputs graph for datasetName={}", datasetName); - ParamChecker.notEmpty(datasetName, "dataset name"); + datasetName = ParamChecker.notEmpty(datasetName, "dataset name"); ReferenceableInstance datasetInstance = validateDatasetNameExists(datasetName); return getOutputsGraphForId(datasetInstance.getId()._getId()); } @@ -116,7 +116,7 @@ public class DataSetLineageService implements LineageService { @GraphTransaction public String getInputsGraph(String tableName) throws AtlasException { LOG.info("Fetching lineage inputs graph for tableName={}", tableName); - ParamChecker.notEmpty(tableName, "table name"); + tableName = ParamChecker.notEmpty(tableName, "table name"); ReferenceableInstance datasetInstance = validateDatasetNameExists(tableName); return getInputsGraphForId(datasetInstance.getId()._getId()); } @@ -125,7 +125,7 @@ public class DataSetLineageService implements LineageService { @GraphTransaction public String getInputsGraphForEntity(String guid) throws AtlasException { LOG.info("Fetching lineage inputs graph for entity={}", guid); - ParamChecker.notEmpty(guid, "Entity id"); + guid = ParamChecker.notEmpty(guid, "Entity id"); validateDatasetExists(guid); return getInputsGraphForId(guid); } @@ -143,7 +143,7 @@ public class DataSetLineageService implements LineageService { @GraphTransaction public String getOutputsGraphForEntity(String guid) throws AtlasException { LOG.info("Fetching lineage outputs graph for entity guid={}", guid); - ParamChecker.notEmpty(guid, "Entity id"); + guid = ParamChecker.notEmpty(guid, "Entity id"); validateDatasetExists(guid); return getOutputsGraphForId(guid); } @@ -165,7 +165,7 @@ public class DataSetLineageService implements LineageService { @Override @GraphTransaction public String getSchema(String datasetName) throws AtlasException { - ParamChecker.notEmpty(datasetName, "table name"); + datasetName = ParamChecker.notEmpty(datasetName, "table name"); LOG.info("Fetching schema for tableName={}", datasetName); ReferenceableInstance datasetInstance = validateDatasetNameExists(datasetName); @@ -182,7 +182,7 @@ public class DataSetLineageService implements LineageService { @Override @GraphTransaction public String getSchemaForEntity(String guid) throws AtlasException { - ParamChecker.notEmpty(guid, "Entity id"); + guid = ParamChecker.notEmpty(guid, "Entity id"); LOG.info("Fetching schema for entity guid={}", guid); String typeName = validateDatasetExists(guid); return getSchemaForId(typeName, guid); http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/repository/src/main/java/org/apache/atlas/discovery/graph/GraphBackedDiscoveryService.java ---------------------------------------------------------------------- diff --git a/repository/src/main/java/org/apache/atlas/discovery/graph/GraphBackedDiscoveryService.java b/repository/src/main/java/org/apache/atlas/discovery/graph/GraphBackedDiscoveryService.java index 
82a49dc..df18f9e 100755 --- a/repository/src/main/java/org/apache/atlas/discovery/graph/GraphBackedDiscoveryService.java +++ b/repository/src/main/java/org/apache/atlas/discovery/graph/GraphBackedDiscoveryService.java @@ -167,6 +167,11 @@ public class GraphBackedDiscoveryService implements DiscoveryService { LOG.info("Executing gremlin query={}", gremlinQuery); ScriptEngineManager manager = new ScriptEngineManager(); ScriptEngine engine = manager.getEngineByName("gremlin-groovy"); + + if(engine == null) { + throw new DiscoveryException("gremlin-groovy: engine not found"); + } + Bindings bindings = engine.createBindings(); bindings.put("g", titanGraph); http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/repository/src/main/java/org/apache/atlas/repository/graph/DeleteHandler.java ---------------------------------------------------------------------- diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/DeleteHandler.java b/repository/src/main/java/org/apache/atlas/repository/graph/DeleteHandler.java index e74e57f..8d31c1b 100644 --- a/repository/src/main/java/org/apache/atlas/repository/graph/DeleteHandler.java +++ b/repository/src/main/java/org/apache/atlas/repository/graph/DeleteHandler.java @@ -334,28 +334,29 @@ public abstract class DeleteHandler { String keyPropertyName = GraphHelper.getQualifiedNameForMapKey(propertyName, key); String mapEdgeId = GraphHelper.getProperty(outVertex, keyPropertyName); Edge mapEdge = graphHelper.getEdgeByEdgeId(outVertex, keyPropertyName, mapEdgeId); - Vertex mapVertex = mapEdge.getVertex(Direction.IN); - if (mapVertex.getId().toString().equals(inVertex.getId().toString())) { - //TODO keys.size includes deleted items as well. should exclude - if (attributeInfo.multiplicity.nullAllowed() || keys.size() > attributeInfo.multiplicity.lower) { - edge = mapEdge; - } - else { - // Deleting this entry would violate the attribute's lower bound. - throw new NullRequiredAttributeException( - "Cannot remove map entry " + keyPropertyName + " from required attribute " + - GraphHelper.getQualifiedFieldName(type, attributeName) + " on " + string(outVertex) + " " + string(mapEdge)); - } - - if (shouldUpdateReverseAttribute) { - //remove this key - LOG.debug("Removing edge {}, key {} from the map attribute {}", string(mapEdge), key, - attributeName); - keys.remove(key); - GraphHelper.setProperty(outVertex, propertyName, keys); - GraphHelper.setProperty(outVertex, keyPropertyName, null); + if(mapEdge != null) { + Vertex mapVertex = mapEdge.getVertex(Direction.IN); + if (mapVertex.getId().toString().equals(inVertex.getId().toString())) { + //TODO keys.size includes deleted items as well. should exclude + if (attributeInfo.multiplicity.nullAllowed() || keys.size() > attributeInfo.multiplicity.lower) { + edge = mapEdge; + } else { + // Deleting this entry would violate the attribute's lower bound. 
+ throw new NullRequiredAttributeException( + "Cannot remove map entry " + keyPropertyName + " from required attribute " + + GraphHelper.getQualifiedFieldName(type, attributeName) + " on " + string(outVertex) + " " + string(mapEdge)); + } + + if (shouldUpdateReverseAttribute) { + //remove this key + LOG.debug("Removing edge {}, key {} from the map attribute {}", string(mapEdge), key, + attributeName); + keys.remove(key); + GraphHelper.setProperty(outVertex, propertyName, keys); + GraphHelper.setProperty(outVertex, keyPropertyName, null); + } + break; } - break; } } } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java ---------------------------------------------------------------------- diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java index 73445cb..e301a00 100755 --- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java +++ b/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java @@ -268,11 +268,13 @@ public class GraphBackedMetadataRepository implements MetadataRepository { final String entityTypeName = GraphHelper.getTypeName(instanceVertex); String relationshipLabel = GraphHelper.getTraitLabel(entityTypeName, traitNameToBeDeleted); Edge edge = GraphHelper.getEdgeForLabel(instanceVertex, relationshipLabel); - deleteHandler.deleteEdgeReference(edge, DataTypes.TypeCategory.TRAIT, false, true); + if(edge != null) { + deleteHandler.deleteEdgeReference(edge, DataTypes.TypeCategory.TRAIT, false, true); - // update the traits in entity once trait removal is successful - traitNames.remove(traitNameToBeDeleted); - updateTraits(instanceVertex, traitNames); + // update the traits in entity once trait removal is successful + traitNames.remove(traitNameToBeDeleted); + updateTraits(instanceVertex, traitNames); + } } catch (Exception e) { throw new RepositoryException(e); } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java ---------------------------------------------------------------------- diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java index b3711b5..81fb76e 100755 --- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java +++ b/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java @@ -429,20 +429,28 @@ public final class GraphHelper { } public static String string(Vertex vertex) { - if (LOG.isDebugEnabled()) { - return String.format("vertex[id=%s type=%s guid=%s]", vertex.getId().toString(), getTypeName(vertex), - getIdFromVertex(vertex)); + if(vertex == null) { + return "vertex[null]"; } else { - return String.format("vertex[id=%s]", vertex.getId().toString()); + if (LOG.isDebugEnabled()) { + return String.format("vertex[id=%s type=%s guid=%s]", vertex.getId().toString(), getTypeName(vertex), + getIdFromVertex(vertex)); + } else { + return String.format("vertex[id=%s]", vertex.getId().toString()); + } } } public static String string(Edge edge) { - if (LOG.isDebugEnabled()) { - return String.format("edge[id=%s label=%s from %s -> to %s]", edge.getId().toString(), edge.getLabel(), - 
string(edge.getVertex(Direction.OUT)), string(edge.getVertex(Direction.IN))); + if(edge == null) { + return "edge[null]"; } else { - return String.format("edge[id=%s]", edge.getId().toString()); + if (LOG.isDebugEnabled()) { + return String.format("edge[id=%s label=%s from %s -> to %s]", edge.getId().toString(), edge.getLabel(), + string(edge.getVertex(Direction.OUT)), string(edge.getVertex(Direction.IN))); + } else { + return String.format("edge[id=%s]", edge.getId().toString()); + } } } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/repository/src/main/java/org/apache/atlas/repository/graph/TypedInstanceToGraphMapper.java ---------------------------------------------------------------------- diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/TypedInstanceToGraphMapper.java b/repository/src/main/java/org/apache/atlas/repository/graph/TypedInstanceToGraphMapper.java index f4f9ec1..0512489 100644 --- a/repository/src/main/java/org/apache/atlas/repository/graph/TypedInstanceToGraphMapper.java +++ b/repository/src/main/java/org/apache/atlas/repository/graph/TypedInstanceToGraphMapper.java @@ -370,10 +370,12 @@ public final class TypedInstanceToGraphMapper { if (!cloneElements.isEmpty()) { for (String edgeIdForDelete : cloneElements) { Edge edge = graphHelper.getEdgeByEdgeId(instanceVertex, edgeLabel, edgeIdForDelete); - boolean deleted = deleteHandler.deleteEdgeReference(edge, entryType.getTypeCategory(), - attributeInfo.isComposite, true); - if (!deleted) { - additionalElements.add(edgeIdForDelete); + if(edge != null) { + boolean deleted = deleteHandler.deleteEdgeReference(edge, entryType.getTypeCategory(), + attributeInfo.isComposite, true); + if (!deleted) { + additionalElements.add(edgeIdForDelete); + } } } } @@ -454,11 +456,13 @@ public final class TypedInstanceToGraphMapper { if (!newMap.values().contains(currentEdge)) { String edgeLabel = GraphHelper.getQualifiedNameForMapKey(propertyName, currentKey); Edge edge = graphHelper.getEdgeByEdgeId(instanceVertex, edgeLabel, currentMap.get(currentKey)); - boolean deleted = - deleteHandler.deleteEdgeReference(edge, elementType.getTypeCategory(), attributeInfo.isComposite, true); - if (!deleted) { - additionalMap.put(currentKey, currentEdge); - shouldDeleteKey = false; + if(edge != null) { + boolean deleted = + deleteHandler.deleteEdgeReference(edge, elementType.getTypeCategory(), attributeInfo.isComposite, true); + if (!deleted) { + additionalMap.put(currentKey, currentEdge); + shouldDeleteKey = false; + } } } } @@ -702,7 +706,9 @@ public final class TypedInstanceToGraphMapper { } else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) { final Date dateVal = typedInstance.getDate(attributeInfo.name); //Convert Property value to Long while persisting - propertyValue = dateVal.getTime(); + if(dateVal != null) { + propertyValue = dateVal.getTime(); + } } else if (attributeInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.ENUM) { if (attrValue != null) { propertyValue = ((EnumValue)attrValue).value; http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/repository/src/main/java/org/apache/atlas/repository/typestore/GraphBackedTypeStore.java ---------------------------------------------------------------------- diff --git a/repository/src/main/java/org/apache/atlas/repository/typestore/GraphBackedTypeStore.java b/repository/src/main/java/org/apache/atlas/repository/typestore/GraphBackedTypeStore.java index 58ef675..3a5829b 100755 --- 
a/repository/src/main/java/org/apache/atlas/repository/typestore/GraphBackedTypeStore.java +++ b/repository/src/main/java/org/apache/atlas/repository/typestore/GraphBackedTypeStore.java @@ -168,16 +168,20 @@ public class GraphBackedTypeStore implements ITypeStore { switch (attrDataType.getTypeCategory()) { case ARRAY: String attrType = TypeUtils.parseAsArrayType(attrDataType.getName()); - IDataType elementType = typeSystem.getDataType(IDataType.class, attrType); - attrDataTypes.add(elementType); + if(attrType != null) { + IDataType elementType = typeSystem.getDataType(IDataType.class, attrType); + attrDataTypes.add(elementType); + } break; case MAP: String[] attrTypes = TypeUtils.parseAsMapType(attrDataType.getName()); - IDataType keyType = typeSystem.getDataType(IDataType.class, attrTypes[0]); - IDataType valueType = typeSystem.getDataType(IDataType.class, attrTypes[1]); - attrDataTypes.add(keyType); - attrDataTypes.add(valueType); + if(attrTypes != null && attrTypes.length > 1) { + IDataType keyType = typeSystem.getDataType(IDataType.class, attrTypes[0]); + IDataType valueType = typeSystem.getDataType(IDataType.class, attrTypes[1]); + attrDataTypes.add(keyType); + attrDataTypes.add(valueType); + } break; case ENUM: http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java ---------------------------------------------------------------------- diff --git a/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java b/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java index 578def0..484a877 100755 --- a/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java +++ b/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java @@ -239,7 +239,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang } private JSONObject createOrUpdateTypes(String typeDefinition, boolean isUpdate) throws AtlasException { - ParamChecker.notEmpty(typeDefinition, "type definition"); + typeDefinition = ParamChecker.notEmpty(typeDefinition, "type definition"); TypesDef typesDef = validateTypeDefinition(typeDefinition); try { @@ -327,7 +327,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang */ @Override public List<String> createEntities(String entityInstanceDefinition) throws AtlasException { - ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition"); + entityInstanceDefinition = ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition"); ITypedReferenceableInstance[] typedInstances = deserializeClassInstances(entityInstanceDefinition); @@ -362,8 +362,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang @Override public ITypedReferenceableInstance getTypedReferenceableInstance(Referenceable entityInstance) throws AtlasException { - final String entityTypeName = entityInstance.getTypeName(); - ParamChecker.notEmpty(entityTypeName, "Entity type cannot be null"); + final String entityTypeName = ParamChecker.notEmpty(entityInstance.getTypeName(), "Entity type cannot be null"); ClassType entityType = typeSystem.getDataType(ClassType.class, entityTypeName); @@ -385,7 +384,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang */ @Override public String getEntityDefinition(String guid) throws AtlasException { - ParamChecker.notEmpty(guid, "entity id"); + guid = 
ParamChecker.notEmpty(guid, "entity id"); final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid); return InstanceSerialization.toJson(instance, true); @@ -440,8 +439,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang */ @Override public AtlasClient.EntityResult updateEntities(String entityInstanceDefinition) throws AtlasException { - - ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition"); + entityInstanceDefinition = ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition"); ITypedReferenceableInstance[] typedInstances = deserializeClassInstances(entityInstanceDefinition); AtlasClient.EntityResult entityResult = repository.updateEntities(typedInstances); @@ -457,11 +455,11 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang } @Override - public AtlasClient.EntityResult updateEntityAttributeByGuid(final String guid, String attributeName, + public AtlasClient.EntityResult updateEntityAttributeByGuid(String guid, String attributeName, String value) throws AtlasException { - ParamChecker.notEmpty(guid, "entity id"); - ParamChecker.notEmpty(attributeName, "attribute name"); - ParamChecker.notEmpty(value, "attribute value"); + guid = ParamChecker.notEmpty(guid, "entity id"); + attributeName = ParamChecker.notEmpty(attributeName, "attribute name"); + value = ParamChecker.notEmpty(value, "attribute value"); ITypedReferenceableInstance existInstance = validateEntityExists(guid); ClassType type = typeSystem.getDataType(ClassType.class, existInstance.getTypeName()); @@ -502,10 +500,10 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang } @Override - public AtlasClient.EntityResult updateEntityPartialByGuid(final String guid, Referenceable newEntity) + public AtlasClient.EntityResult updateEntityPartialByGuid(String guid, Referenceable newEntity) throws AtlasException { - ParamChecker.notEmpty(guid, "guid cannot be null"); - ParamChecker.notNull(newEntity, "updatedEntity cannot be null"); + guid = ParamChecker.notEmpty(guid, "guid cannot be null"); + newEntity = ParamChecker.notNull(newEntity, "updatedEntity cannot be null"); ITypedReferenceableInstance existInstance = validateEntityExists(guid); ITypedReferenceableInstance newInstance = convertToTypedInstance(newEntity, existInstance.getTypeName()); @@ -563,10 +561,10 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang public AtlasClient.EntityResult updateEntityByUniqueAttribute(String typeName, String uniqueAttributeName, String attrValue, Referenceable updatedEntity) throws AtlasException { - ParamChecker.notEmpty(typeName, "typeName"); - ParamChecker.notEmpty(uniqueAttributeName, "uniqueAttributeName"); - ParamChecker.notNull(attrValue, "unique attribute value"); - ParamChecker.notNull(updatedEntity, "updatedEntity"); + typeName = ParamChecker.notEmpty(typeName, "typeName"); + uniqueAttributeName = ParamChecker.notEmpty(uniqueAttributeName, "uniqueAttributeName"); + attrValue = ParamChecker.notNull(attrValue, "unique attribute value"); + updatedEntity = ParamChecker.notNull(updatedEntity, "updatedEntity"); ITypedReferenceableInstance oldInstance = getEntityDefinitionReference(typeName, uniqueAttributeName, attrValue); @@ -579,7 +577,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang } private void validateTypeExists(String entityType) throws AtlasException { - ParamChecker.notEmpty(entityType, "entity type"); + 
entityType = ParamChecker.notEmpty(entityType, "entity type"); IDataType type = typeSystem.getDataType(IDataType.class, entityType); if (type.getTypeCategory() != DataTypes.TypeCategory.CLASS) { @@ -596,7 +594,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang */ @Override public List<String> getTraitNames(String guid) throws AtlasException { - ParamChecker.notEmpty(guid, "entity id"); + guid = ParamChecker.notEmpty(guid, "entity id"); return repository.getTraitNames(guid); } @@ -609,8 +607,8 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang */ @Override public void addTrait(String guid, String traitInstanceDefinition) throws AtlasException { - ParamChecker.notEmpty(guid, "entity id"); - ParamChecker.notEmpty(traitInstanceDefinition, "trait instance definition"); + guid = ParamChecker.notEmpty(guid, "entity id"); + traitInstanceDefinition = ParamChecker.notEmpty(traitInstanceDefinition, "trait instance definition"); ITypedStruct traitInstance = deserializeTraitInstance(traitInstanceDefinition); addTrait(guid, traitInstance); @@ -644,8 +642,7 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang @Override public ITypedStruct createTraitInstance(Struct traitInstance) throws AtlasException { try { - final String entityTypeName = traitInstance.getTypeName(); - ParamChecker.notEmpty(entityTypeName, "entity type"); + final String entityTypeName = ParamChecker.notEmpty(traitInstance.getTypeName(), "entity type"); TraitType traitType = typeSystem.getDataType(TraitType.class, entityTypeName); return traitType.convert(traitInstance, Multiplicity.REQUIRED); @@ -657,8 +654,8 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang } @Override - public String getTraitDefinition(final String guid, final String traitName) throws AtlasException { - ParamChecker.notEmpty(guid, "entity id"); + public String getTraitDefinition(String guid, final String traitName) throws AtlasException { + guid = ParamChecker.notEmpty(guid, "entity id"); final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid); IStruct struct = instance.getTrait(traitName); @@ -674,8 +671,8 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang */ @Override public void deleteTrait(String guid, String traitNameToBeDeleted) throws AtlasException { - ParamChecker.notEmpty(guid, "entity id"); - ParamChecker.notEmpty(traitNameToBeDeleted, "trait name"); + guid = ParamChecker.notEmpty(guid, "entity id"); + traitNameToBeDeleted = ParamChecker.notEmpty(traitNameToBeDeleted, "trait name"); // ensure trait type is already registered with the TS if (!typeSystem.isRegistered(traitNameToBeDeleted)) { @@ -747,8 +744,8 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang @Override public List<EntityAuditEvent> getAuditEvents(String guid, String startKey, short count) throws AtlasException { - ParamChecker.notEmpty(guid, "entity id"); - ParamChecker.notEmptyIfNotNull(startKey, "start key"); + guid = ParamChecker.notEmpty(guid, "entity id"); + startKey = ParamChecker.notEmptyIfNotNull(startKey, "start key"); ParamChecker.lessThan(count, maxAuditResults, "count"); return auditRepository.listEvents(guid, startKey, count); @@ -766,9 +763,9 @@ public class DefaultMetadataService implements MetadataService, ActiveStateChang @Override public AtlasClient.EntityResult deleteEntityByUniqueAttribute(String typeName, String uniqueAttributeName, String attrValue) 
throws AtlasException { - ParamChecker.notEmpty(typeName, "delete candidate typeName"); - ParamChecker.notEmpty(uniqueAttributeName, "delete candidate unique attribute name"); - ParamChecker.notEmpty(attrValue, "delete candidate unique attribute value"); + typeName = ParamChecker.notEmpty(typeName, "delete candidate typeName"); + uniqueAttributeName = ParamChecker.notEmpty(uniqueAttributeName, "delete candidate unique attribute name"); + attrValue = ParamChecker.notEmpty(attrValue, "delete candidate unique attribute value"); //Throws EntityNotFoundException if the entity could not be found by its unique attribute ITypedReferenceableInstance instance = getEntityDefinitionReference(typeName, uniqueAttributeName, attrValue); http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/titan/src/main/java/com/thinkaurelius/titan/diskstorage/hbase/HBaseKeyColumnValueStore.java ---------------------------------------------------------------------- diff --git a/titan/src/main/java/com/thinkaurelius/titan/diskstorage/hbase/HBaseKeyColumnValueStore.java b/titan/src/main/java/com/thinkaurelius/titan/diskstorage/hbase/HBaseKeyColumnValueStore.java index fc8f2c4..c5f6e0d 100644 --- a/titan/src/main/java/com/thinkaurelius/titan/diskstorage/hbase/HBaseKeyColumnValueStore.java +++ b/titan/src/main/java/com/thinkaurelius/titan/diskstorage/hbase/HBaseKeyColumnValueStore.java @@ -330,18 +330,19 @@ public class HBaseKeyColumnValueStore implements KeyColumnValueStore { ensureOpen(); return new RecordIterator<Entry>() { - private final Iterator<Map.Entry<byte[], NavigableMap<Long, byte[]>>> kv = currentRow.getMap().get(columnFamilyBytes).entrySet().iterator(); + private final NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> currentMap = currentRow.getMap(); + private final Iterator<Map.Entry<byte[], NavigableMap<Long, byte[]>>> kv = currentMap == null ? null : currentMap.get(columnFamilyBytes).entrySet().iterator(); @Override public boolean hasNext() { ensureOpen(); - return kv.hasNext(); + return kv == null ? false : kv.hasNext(); } @Override public Entry next() { ensureOpen(); - return StaticArrayEntry.ofBytes(kv.next(), entryGetter); + return kv == null ? 
null : StaticArrayEntry.ofBytes(kv.next(), entryGetter); } @Override http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/Id.java ---------------------------------------------------------------------- diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/Id.java b/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/Id.java index 7ff7958..04e220d 100755 --- a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/Id.java +++ b/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/Id.java @@ -45,9 +45,9 @@ public class Id implements ITypedReferenceableInstance { public EntityState state; public Id(String id, int version, String typeName, String state) { - ParamChecker.notEmpty(id, "id"); - ParamChecker.notEmpty(typeName, "typeName"); - ParamChecker.notEmptyIfNotNull(state, "state"); + id = ParamChecker.notEmpty(id, "id"); + typeName = ParamChecker.notEmpty(typeName, "typeName"); + state = ParamChecker.notEmptyIfNotNull(state, "state"); this.id = id; this.typeName = typeName; this.version = version; http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/typesystem/src/main/java/org/apache/atlas/typesystem/types/AbstractDataType.java ---------------------------------------------------------------------- diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/AbstractDataType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/AbstractDataType.java index dc9cdf2..fad091d 100755 --- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/AbstractDataType.java +++ b/typesystem/src/main/java/org/apache/atlas/typesystem/types/AbstractDataType.java @@ -47,12 +47,18 @@ abstract class AbstractDataType<T> implements IDataType<T> { @Override public void output(T val, Appendable buf, String prefix, Set<T> inProcess) throws AtlasException { - if (val instanceof Map) { + final String strValue; + + if (val == null) { + strValue = "<null>"; + } else if (val instanceof Map) { ImmutableSortedMap immutableSortedMap = ImmutableSortedMap.copyOf((Map) val); - TypeUtils.outputVal(val == null ? "<null>" : immutableSortedMap.toString(), buf, prefix); + strValue = immutableSortedMap.toString(); } else { - TypeUtils.outputVal(val == null ? "<null>" : val.toString(), buf, prefix); + strValue = val.toString(); } + + TypeUtils.outputVal(strValue, buf, prefix); } @Override http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalType.java ---------------------------------------------------------------------- diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalType.java index befc705..7224699 100755 --- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalType.java +++ b/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalType.java @@ -517,17 +517,20 @@ public abstract class HierarchicalType<ST extends HierarchicalType, T> extends A @Override public Path next() { Path p = pathQueue.poll(); - ST t = null; - try { - t = (ST) typeSystem.getDataType(superTypeClass, p.typeName); - } catch (AtlasException me) { - throw new RuntimeException(me); - } - if (t.superTypes != null) { - ImmutableSet<String> sTs = t.superTypes; - for (String sT : sTs) { - String nm = sT + "." 
+ p.pathName; - pathQueue.add(pathNameToPathMap.get(nm)); + + if(p != null) { + ST t = null; + try { + t = (ST) typeSystem.getDataType(superTypeClass, p.typeName); + } catch (AtlasException me) { + throw new RuntimeException(me); + } + if (t.superTypes != null) { + ImmutableSet<String> sTs = t.superTypes; + for (String sT : sTs) { + String nm = sT + "." + p.pathName; + pathQueue.add(pathNameToPathMap.get(nm)); + } } } return p; http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphTraversal.java ---------------------------------------------------------------------- diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphTraversal.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphTraversal.java index 5022aa6..a8f2eeb 100755 --- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphTraversal.java +++ b/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphTraversal.java @@ -172,7 +172,9 @@ public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.Insta public InstanceTuple next() { try { InstanceTuple t = queue.poll(); - processReferenceableInstance(t.instance); + if(t != null) { + processReferenceableInstance(t.instance); + } return t; } catch (AtlasException me) { throw new RuntimeException(me); http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphWalker.java ---------------------------------------------------------------------- diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphWalker.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphWalker.java index db87cf9..81884e8 100755 --- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphWalker.java +++ b/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphWalker.java @@ -76,7 +76,9 @@ public class ObjectGraphWalker { public void walk() throws AtlasException { while (!queue.isEmpty()) { IReferenceableInstance r = queue.poll(); - processReferenceableInstance(r); + if(r != null) { + processReferenceableInstance(r); + } } } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/webapp/src/main/java/org/apache/atlas/Atlas.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/Atlas.java b/webapp/src/main/java/org/apache/atlas/Atlas.java index db3e13d..dd43c6d 100755 --- a/webapp/src/main/java/org/apache/atlas/Atlas.java +++ b/webapp/src/main/java/org/apache/atlas/Atlas.java @@ -147,9 +147,12 @@ public final class Atlas { } static int getApplicationPort(CommandLine cmd, String enableTLSFlag, Configuration configuration) { + String optionValue = cmd.hasOption(APP_PORT) ? 
cmd.getOptionValue(APP_PORT) : null; + final int appPort; - if (cmd.hasOption(APP_PORT)) { - appPort = Integer.valueOf(cmd.getOptionValue(APP_PORT)); + + if (StringUtils.isNotEmpty(optionValue)) { + appPort = Integer.valueOf(optionValue); } else { // default : atlas.enableTLS is true appPort = getPortValue(configuration, enableTLSFlag); http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java index 1cdc4c7..2426f63 100755 --- a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java +++ b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java @@ -466,7 +466,7 @@ public class QuickStart { if (results != null) { System.out.println("query [" + dslQuery + "] returned [" + results.length() + "] rows"); } else { - System.out.println("query [" + dslQuery + "] failed, results:" + results.toString()); + System.out.println("query [" + dslQuery + "] failed, results:" + results); } } } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java b/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java index 58cd318..aeff989 100755 --- a/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java +++ b/webapp/src/main/java/org/apache/atlas/util/CredentialProviderUtility.java @@ -74,26 +74,28 @@ public class CredentialProviderUtility { // prompt for the provider name CredentialProvider provider = getCredentialProvider(textDevice); - char[] cred; - for (String key : KEYS) { - cred = getPassword(textDevice, key); - // create a credential entry and store it - boolean overwrite = true; - if (provider.getCredentialEntry(key) != null) { - String choice = textDevice.readLine("Entry for %s already exists. Overwrite? (y/n) [y]:", key); - overwrite = StringUtils.isEmpty(choice) || choice.equalsIgnoreCase("y"); - if (overwrite) { - provider.deleteCredentialEntry(key); - provider.flush(); + if(provider != null) { + char[] cred; + for (String key : KEYS) { + cred = getPassword(textDevice, key); + // create a credential entry and store it + boolean overwrite = true; + if (provider.getCredentialEntry(key) != null) { + String choice = textDevice.readLine("Entry for %s already exists. Overwrite? 
(y/n) [y]:", key); + overwrite = StringUtils.isEmpty(choice) || choice.equalsIgnoreCase("y"); + if (overwrite) { + provider.deleteCredentialEntry(key); + provider.flush(); + provider.createCredentialEntry(key, cred); + provider.flush(); + textDevice.printf("Entry for %s was overwritten with the new value.\n", key); + } else { + textDevice.printf("Entry for %s was not overwritten.\n", key); + } + } else { provider.createCredentialEntry(key, cred); provider.flush(); - textDevice.printf("Entry for %s was overwritten with the new value.\n", key); - } else { - textDevice.printf("Entry for %s was not overwritten.\n", key); } - } else { - provider.createCredentialEntry(key, cred); - provider.flush(); } } } @@ -141,16 +143,21 @@ public class CredentialProviderUtility { */ private static CredentialProvider getCredentialProvider(TextDevice textDevice) throws IOException { String providerPath = textDevice.readLine("Please enter the full path to the credential provider:"); - File file = new File(providerPath); - if (file.exists()) { - textDevice - .printf("%s already exists. You will need to specify whether existing entries should be " - + "overwritten " - + "(default is 'yes')\n", providerPath); + + if (providerPath != null) { + File file = new File(providerPath); + if (file.exists()) { + textDevice + .printf("%s already exists. You will need to specify whether existing entries should be " + + "overwritten " + + "(default is 'yes')\n", providerPath); + } + String providerURI = JavaKeyStoreProvider.SCHEME_NAME + "://file/" + providerPath; + Configuration conf = new Configuration(false); + conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerURI); + return CredentialProviderFactory.getProviders(conf).get(0); } - String providerURI = JavaKeyStoreProvider.SCHEME_NAME + "://file/" + providerPath; - Configuration conf = new Configuration(false); - conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerURI); - return CredentialProviderFactory.getProviders(conf).get(0); + + return null; } } http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java b/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java index cd820c6..6b83c21 100644 --- a/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java +++ b/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java @@ -19,6 +19,7 @@ package org.apache.atlas.web.dao; import com.google.common.annotations.VisibleForTesting; import java.io.FileInputStream; +import java.io.InputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Properties; @@ -54,6 +55,8 @@ public class UserDao { void loadFileLoginsDetails() { String PROPERTY_FILE_PATH = null; + InputStream inStr = null; + try { Configuration configuration = ApplicationProperties.get(); @@ -61,7 +64,8 @@ public class UserDao { .getString("atlas.authentication.method.file.filename"); if (PROPERTY_FILE_PATH != null && !"".equals(PROPERTY_FILE_PATH)) { userLogins = new Properties(); - userLogins.load(new FileInputStream(PROPERTY_FILE_PATH)); + inStr = new FileInputStream(PROPERTY_FILE_PATH); + userLogins.load(inStr); }else { LOG.error("Error while reading user.properties file, filepath=" + PROPERTY_FILE_PATH); @@ -70,6 +74,14 @@ public class UserDao { } catch (IOException | AtlasException e) { LOG.error("Error while reading user.properties file, 
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java b/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java
index cd820c6..6b83c21 100644
--- a/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java
+++ b/webapp/src/main/java/org/apache/atlas/web/dao/UserDao.java
@@ -19,6 +19,7 @@ package org.apache.atlas.web.dao;
 
 import com.google.common.annotations.VisibleForTesting;
 import java.io.FileInputStream;
+import java.io.InputStream;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Properties;
@@ -54,6 +55,8 @@ public class UserDao {
 
     void loadFileLoginsDetails() {
         String PROPERTY_FILE_PATH = null;
+        InputStream inStr = null;
+
         try {
             Configuration configuration = ApplicationProperties.get();
 
@@ -61,7 +64,8 @@ public class UserDao {
                     .getString("atlas.authentication.method.file.filename");
             if (PROPERTY_FILE_PATH != null && !"".equals(PROPERTY_FILE_PATH)) {
                 userLogins = new Properties();
-                userLogins.load(new FileInputStream(PROPERTY_FILE_PATH));
+                inStr = new FileInputStream(PROPERTY_FILE_PATH);
+                userLogins.load(inStr);
             }else {
                 LOG.error("Error while reading user.properties file, filepath=" + PROPERTY_FILE_PATH);
@@ -70,6 +74,14 @@ public class UserDao {
         } catch (IOException | AtlasException e) {
             LOG.error("Error while reading user.properties file, filepath=" + PROPERTY_FILE_PATH, e);
+        } finally {
+            if(inStr != null) {
+                try {
+                    inStr.close();
+                } catch(Exception excp) {
+                    // ignore
+                }
+            }
         }
     }
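
The UserDao change above fixes a resource leak flagged by Coverity: the FileInputStream handed to Properties.load() was never closed. A self-contained sketch of the same load-then-close-in-finally shape (file name and logging are illustrative, not the Atlas code):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public class PropertiesLoadExample {
        static Properties load(String path) {
            Properties props = new Properties();
            InputStream inStr = null;
            try {
                inStr = new FileInputStream(path);
                props.load(inStr);
            } catch (IOException e) {
                System.err.println("Error while reading " + path + ": " + e.getMessage());
            } finally {
                if (inStr != null) {
                    try {
                        inStr.close();      // release the descriptor even when load() fails
                    } catch (IOException ignored) {
                        // ignore close failures, as the patch does
                    }
                }
            }
            return props;
        }

        public static void main(String[] args) {
            System.out.println(load("users.properties").size());
        }
    }

Where the surrounding code allows it, try-with-resources (try (InputStream inStr = new FileInputStream(path)) { ... }) achieves the same cleanup with less nesting; the patch keeps the explicit finally block instead.
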
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java
index 0d25af7..82016d0 100755
--- a/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java
@@ -339,7 +339,7 @@ public class EntityResource {
     private Response updateEntityPartialByGuid(String guid, HttpServletRequest request) {
         String entityJson = null;
         try {
-            ParamChecker.notEmpty(guid, "Guid property cannot be null");
+            guid = ParamChecker.notEmpty(guid, "Guid property cannot be null");
             entityJson = Servlets.getRequestPayload(request);
             LOG.info("partially updating entity for guid {} : {} ", guid, entityJson);
 
@@ -468,7 +468,7 @@ public class EntityResource {
         }
 
         LOG.debug("Fetching entity definition for guid={} ", guid);
-        ParamChecker.notEmpty(guid, "guid cannot be null");
+        guid = ParamChecker.notEmpty(guid, "guid cannot be null");
        final String entityDefinition = metadataService.getEntityDefinition(guid);
 
         JSONObject response = new JSONObject();
@@ -564,9 +564,9 @@ public class EntityResource {
     public Response getEntityDefinitionByAttribute(String entityType, String attribute, String value) {
         try {
             LOG.debug("Fetching entity definition for type={}, qualified name={}", entityType, value);
-            ParamChecker.notEmpty(entityType, "Entity type cannot be null");
-            ParamChecker.notEmpty(attribute, "attribute name cannot be null");
-            ParamChecker.notEmpty(value, "attribute value cannot be null");
+            entityType = ParamChecker.notEmpty(entityType, "Entity type cannot be null");
+            attribute = ParamChecker.notEmpty(attribute, "attribute name cannot be null");
+            value = ParamChecker.notEmpty(value, "attribute value cannot be null");
 
             final String entityDefinition = metadataService.getEntityDefinition(entityType, attribute, value);

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
index 1cfdbe8..36a6808 100755
--- a/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
@@ -91,7 +91,7 @@ public class MetadataDiscoveryResource {
                                   @DefaultValue(LIMIT_OFFSET_DEFAULT) @QueryParam("offset") int offset) {
         AtlasPerfTracer perf = null;
         if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
-            perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MetadataDiscoveryResource.search(" + query + ")");
+            perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MetadataDiscoveryResource.search(" + query + ", " + limit + ", " + offset + ")");
         }
         Response response = searchUsingQueryDSL(query, limit, offset);
         if (response.getStatus() != Response.Status.OK.getStatusCode()) {
@@ -123,10 +123,10 @@ public class MetadataDiscoveryResource {
         AtlasPerfTracer perf = null;
         try {
             if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
-                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MetadataDiscoveryResource.searchUsingQueryDSL(" + dslQuery + ")");
+                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MetadataDiscoveryResource.searchUsingQueryDSL(" + dslQuery + ", " + limit + ", " + offset + ")");
             }
 
-            ParamChecker.notEmpty(dslQuery, "dslQuery cannot be null");
+            dslQuery = ParamChecker.notEmpty(dslQuery, "dslQuery cannot be null");
             QueryParams queryParams = validateQueryParams(limit, offset);
             final String jsonResultStr = discoveryService.searchByDSL(dslQuery, queryParams);
 
@@ -184,7 +184,7 @@ public class MetadataDiscoveryResource {
                 perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MetadataDiscoveryResource.searchUsingGremlinQuery(" + gremlinQuery + ")");
             }
 
-            ParamChecker.notEmpty(gremlinQuery, "gremlinQuery cannot be null or empty");
+            gremlinQuery = ParamChecker.notEmpty(gremlinQuery, "gremlinQuery cannot be null or empty");
             final List<Map<String, String>> results = discoveryService.searchByGremlin(gremlinQuery);
 
             JSONObject response = new JSONObject();
@@ -230,10 +230,10 @@ public class MetadataDiscoveryResource {
         AtlasPerfTracer perf = null;
         try {
             if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
-                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MetadataDiscoveryResource.searchUsingFullText(" + query + ")");
+                perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "MetadataDiscoveryResource.searchUsingFullText(" + query + ", " + limit + ", " + offset + ")");
             }
 
-            ParamChecker.notEmpty(query, "query cannot be null or empty");
+            query = ParamChecker.notEmpty(query, "query cannot be null or empty");
             QueryParams queryParams = validateQueryParams(limit, offset);
             final String jsonResultStr = discoveryService.searchByFullText(query, queryParams);
             JSONArray rowsJsonArr = new JSONArray(jsonResultStr);
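
The EntityResource and MetadataDiscoveryResource edits above are all the same Coverity-driven pattern: ParamChecker.notEmpty(...) returns the validated value, and the callers now assign that return value back instead of discarding it (the perf-tracer tags also gain the limit and offset arguments). A minimal sketch of such a validate-and-return helper, not the actual Atlas ParamChecker, which may differ in details such as trimming:

    public class NotEmptyExample {
        // Validate-and-return: callers write value = notEmpty(value, msg) so the
        // checked (and here, trimmed) value is the one used afterwards.
        static String notEmpty(String value, String message) {
            if (value == null || value.trim().isEmpty()) {
                throw new IllegalArgumentException(message);
            }
            return value.trim();
        }

        public static void main(String[] args) {
            String guid = "  1234-abcd  ";
            guid = notEmpty(guid, "guid cannot be null");
            System.out.println("[" + guid + "]");   // prints [1234-abcd]
        }
    }
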
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e15629c2/webapp/src/main/java/org/apache/atlas/web/security/AtlasAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/security/AtlasAuthenticationProvider.java b/webapp/src/main/java/org/apache/atlas/web/security/AtlasAuthenticationProvider.java
index b0fe6aa..23d3d70 100644
--- a/webapp/src/main/java/org/apache/atlas/web/security/AtlasAuthenticationProvider.java
+++ b/webapp/src/main/java/org/apache/atlas/web/security/AtlasAuthenticationProvider.java
@@ -69,35 +69,32 @@ public class AtlasAuthenticationProvider extends
 
         if (ldapType.equalsIgnoreCase("LDAP")) {
             try {
-                authentication = ldapAuthenticationProvider
-                        .authenticate(authentication);
+                authentication = ldapAuthenticationProvider.authenticate(authentication);
             } catch (Exception ex) {
                 LOG.error("Error while LDAP authentication", ex);
             }
         } else if (ldapType.equalsIgnoreCase("AD")) {
             try {
-                authentication = adAuthenticationProvider
-                        .authenticate(authentication);
+                authentication = adAuthenticationProvider.authenticate(authentication);
             } catch (Exception ex) {
                 LOG.error("Error while AD authentication", ex);
             }
         }
 
-        if (authentication != null && authentication.isAuthenticated()) {
-            return authentication;
-        } else {
-            // If the LDAP/AD authentication fails try the local filebased login method
-            if (fileAuthenticationMethodEnabled) {
-                authentication = fileAuthenticationProvider
-                        .authenticate(authentication);
-            }
-            if (authentication != null && authentication.isAuthenticated()) {
+        if (authentication != null) {
+            if (authentication.isAuthenticated()) {
                 return authentication;
-            } else {
-                LOG.error("Authentication failed.");
-                throw new AtlasAuthenticationException("Authentication failed.");
+            } else if (fileAuthenticationMethodEnabled) { // If the LDAP/AD authentication fails try the local filebased login method
+                authentication = fileAuthenticationProvider.authenticate(authentication);
+
+                if (authentication != null && authentication.isAuthenticated()) {
+                    return authentication;
+                }
             }
         }
+
+        LOG.error("Authentication failed.");
+        throw new AtlasAuthenticationException("Authentication failed.");
     }
 }
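
The AtlasAuthenticationProvider change above removes a null-dereference risk (the old code could call isAuthenticated() on a null result) and collapses failure handling into a single exit point: try LDAP/AD, fall back to file-based login only when it is enabled, otherwise log once and throw. A small control-flow sketch of that shape, with a hypothetical Authenticator interface standing in for the Spring Security providers:

    public class FallbackAuthExample {
        interface Authenticator {
            boolean authenticate(String user, String password);
        }

        // Try the primary (LDAP/AD) authenticator first; if it does not succeed and the
        // file-based method is enabled, try that; otherwise fail through one error path.
        static boolean authenticate(Authenticator primary, Authenticator fileBased,
                                    boolean fileAuthEnabled, String user, String password) {
            if (primary.authenticate(user, password)) {
                return true;
            }
            if (fileAuthEnabled && fileBased.authenticate(user, password)) {
                return true;
            }
            throw new SecurityException("Authentication failed.");
        }

        public static void main(String[] args) {
            Authenticator ldap = (u, p) -> false;            // simulate an LDAP failure
            Authenticator file = (u, p) -> "admin".equals(u);
            System.out.println(authenticate(ldap, file, true, "admin", "secret")); // true
        }
    }

Keeping a single log-and-throw at the end means every unauthenticated path fails the same way, which is the behavior the restructured method above implements.
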
