http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/AsyncConnectorFacade.java ---------------------------------------------------------------------- diff --git a/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/AsyncConnectorFacade.java b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/AsyncConnectorFacade.java new file mode 100644 index 0000000..416c8c0 --- /dev/null +++ b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/AsyncConnectorFacade.java @@ -0,0 +1,204 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.provisioning.java; + +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.Future; +import org.identityconnectors.common.security.GuardedString; +import org.identityconnectors.framework.api.ConnectorFacade; +import org.identityconnectors.framework.common.objects.Attribute; +import org.identityconnectors.framework.common.objects.AttributeInfo; +import org.identityconnectors.framework.common.objects.AttributeUtil; +import org.identityconnectors.framework.common.objects.ConnectorObject; +import org.identityconnectors.framework.common.objects.ObjectClass; +import org.identityconnectors.framework.common.objects.ObjectClassInfo; +import org.identityconnectors.framework.common.objects.OperationOptions; +import org.identityconnectors.framework.common.objects.Schema; +import org.identityconnectors.framework.common.objects.SyncToken; +import org.identityconnectors.framework.common.objects.Uid; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.scheduling.annotation.Async; +import org.springframework.scheduling.annotation.AsyncResult; +import org.springframework.stereotype.Component; + +/** + * Intercept calls to ConnectorFacade's methods and check if the corresponding connector instance has been configured to + * allow every single operation: if not, simply do nothing. + */ +@Component +public class AsyncConnectorFacade { + + /** + * Logger. 
+ */ + private static final Logger LOG = LoggerFactory.getLogger(AsyncConnectorFacade.class); + + @Async + public Future<Uid> authenticate( + final ConnectorFacade connector, + final String username, + final GuardedString password, + final OperationOptions options) { + + return new AsyncResult<>(connector.authenticate(ObjectClass.ACCOUNT, username, password, options)); + } + + @Async + public Future<Uid> create( + final ConnectorFacade connector, + final ObjectClass objectClass, + final Set<Attribute> attrs, + final OperationOptions options) { + + return new AsyncResult<>(connector.create(objectClass, attrs, options)); + } + + @Async + public Future<Uid> update( + final ConnectorFacade connector, + final ObjectClass objectClass, + final Uid uid, + final Set<Attribute> attrs, + final OperationOptions options) { + + return new AsyncResult<>(connector.update(objectClass, uid, attrs, options)); + } + + @Async + public Future<Uid> delete( + final ConnectorFacade connector, + final ObjectClass objectClass, + final Uid uid, + final OperationOptions options) { + + connector.delete(objectClass, uid, options); + return new AsyncResult<>(uid); + } + + @Async + public Future<SyncToken> getLatestSyncToken( + final ConnectorFacade connector, final ObjectClass objectClass) { + + return new AsyncResult<>(connector.getLatestSyncToken(objectClass)); + } + + @Async + public Future<ConnectorObject> getObject( + final ConnectorFacade connector, + final ObjectClass objectClass, + final Uid uid, + final OperationOptions options) { + + return new AsyncResult<>(connector.getObject(objectClass, uid, options)); + } + + @Async + public Future<Attribute> getObjectAttribute( + final ConnectorFacade connector, + final ObjectClass objectClass, + final Uid uid, + final OperationOptions options, + final String attributeName) { + + Attribute attribute = null; + + final ConnectorObject object = connector.getObject(objectClass, uid, options); + if (object == null) { + LOG.debug("Object for '{}' not found", uid.getUidValue()); + } else { + attribute = object.getAttributeByName(attributeName); + } + + return new AsyncResult<>(attribute); + } + + @Async + public Future<Set<Attribute>> getObjectAttributes( + final ConnectorFacade connector, + final ObjectClass objectClass, + final Uid uid, + final OperationOptions options) { + + final Set<Attribute> attributes = new HashSet<>(); + + final ConnectorObject object = connector.getObject(objectClass, uid, options); + + if (object == null) { + LOG.debug("Object for '{}' not found", uid.getUidValue()); + } else { + for (String attribute : options.getAttributesToGet()) { + attributes.add(object.getAttributeByName(attribute)); + } + } + + return new AsyncResult<>(attributes); + } + + @Async + public Future<Set<String>> getSchemaNames(final ConnectorFacade connector, final boolean includeSpecial) { + final Set<String> schemaNames = new HashSet<String>(); + + try { + final Schema schema = connector.schema(); + for (ObjectClassInfo info : schema.getObjectClassInfo()) { + for (AttributeInfo attrInfo : info.getAttributeInfo()) { + if (includeSpecial || !AttributeUtil.isSpecialName(attrInfo.getName())) { + schemaNames.add(attrInfo.getName()); + } + } + } + } catch (Exception e) { + // catch exception in order to manage unpredictable behaviors + LOG.debug("While reading schema on connector {}", connector, e); + } + + return new AsyncResult<>(schemaNames); + } + + @Async + public Future<Set<ObjectClass>> getSupportedObjectClasses(final ConnectorFacade connector) { + final Set<ObjectClass> 
objectClasses = new HashSet<ObjectClass>(); + + try { + final Schema schema = connector.schema(); + for (ObjectClassInfo info : schema.getObjectClassInfo()) { + objectClasses.add(new ObjectClass(info.getType())); + } + } catch (Exception e) { + // catch exception in order to manage unpredictable behaviors + LOG.debug("While reading schema on connector {}", connector, e); + } + + return new AsyncResult<>(objectClasses); + } + + @Async + public Future<String> validate(final ConnectorFacade connector) { + connector.validate(); + return new AsyncResult<>("OK"); + } + + @Async + public Future<String> test(final ConnectorFacade connector) { + connector.test(); + return new AsyncResult<>("OK"); + } +}
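Note that the capability check described in the class comment above is actually performed by ConnectorFacadeProxy, later in this same commit; AsyncConnectorFacade itself only wraps each blocking ConnId call in a Spring AsyncResult, so that callers can bound it with a timeout. A minimal sketch of that consumption pattern, assuming the facade is obtained from a Spring context and timeoutSeconds comes from the connector configuration (class and field names here are illustrative):

    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import org.identityconnectors.framework.api.ConnectorFacade;
    import org.identityconnectors.framework.common.objects.ObjectClass;
    import org.identityconnectors.framework.common.objects.SyncToken;

    public class SyncTokenReader {

        private final AsyncConnectorFacade asyncFacade; // injected Spring bean

        private final long timeoutSeconds; // e.g. the ConnInstance request timeout

        public SyncTokenReader(final AsyncConnectorFacade asyncFacade, final long timeoutSeconds) {
            this.asyncFacade = asyncFacade;
            this.timeoutSeconds = timeoutSeconds;
        }

        public SyncToken readLatestToken(final ConnectorFacade connector) throws Exception {
            Future<SyncToken> future = asyncFacade.getLatestSyncToken(connector, ObjectClass.ACCOUNT);
            try {
                // give up (and cancel the task) if the connector does not answer in time
                return future.get(timeoutSeconds, TimeUnit.SECONDS);
            } catch (java.util.concurrent.TimeoutException e) {
                future.cancel(true);
                throw new IllegalStateException("Connector request timed out", e);
            }
        }
    }

The @Async annotations only take effect when AsyncConnectorFacade is proxied by a Spring context with asynchronous execution enabled; invoking the methods on a plain instance runs them synchronously on the calling thread.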
http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnIdBundleManagerImpl.java ---------------------------------------------------------------------- diff --git a/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnIdBundleManagerImpl.java b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnIdBundleManagerImpl.java new file mode 100644 index 0000000..461794e --- /dev/null +++ b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnIdBundleManagerImpl.java @@ -0,0 +1,296 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.provisioning.java; + +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.net.URL; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; +import org.apache.commons.lang3.StringUtils; +import org.apache.syncope.core.persistence.api.dao.NotFoundException; +import org.apache.syncope.core.provisioning.api.ConnIdBundleManager; +import org.apache.syncope.core.provisioning.api.URIUtil; +import org.identityconnectors.common.IOUtil; +import org.identityconnectors.common.security.GuardedString; +import org.identityconnectors.framework.api.APIConfiguration; +import org.identityconnectors.framework.api.ConfigurationProperties; +import org.identityconnectors.framework.api.ConnectorInfo; +import org.identityconnectors.framework.api.ConnectorInfoManager; +import org.identityconnectors.framework.api.ConnectorInfoManagerFactory; +import org.identityconnectors.framework.api.ConnectorKey; +import org.identityconnectors.framework.api.RemoteFrameworkConnectionInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ConnIdBundleManagerImpl implements ConnIdBundleManager { + + private static final Logger LOG = LoggerFactory.getLogger(ConnIdBundleManager.class); + + private String stringLocations; + + /** + * ConnId Locations. + */ + private List<URI> locations; + + /** + * ConnectorInfoManager instances. 
+ */ + private final Map<URI, ConnectorInfoManager> connInfoManagers = + Collections.synchronizedMap(new LinkedHashMap<URI, ConnectorInfoManager>()); + + @Override + public List<URI> getLocations() { + init(); + + return locations; + } + + @Override + public void setStringLocations(final String stringLocations) { + this.stringLocations = stringLocations; + } + + private void init() { + if (locations == null) { + locations = new ArrayList<>(); + for (String location : StringUtils.isBlank(stringLocations) ? new String[0] : stringLocations.split(",")) { + try { + locations.add(URIUtil.buildForConnId(location)); + LOG.info("Valid ConnId location: {}", location.trim()); + } catch (Exception e) { + LOG.error("Invalid ConnId location: {}", location.trim(), e); + } + } + locations = Collections.unmodifiableList(locations); + } + } + + private void initLocal(final URI location) { + // 1. Find bundles inside local directory + File bundleDirectory = new File(location); + String[] bundleFiles = bundleDirectory.list(); + if (bundleFiles == null) { + throw new NotFoundException("Local bundles directory " + location); + } + + List<URL> bundleFileURLs = new ArrayList<>(); + for (String file : bundleFiles) { + try { + bundleFileURLs.add(IOUtil.makeURL(bundleDirectory, file)); + } catch (IOException ignore) { + // ignore exception and don't add bundle + LOG.debug("{}/{} is not a valid connector bundle", bundleDirectory.toString(), file, ignore); + } + } + + if (bundleFileURLs.isEmpty()) { + LOG.warn("No connector bundles found in {}", location); + } + LOG.debug("Configuring local connector server:" + + "\n\tFiles: {}", bundleFileURLs); + + // 2. Get connector info manager + ConnectorInfoManager manager = ConnectorInfoManagerFactory.getInstance().getLocalManager( + bundleFileURLs.toArray(new URL[bundleFileURLs.size()])); + if (manager == null) { + throw new NotFoundException("Local ConnectorInfoManager"); + } + + connInfoManagers.put(location, manager); + } + + private void initRemote(final URI location) { + // 1. Extract conf params for remote connection from given URI + final String host = location.getHost(); + final int port = location.getPort(); + final GuardedString key = new GuardedString(location.getUserInfo().toCharArray()); + final boolean useSSL = location.getScheme().equals("connids"); + + final List<TrustManager> trustManagers = new ArrayList<>(); + final String[] params = StringUtils.isBlank(location.getQuery()) ? 
null : location.getQuery().split("&"); + if (params != null && params.length > 0) { + final String[] trustAllCerts = params[0].split("="); + if (trustAllCerts != null && trustAllCerts.length > 1 + && "trustAllCerts".equalsIgnoreCase(trustAllCerts[0]) + && "true".equalsIgnoreCase(trustAllCerts[1])) { + + trustManagers.add(new X509TrustManager() { + + @Override + public void checkClientTrusted(final X509Certificate[] chain, final String authType) + throws CertificateException { + // no checks, trust all + } + + @Override + public void checkServerTrusted(final X509Certificate[] chain, final String authType) + throws CertificateException { + // no checks, trust all + } + + @Override + public X509Certificate[] getAcceptedIssuers() { + return null; + } + }); + } + } + + LOG.debug("Configuring remote connector server:" + + "\n\tHost: {}" + + "\n\tPort: {}" + + "\n\tKey: {}" + + "\n\tUseSSL: {}" + + "\n\tTrustAllCerts: {}", + host, port, key, useSSL, !trustManagers.isEmpty()); + + RemoteFrameworkConnectionInfo info = + new RemoteFrameworkConnectionInfo(host, port, key, useSSL, trustManagers, 60 * 1000); + LOG.debug("Remote connection info: {}", info); + + // 2. Get connector info manager + ConnectorInfoManager manager = ConnectorInfoManagerFactory.getInstance().getRemoteManager(info); + if (manager == null) { + throw new NotFoundException("Remote ConnectorInfoManager"); + } + + connInfoManagers.put(location, manager); + } + + @Override + public void resetConnManagers() { + connInfoManagers.clear(); + } + + @Override + public Map<URI, ConnectorInfoManager> getConnManagers() { + init(); + + if (connInfoManagers.isEmpty()) { + for (URI location : locations) { + try { + if ("file".equals(location.getScheme())) { + LOG.debug("Local initialization: {}", location); + initLocal(location); + } else if (location.getScheme().startsWith("connid")) { + LOG.debug("Remote initialization: {}", location); + initRemote(location); + } else { + LOG.warn("Unsupported scheme: {}", location); + } + } catch (Exception e) { + LOG.error("Could not process {}", location, e); + } + } + } + + if (LOG.isDebugEnabled()) { + for (Map.Entry<URI, ConnectorInfoManager> entry : connInfoManagers.entrySet()) { + LOG.debug("Connector bundles found at {}", entry.getKey()); + for (ConnectorInfo connInfo : entry.getValue().getConnectorInfos()) { + LOG.debug("\t{}", connInfo.getConnectorDisplayName()); + } + } + } + + return connInfoManagers; + } + + @Override + public ConnectorInfo getConnectorInfo( + final String location, final String bundleName, final String bundleVersion, final String connectorName) { + + // check ConnIdLocation + URI uriLocation = null; + try { + uriLocation = URIUtil.buildForConnId(location); + } catch (Exception e) { + throw new IllegalArgumentException("Invalid ConnId location " + location, e); + } + + // create key for search all properties + final ConnectorKey key = new ConnectorKey(bundleName, bundleVersion, connectorName); + + if (LOG.isDebugEnabled()) { + LOG.debug("\nBundle name: " + key.getBundleName() + + "\nBundle version: " + key.getBundleVersion() + + "\nBundle class: " + key.getConnectorName()); + } + + // get the specified connector + ConnectorInfo info = null; + if (getConnManagers().containsKey(uriLocation)) { + info = getConnManagers().get(uriLocation).findConnectorInfo(key); + } + if (info == null) { + throw new NotFoundException("Connector Info for location " + location + " and key " + key); + } + + return info; + } + + @Override + public Map<String, List<ConnectorInfo>> getConnectorInfos() { + 
final Map<String, List<ConnectorInfo>> infos = new LinkedHashMap<>(); + for (Map.Entry<URI, ConnectorInfoManager> entry : connInfoManagers.entrySet()) { + infos.put(entry.getKey().toString(), entry.getValue().getConnectorInfos()); + } + return infos; + } + + @Override + public ConfigurationProperties getConfigurationProperties(final ConnectorInfo info) { + if (info == null) { + throw new NotFoundException("Invalid: connector info is null"); + } + + // create default configuration + final APIConfiguration apiConfig = info.createDefaultAPIConfiguration(); + if (apiConfig == null) { + throw new NotFoundException("Default API configuration"); + } + + // retrieve the ConfigurationProperties. + final ConfigurationProperties properties = apiConfig.getConfigurationProperties(); + if (properties == null) { + throw new NotFoundException("Configuration properties"); + } + + if (LOG.isDebugEnabled()) { + for (String propName : properties.getPropertyNames()) { + LOG.debug("Property Name: {}" + + "\nProperty Type: {}", + properties.getProperty(propName).getName(), + properties.getProperty(propName).getType()); + } + } + + return properties; + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnectorFacadeProxy.java ---------------------------------------------------------------------- diff --git a/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnectorFacadeProxy.java b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnectorFacadeProxy.java new file mode 100644 index 0000000..8516804 --- /dev/null +++ b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnectorFacadeProxy.java @@ -0,0 +1,599 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
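For reference, the comma-separated value passed to ConnIdBundleManagerImpl.setStringLocations() above may mix local and remote entries: file: URIs are handled by initLocal(), while connid:// and connids:// URIs are handled by initRemote(), with the connector server key carried in the user-info part and an optional trustAllCerts=true query parameter. An illustrative value (paths, hosts and keys are made up; the property that feeds it depends on the provisioning configuration):

    file:/opt/syncope/bundles/,connid://sampleKey@connserver.example.org:8759,connids://sampleKey@connserver.example.org:8760?trustAllCerts=true

When trustAllCerts=true is given, the all-trusting X509TrustManager shown above is installed and certificate validation of the remote connector server is effectively skipped, which suits test setups rather than production.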
+ */ +package org.apache.syncope.core.provisioning.java; + +import java.io.File; +import java.net.URI; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import javax.ws.rs.NotFoundException; +import org.apache.syncope.common.lib.types.ConnConfProperty; +import org.apache.syncope.common.lib.types.ConnectorCapability; +import org.apache.syncope.common.lib.types.PropagationMode; +import org.apache.syncope.common.lib.types.ResourceOperation; +import org.apache.syncope.core.persistence.api.entity.ConnInstance; +import org.apache.syncope.core.persistence.api.entity.MappingItem; +import org.apache.syncope.core.provisioning.api.ConnIdBundleManager; +import org.apache.syncope.core.provisioning.api.ConnPoolConfUtil; +import org.apache.syncope.core.provisioning.api.Connector; +import org.apache.syncope.core.provisioning.api.TimeoutException; +import org.apache.syncope.core.misc.spring.ApplicationContextProvider; +import org.identityconnectors.common.security.GuardedByteArray; +import org.identityconnectors.common.security.GuardedString; +import org.identityconnectors.framework.api.APIConfiguration; +import org.identityconnectors.framework.api.ConfigurationProperties; +import org.identityconnectors.framework.api.ConnectorFacade; +import org.identityconnectors.framework.api.ConnectorFacadeFactory; +import org.identityconnectors.framework.api.ConnectorInfo; +import org.identityconnectors.framework.common.objects.Attribute; +import org.identityconnectors.framework.common.objects.ConnectorObject; +import org.identityconnectors.framework.common.objects.Name; +import org.identityconnectors.framework.common.objects.ObjectClass; +import org.identityconnectors.framework.common.objects.OperationOptions; +import org.identityconnectors.framework.common.objects.OperationOptionsBuilder; +import org.identityconnectors.framework.common.objects.OperationalAttributes; +import org.identityconnectors.framework.common.objects.ResultsHandler; +import org.identityconnectors.framework.common.objects.SyncDeltaBuilder; +import org.identityconnectors.framework.common.objects.SyncDeltaType; +import org.identityconnectors.framework.common.objects.SyncResultsHandler; +import org.identityconnectors.framework.common.objects.SyncToken; +import org.identityconnectors.framework.common.objects.Uid; +import org.identityconnectors.framework.common.objects.filter.Filter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.ClassUtils; + +public class ConnectorFacadeProxy implements Connector { + + /** + * Logger. + */ + private static final Logger LOG = LoggerFactory.getLogger(ConnectorFacadeProxy.class); + + /** + * Connector facade wrapped instance. + */ + private final ConnectorFacade connector; + + /** + * Active connector instance. + */ + private final ConnInstance activeConnInstance; + + @Autowired + private AsyncConnectorFacade asyncFacade; + + /** + * Use the passed connector instance to build a ConnectorFacade that will be used to make all wrapped calls. 
+ * + * @param connInstance the connector instance configuration + * @see ConnectorInfo + * @see APIConfiguration + * @see ConfigurationProperties + * @see ConnectorFacade + */ + public ConnectorFacadeProxy(final ConnInstance connInstance) { + this.activeConnInstance = connInstance; + + ConnIdBundleManager connIdBundleManager = + ApplicationContextProvider.getApplicationContext().getBean(ConnIdBundleManager.class); + ConnectorInfo info = connIdBundleManager.getConnectorInfo(connInstance.getLocation(), + connInstance.getBundleName(), connInstance.getVersion(), connInstance.getConnectorName()); + + // create default configuration + APIConfiguration apiConfig = info.createDefaultAPIConfiguration(); + + // set connector configuration according to conninstance's + ConfigurationProperties properties = apiConfig.getConfigurationProperties(); + for (ConnConfProperty property : connInstance.getConfiguration()) { + if (property.getValues() != null && !property.getValues().isEmpty()) { + properties.setPropertyValue(property.getSchema().getName(), + getPropertyValue(property.getSchema().getType(), property.getValues())); + } + } + + // set pooling configuration (if supported) according to conninstance's + if (connInstance.getPoolConf() != null) { + if (apiConfig.isConnectorPoolingSupported()) { + ConnPoolConfUtil.updateObjectPoolConfiguration( + apiConfig.getConnectorPoolConfiguration(), connInstance.getPoolConf()); + } else { + LOG.warn("Connector pooling not supported for {}", info); + } + } + + // gets new connector, with the given configuration + connector = ConnectorFacadeFactory.getInstance().newInstance(apiConfig); + if (connector == null) { + throw new NotFoundException("Connector"); + } + + // make sure we have set up the Configuration properly + connector.validate(); + } + + @Override + public Uid authenticate(final String username, final String password, final OperationOptions options) { + Uid result = null; + + if (activeConnInstance.getCapabilities().contains(ConnectorCapability.AUTHENTICATE)) { + final Future<Uid> future = asyncFacade.authenticate( + connector, username, new GuardedString(password.toCharArray()), options); + try { + result = future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } else { + LOG.info("Authenticate was attempted, although the connector only has these capabilities: {}. No action.", + activeConnInstance.getCapabilities()); + } + + return result; + } + + @Override + public Uid create(final PropagationMode propagationMode, final ObjectClass objectClass, final Set<Attribute> attrs, + final OperationOptions options, final Set<String> propagationAttempted) { + + Uid result = null; + + if (propagationMode == PropagationMode.ONE_PHASE + ? 
activeConnInstance.getCapabilities().contains(ConnectorCapability.ONE_PHASE_CREATE) + : activeConnInstance.getCapabilities().contains(ConnectorCapability.TWO_PHASES_CREATE)) { + + propagationAttempted.add("create"); + + final Future<Uid> future = asyncFacade.create(connector, objectClass, attrs, options); + try { + result = future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } else { + LOG.info("Create was attempted, although the connector only has these capabilities: {}. No action.", + activeConnInstance.getCapabilities()); + } + + return result; + } + + @Override + public Uid update(final PropagationMode propagationMode, final ObjectClass objectClass, final Uid uid, + final Set<Attribute> attrs, final OperationOptions options, final Set<String> propagationAttempted) { + + Uid result = null; + + if (propagationMode == PropagationMode.ONE_PHASE + ? activeConnInstance.getCapabilities().contains(ConnectorCapability.ONE_PHASE_UPDATE) + : activeConnInstance.getCapabilities().contains(ConnectorCapability.TWO_PHASES_UPDATE)) { + + propagationAttempted.add("update"); + + final Future<Uid> future = asyncFacade.update(connector, objectClass, uid, attrs, options); + + try { + result = future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } else { + LOG.info("Update for {} was attempted, although the " + + "connector only has these capabilities: {}. No action.", uid.getUidValue(), activeConnInstance. + getCapabilities()); + } + + return result; + } + + @Override + public void delete(final PropagationMode propagationMode, final ObjectClass objectClass, final Uid uid, + final OperationOptions options, final Set<String> propagationAttempted) { + + if (propagationMode == PropagationMode.ONE_PHASE + ? activeConnInstance.getCapabilities().contains(ConnectorCapability.ONE_PHASE_DELETE) + : activeConnInstance.getCapabilities().contains(ConnectorCapability.TWO_PHASES_DELETE)) { + + propagationAttempted.add("delete"); + + final Future<Uid> future = asyncFacade.delete(connector, objectClass, uid, options); + + try { + future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } else { + LOG.info("Delete for {} was attempted, although the connector only has these capabilities: {}. 
No action.", + uid.getUidValue(), activeConnInstance.getCapabilities()); + } + } + + @Override + public void sync(final ObjectClass objectClass, final SyncToken token, final SyncResultsHandler handler, + final OperationOptions options) { + + if (activeConnInstance.getCapabilities().contains(ConnectorCapability.SYNC)) { + connector.sync(objectClass, token, handler, options); + } else { + LOG.info("Sync was attempted, although the connector only has these capabilities: {}. No action.", + activeConnInstance.getCapabilities()); + } + } + + @Override + public SyncToken getLatestSyncToken(final ObjectClass objectClass) { + SyncToken result = null; + + if (activeConnInstance.getCapabilities().contains(ConnectorCapability.SYNC)) { + final Future<SyncToken> future = asyncFacade.getLatestSyncToken(connector, objectClass); + + try { + result = future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } else { + LOG.info("getLatestSyncToken was attempted, although the " + + "connector only has these capabilities: {}. No action.", activeConnInstance.getCapabilities()); + } + + return result; + } + + @Override + public ConnectorObject getObject(final ObjectClass objectClass, final Uid uid, final OperationOptions options) { + return getObject(null, null, objectClass, uid, options); + } + + @Override + public ConnectorObject getObject(final PropagationMode propagationMode, final ResourceOperation operationType, + final ObjectClass objectClass, final Uid uid, final OperationOptions options) { + + Future<ConnectorObject> future = null; + + if (activeConnInstance.getCapabilities().contains(ConnectorCapability.SEARCH)) { + if (operationType == null) { + future = asyncFacade.getObject(connector, objectClass, uid, options); + } else { + switch (operationType) { + case CREATE: + if (propagationMode == null || (propagationMode == PropagationMode.ONE_PHASE + ? activeConnInstance.getCapabilities(). + contains(ConnectorCapability.ONE_PHASE_CREATE) + : activeConnInstance.getCapabilities(). + contains(ConnectorCapability.TWO_PHASES_CREATE))) { + + future = asyncFacade.getObject(connector, objectClass, uid, options); + } + break; + case UPDATE: + if (propagationMode == null || (propagationMode == PropagationMode.ONE_PHASE + ? activeConnInstance.getCapabilities(). + contains(ConnectorCapability.ONE_PHASE_UPDATE) + : activeConnInstance.getCapabilities(). + contains(ConnectorCapability.TWO_PHASES_UPDATE))) { + + future = asyncFacade.getObject(connector, objectClass, uid, options); + } + break; + default: + future = asyncFacade.getObject(connector, objectClass, uid, options); + } + } + } else { + LOG.info("Search was attempted, although the connector only has these capabilities: {}. No action.", + activeConnInstance.getCapabilities()); + } + + try { + return future == null ? 
null : future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } + + @Override + public List<ConnectorObject> search(final ObjectClass objectClass, final Filter filter, + final OperationOptions options) { + + final List<ConnectorObject> result = new ArrayList<>(); + + search(objectClass, filter, new ResultsHandler() { + + @Override + public boolean handle(final ConnectorObject obj) { + return result.add(obj); + } + }, options); + + return result; + } + + @Override + public void getAllObjects( + final ObjectClass objectClass, final SyncResultsHandler handler, final OperationOptions options) { + + search(objectClass, null, new ResultsHandler() { + + @Override + public boolean handle(final ConnectorObject obj) { + return handler.handle(new SyncDeltaBuilder(). + setObject(obj). + setUid(obj.getUid()). + setDeltaType(SyncDeltaType.CREATE_OR_UPDATE). + setToken(new SyncToken("")). + build()); + } + }, options); + } + + @Override + public Attribute getObjectAttribute(final ObjectClass objectClass, final Uid uid, final OperationOptions options, + final String attributeName) { + + final Future<Attribute> future = asyncFacade.getObjectAttribute(connector, objectClass, uid, options, + attributeName); + try { + return future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } + + @Override + public Set<Attribute> getObjectAttributes(final ObjectClass objectClass, final Uid uid, + final OperationOptions options) { + + final Future<Set<Attribute>> future = asyncFacade.getObjectAttributes(connector, objectClass, uid, options); + try { + return future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } + + @Override + public Set<String> getSchemaNames(final boolean includeSpecial) { + final Future<Set<String>> future = asyncFacade.getSchemaNames(connector, includeSpecial); + try { + return future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } + + @Override + public Set<ObjectClass> getSupportedObjectClasses() { + final Future<Set<ObjectClass>> future = 
asyncFacade.getSupportedObjectClasses(connector); + try { + return future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } + + @Override + public void validate() { + final Future<String> future = asyncFacade.test(connector); + try { + future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } + + @Override + public void test() { + final Future<String> future = asyncFacade.test(connector); + try { + future.get(activeConnInstance.getConnRequestTimeout(), TimeUnit.SECONDS); + } catch (java.util.concurrent.TimeoutException e) { + future.cancel(true); + throw new TimeoutException("Request timeout"); + } catch (Exception e) { + LOG.error("Connector request execution failure", e); + if (e.getCause() instanceof RuntimeException) { + throw (RuntimeException) e.getCause(); + } else { + throw new IllegalArgumentException(e.getCause()); + } + } + } + + private void search( + final ObjectClass objectClass, + final Filter filter, + final ResultsHandler handler, + final OperationOptions options) { + + if (activeConnInstance.getCapabilities().contains(ConnectorCapability.SEARCH)) { + connector.search(objectClass, filter, handler, options); + } else { + LOG.info("Search was attempted, although the connector only has these capabilities: {}. No action.", + activeConnInstance.getCapabilities()); + } + } + + @Override + public ConnInstance getActiveConnInstance() { + return activeConnInstance; + } + + @Override + public OperationOptions getOperationOptions(final Collection<? extends MappingItem> mapItems) { + // ------------------------------------- + // Ask just for mapped attributes + // ------------------------------------- + final OperationOptionsBuilder oob = new OperationOptionsBuilder(); + + final Set<String> attrsToGet = new HashSet<String>(); + attrsToGet.add(Name.NAME); + attrsToGet.add(Uid.NAME); + attrsToGet.add(OperationalAttributes.ENABLE_NAME); + + for (MappingItem item : mapItems) { + attrsToGet.add(item.getExtAttrName()); + } + + oob.setAttributesToGet(attrsToGet); + // ------------------------------------- + + return oob.build(); + } + + private Object getPropertyValue(final String propType, final List<?> values) { + Object value = null; + + try { + final Class<?> propertySchemaClass = ClassUtils.forName(propType, ClassUtils.getDefaultClassLoader()); + + if (GuardedString.class.equals(propertySchemaClass)) { + value = new GuardedString(values.get(0).toString().toCharArray()); + } else if (GuardedByteArray.class.equals(propertySchemaClass)) { + value = new GuardedByteArray((byte[]) values.get(0)); + } else if (Character.class.equals(propertySchemaClass) || Character.TYPE.equals(propertySchemaClass)) { + value = values.get(0) == null || values.get(0).toString().isEmpty() + ? 
null : values.get(0).toString().charAt(0); + } else if (Integer.class.equals(propertySchemaClass) || Integer.TYPE.equals(propertySchemaClass)) { + value = Integer.parseInt(values.get(0).toString()); + } else if (Long.class.equals(propertySchemaClass) || Long.TYPE.equals(propertySchemaClass)) { + value = Long.parseLong(values.get(0).toString()); + } else if (Float.class.equals(propertySchemaClass) || Float.TYPE.equals(propertySchemaClass)) { + value = Float.parseFloat(values.get(0).toString()); + } else if (Double.class.equals(propertySchemaClass) || Double.TYPE.equals(propertySchemaClass)) { + value = Double.parseDouble(values.get(0).toString()); + } else if (Boolean.class.equals(propertySchemaClass) || Boolean.TYPE.equals(propertySchemaClass)) { + value = Boolean.parseBoolean(values.get(0).toString()); + } else if (URI.class.equals(propertySchemaClass)) { + value = URI.create(values.get(0).toString()); + } else if (File.class.equals(propertySchemaClass)) { + value = new File(values.get(0).toString()); + } else if (String[].class.equals(propertySchemaClass)) { + value = values.toArray(new String[] {}); + } else { + value = values.get(0) == null ? null : values.get(0).toString(); + } + } catch (Exception e) { + LOG.error("Invalid ConnConfProperty specified: {} {}", propType, values, e); + } + + return value; + } + + @Override + public String toString() { + return "ConnectorFacadeProxy{" + + "connector=" + connector + "\n" + "capabitilies=" + activeConnInstance.getCapabilities() + '}'; + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnectorManager.java ---------------------------------------------------------------------- diff --git a/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnectorManager.java b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnectorManager.java new file mode 100644 index 0000000..e9041fe --- /dev/null +++ b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/ConnectorManager.java @@ -0,0 +1,185 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
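ConnectorFacadeProxy.getPropertyValue() above converts each configured value according to the fully qualified type name declared in the property schema, defaulting to a plain String. A condensed sketch of the same idea, limited to the guarded-string branch and the default branch (this helper is illustrative only and compares class names directly instead of resolving them through ClassUtils):

    import java.util.List;
    import org.identityconnectors.common.security.GuardedString;

    final class ConfPropertyValues {

        private ConfPropertyValues() {
        }

        // Convert the first configured value according to the declared schema type.
        static Object convert(final String propType, final List<?> values) {
            if (GuardedString.class.getName().equals(propType)) {
                // secrets are wrapped so that they are not kept or logged in clear text
                return new GuardedString(values.get(0).toString().toCharArray());
            }
            return values.get(0) == null ? null : values.get(0).toString();
        }
    }

A property whose schema declares org.identityconnectors.common.security.GuardedString and carries a single value would therefore reach ConfigurationProperties.setPropertyValue() as a GuardedString rather than as clear text.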
+ */ +package org.apache.syncope.core.provisioning.java; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import org.apache.commons.lang3.SerializationUtils; +import org.apache.syncope.common.lib.types.ConnConfProperty; +import org.apache.syncope.core.persistence.api.dao.ExternalResourceDAO; +import org.apache.syncope.core.persistence.api.entity.ConnInstance; +import org.apache.syncope.core.persistence.api.entity.ExternalResource; +import org.apache.syncope.core.provisioning.api.ConnIdBundleManager; +import org.apache.syncope.core.provisioning.api.Connector; +import org.apache.syncope.core.provisioning.api.ConnectorFactory; +import org.apache.syncope.core.provisioning.api.ConnectorRegistry; +import org.apache.syncope.core.misc.spring.ApplicationContextProvider; +import org.identityconnectors.common.l10n.CurrentLocale; +import org.identityconnectors.framework.api.ConnectorFacadeFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +/** + * Load ConnId connector instances. + */ +@Component +public class ConnectorManager implements ConnectorRegistry, ConnectorFactory { + + private static final Logger LOG = LoggerFactory.getLogger(ConnectorManager.class); + + @Autowired + private ConnIdBundleManager connIdBundleManager; + + @Autowired + private ExternalResourceDAO resourceDAO; + + private String getBeanName(final ExternalResource resource) { + return String.format("connInstance-%d-%s", resource.getConnector().getKey(), resource.getKey()); + } + + @Override + public Connector getConnector(final ExternalResource resource) { + // Try to re-create connector bean from underlying resource (useful for managing failover scenarios) + if (!ApplicationContextProvider.getBeanFactory().containsBean(getBeanName(resource))) { + registerConnector(resource); + } + + return (Connector) ApplicationContextProvider.getBeanFactory().getBean(getBeanName(resource)); + } + + @Override + public Connector createConnector(final ConnInstance connInstance, final Set<ConnConfProperty> configuration) { + final ConnInstance connInstanceClone = SerializationUtils.clone(connInstance); + + connInstanceClone.setConfiguration(configuration); + + Connector connector = new ConnectorFacadeProxy(connInstanceClone); + ApplicationContextProvider.getBeanFactory().autowireBean(connector); + + return connector; + } + + @Override + public ConnInstance getOverriddenConnInstance(final ConnInstance connInstance, + final Set<ConnConfProperty> overridden) { + final Set<ConnConfProperty> configuration = new HashSet<>(); + final Map<String, ConnConfProperty> overridable = new HashMap<>(); + + // add not overridable properties + for (ConnConfProperty prop : connInstance.getConfiguration()) { + if (prop.isOverridable()) { + overridable.put(prop.getSchema().getName(), prop); + } else { + configuration.add(prop); + } + } + + // add overridden properties + for (ConnConfProperty prop : overridden) { + if (overridable.containsKey(prop.getSchema().getName()) && !prop.getValues().isEmpty()) { + configuration.add(prop); + overridable.remove(prop.getSchema().getName()); + } + } + + // add overridable properties not overridden + configuration.addAll(overridable.values()); + + connInstance.setConfiguration(configuration); + + return connInstance; + } + + @Override + public void 
registerConnector(final ExternalResource resource) { + final ConnInstance connInstance = getOverriddenConnInstance( + SerializationUtils.clone(resource.getConnector()), resource.getConnInstanceConfiguration()); + final Connector connector = createConnector(resource.getConnector(), connInstance.getConfiguration()); + LOG.debug("Connector to be registered: {}", connector); + + final String beanName = getBeanName(resource); + + if (ApplicationContextProvider.getBeanFactory().containsSingleton(beanName)) { + unregisterConnector(beanName); + } + + ApplicationContextProvider.getBeanFactory().registerSingleton(beanName, connector); + LOG.debug("Successfully registered bean {}", beanName); + } + + @Override + public void unregisterConnector(final String id) { + ApplicationContextProvider.getBeanFactory().destroySingleton(id); + } + + @Override + public Integer getPriority() { + return 100; + } + + @Transactional(readOnly = true) + @Override + public void load() { + // This is needed in order to avoid encoding problems when sending error messages via REST + CurrentLocale.set(Locale.ENGLISH); + + // Load all connector bundles + connIdBundleManager.getConnManagers(); + + // Load all resource-specific connectors + int connectors = 0; + for (ExternalResource resource : resourceDAO.findAll()) { + LOG.info("Registering resource-connector pair {}-{}", resource, resource.getConnector()); + try { + registerConnector(resource); + connectors++; + } catch (Exception e) { + LOG.error("While registering resource-connector pair {}-{}", resource, resource.getConnector(), e); + } + } + + LOG.info("Done loading {} connectors", connectors); + } + + @Transactional(readOnly = true) + @Override + public void unload() { + int connectors = 0; + for (ExternalResource resource : resourceDAO.findAll()) { + final String beanName = getBeanName(resource); + if (ApplicationContextProvider.getBeanFactory().containsSingleton(beanName)) { + LOG.info("Unegistering resource-connector pair {}-{}", resource, resource.getConnector()); + unregisterConnector(beanName); + connectors++; + } + } + + LOG.info("Done unloading {} connectors", connectors); + + ConnectorFacadeFactory.getInstance().dispose(); + connIdBundleManager.resetConnManagers(); + LOG.info("All connector resources disposed"); + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultAttributableTransformer.java ---------------------------------------------------------------------- diff --git a/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultAttributableTransformer.java b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultAttributableTransformer.java new file mode 100644 index 0000000..d1a14f0 --- /dev/null +++ b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultAttributableTransformer.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.provisioning.java; + +import org.apache.syncope.common.lib.mod.AbstractAttributableMod; +import org.apache.syncope.common.lib.to.AbstractAttributableTO; +import org.apache.syncope.core.provisioning.api.AttributableTransformer; + +/** + * Default empty implementation returning received input as result. + */ +public class DefaultAttributableTransformer implements AttributableTransformer { + + @Override + public <T extends AbstractAttributableTO> T transform(final T input) { + return input; + } + + @Override + public <T extends AbstractAttributableMod> T transform(final T input) { + return input; + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultRoleProvisioningManager.java ---------------------------------------------------------------------- diff --git a/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultRoleProvisioningManager.java b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultRoleProvisioningManager.java new file mode 100644 index 0000000..d78e0fc --- /dev/null +++ b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultRoleProvisioningManager.java @@ -0,0 +1,223 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
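ConnectorManager above exposes the registered ConnectorFacadeProxy instances through the ConnectorFactory interface, one Spring singleton per external resource (bean name connInstance-<connector key>-<resource key>), re-registering the bean on demand when it is missing. A hedged sketch of how provisioning code could obtain and use such a connector, relying only on the interfaces shown in this commit (the surrounding class and the choice of ONE_PHASE are illustrative):

    import java.util.Collection;
    import java.util.HashSet;
    import java.util.Set;
    import org.apache.syncope.common.lib.types.PropagationMode;
    import org.apache.syncope.core.persistence.api.entity.ExternalResource;
    import org.apache.syncope.core.persistence.api.entity.MappingItem;
    import org.apache.syncope.core.provisioning.api.Connector;
    import org.apache.syncope.core.provisioning.api.ConnectorFactory;
    import org.identityconnectors.framework.common.objects.Attribute;
    import org.identityconnectors.framework.common.objects.ObjectClass;
    import org.identityconnectors.framework.common.objects.OperationOptions;
    import org.identityconnectors.framework.common.objects.Uid;
    import org.springframework.beans.factory.annotation.Autowired;

    public class SampleProvisioner {

        @Autowired
        private ConnectorFactory connectorFactory;

        public Uid createOnResource(final ExternalResource resource,
                final Collection<? extends MappingItem> mappingItems, final Set<Attribute> attrs) {

            // lazily registers the connector bean for this resource if it is not there yet
            Connector connector = connectorFactory.getConnector(resource);

            // ask the resource only for the mapped attributes
            OperationOptions options = connector.getOperationOptions(mappingItems);

            Set<String> propagationAttempted = new HashSet<>();
            // capability checks and timeout handling happen inside ConnectorFacadeProxy
            return connector.create(PropagationMode.ONE_PHASE, ObjectClass.ACCOUNT, attrs, options,
                    propagationAttempted);
        }
    }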
+ */
+package org.apache.syncope.core.provisioning.java;
+
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.syncope.common.lib.mod.RoleMod;
+import org.apache.syncope.common.lib.to.AttrTO;
+import org.apache.syncope.common.lib.to.PropagationStatus;
+import org.apache.syncope.common.lib.to.RoleTO;
+import org.apache.syncope.common.lib.types.PropagationByResource;
+import org.apache.syncope.core.persistence.api.RoleEntitlementUtil;
+import org.apache.syncope.core.persistence.api.dao.RoleDAO;
+import org.apache.syncope.core.persistence.api.entity.role.Role;
+import org.apache.syncope.core.persistence.api.entity.task.PropagationTask;
+import org.apache.syncope.core.provisioning.api.RoleProvisioningManager;
+import org.apache.syncope.core.provisioning.api.WorkflowResult;
+import org.apache.syncope.core.provisioning.api.propagation.PropagationException;
+import org.apache.syncope.core.provisioning.api.propagation.PropagationManager;
+import org.apache.syncope.core.provisioning.api.propagation.PropagationReporter;
+import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskExecutor;
+import org.apache.syncope.core.misc.security.AuthContextUtil;
+import org.apache.syncope.core.misc.spring.ApplicationContextProvider;
+import org.apache.syncope.core.workflow.api.RoleWorkflowAdapter;
+
+public class DefaultRoleProvisioningManager implements RoleProvisioningManager {
+
+    private static final Logger LOG = LoggerFactory.getLogger(RoleProvisioningManager.class);
+
+    @Autowired
+    protected RoleWorkflowAdapter rwfAdapter;
+
+    @Autowired
+    protected PropagationManager propagationManager;
+
+    @Autowired
+    protected PropagationTaskExecutor taskExecutor;
+
+    @Autowired
+    protected RoleDAO roleDAO;
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> create(final RoleTO subject) {
+        return create(subject, Collections.<String>emptySet());
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> create(final RoleTO subject, final Set<String> excludedResources) {
+        WorkflowResult<Long> created = rwfAdapter.create(subject);
+
+        AuthContextUtil.extendAuthContext(
+                created.getResult(), RoleEntitlementUtil.getEntitlementNameFromRoleKey(created.getResult()));
+
+        List<PropagationTask> tasks =
+                propagationManager.getRoleCreateTaskIds(created, subject.getVirAttrs(), excludedResources);
+        PropagationReporter propagationReporter = ApplicationContextProvider.getApplicationContext().getBean(
+                PropagationReporter.class);
+        try {
+            taskExecutor.execute(tasks, propagationReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propagationReporter.onPrimaryResourceFailure(tasks);
+        }
+
+        return new AbstractMap.SimpleEntry<>(created.getResult(), propagationReporter.getStatuses());
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> create(
+            final RoleTO roleTO, final Map<Long, String> roleOwnerMap, final Set<String> excludedResources) {
+
+        WorkflowResult<Long> created = rwfAdapter.create(roleTO);
+        AttrTO roleOwner = roleTO.getPlainAttrMap().get(StringUtils.EMPTY);
+        if (roleOwner != null) {
+            roleOwnerMap.put(created.getResult(), roleOwner.getValues().iterator().next());
+        }
+
+        AuthContextUtil.extendAuthContext(
+                created.getResult(), RoleEntitlementUtil.getEntitlementNameFromRoleKey(created.getResult()));
+
+        List<PropagationTask> tasks = propagationManager.getRoleCreateTaskIds(
+                created, roleTO.getVirAttrs(), excludedResources);
+
+        taskExecutor.execute(tasks);
+
+        return new AbstractMap.SimpleEntry<>(created.getResult(), null);
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> update(final RoleMod subjectMod) {
+        return update(subjectMod, Collections.<String>emptySet());
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> update(
+            final RoleMod subjectMod, final Set<String> excludedResources) {
+
+        WorkflowResult<Long> updated = rwfAdapter.update(subjectMod);
+
+        List<PropagationTask> tasks = propagationManager.getRoleUpdateTaskIds(updated,
+                subjectMod.getVirAttrsToRemove(), subjectMod.getVirAttrsToUpdate());
+        PropagationReporter propagationReporter =
+                ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
+        try {
+            taskExecutor.execute(tasks, propagationReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propagationReporter.onPrimaryResourceFailure(tasks);
+        }
+
+        Map.Entry<Long, List<PropagationStatus>> result = new AbstractMap.SimpleEntry<>(
+                updated.getResult(), propagationReporter.getStatuses());
+        return result;
+    }
+
+    @Override
+    public List<PropagationStatus> delete(final Long subjectKey) {
+        final List<Role> toBeDeprovisioned = new ArrayList<>();
+
+        final Role syncopeRole = roleDAO.find(subjectKey);
+
+        if (syncopeRole != null) {
+            toBeDeprovisioned.add(syncopeRole);
+
+            final List<Role> descendants = roleDAO.findDescendants(toBeDeprovisioned.get(0));
+            if (descendants != null) {
+                toBeDeprovisioned.addAll(descendants);
+            }
+        }
+
+        final List<PropagationTask> tasks = new ArrayList<>();
+
+        for (Role role : toBeDeprovisioned) {
+            // Generate propagation tasks for deleting users from role resources, if they are on those resources only
+            // because of the role being deleted (see SYNCOPE-357)
+            for (Map.Entry<Long, PropagationByResource> entry : roleDAO.findUsersWithIndirectResources(role.
+                    getKey()).entrySet()) {
+
+                WorkflowResult<Long> wfResult =
+                        new WorkflowResult<>(entry.getKey(), entry.getValue(), Collections.<String>emptySet());
+                tasks.addAll(propagationManager.getUserDeleteTaskIds(wfResult));
+            }
+
+            // Generate propagation tasks for deleting this role from resources
+            tasks.addAll(propagationManager.getRoleDeleteTaskIds(role.getKey()));
+        }
+
+        PropagationReporter propagationReporter = ApplicationContextProvider.getApplicationContext().
+                getBean(PropagationReporter.class);
+        try {
+            taskExecutor.execute(tasks, propagationReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propagationReporter.onPrimaryResourceFailure(tasks);
+        }
+
+        try {
+            rwfAdapter.delete(subjectKey);
+        } catch (RuntimeException e) {
+            throw e;
+        }
+
+        return propagationReporter.getStatuses();
+    }
+
+    @Override
+    public Long unlink(final RoleMod subjectMod) {
+        WorkflowResult<Long> updated = rwfAdapter.update(subjectMod);
+        return updated.getResult();
+    }
+
+    @Override
+    public List<PropagationStatus> deprovision(final Long roleKey, final Collection<String> resources) {
+        Role role = roleDAO.authFetch(roleKey);
+
+        Set<String> noPropResourceName = role.getResourceNames();
+        noPropResourceName.removeAll(resources);
+
+        List<PropagationTask> tasks = propagationManager.getRoleDeleteTaskIds(
+                roleKey, new HashSet<>(resources), noPropResourceName);
+        PropagationReporter propagationReporter =
+                ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
+        try {
+            taskExecutor.execute(tasks, propagationReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propagationReporter.onPrimaryResourceFailure(tasks);
+        }
+        return propagationReporter.getStatuses();
+    }
+
+    @Override
+    public Long link(final RoleMod subjectMod) {
+        return rwfAdapter.update(subjectMod).getResult();
+    }
+
+}
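For orientation, here is a minimal usage sketch (not part of the commit) showing how the class above could be driven through the RoleProvisioningManager interface once DefaultRoleProvisioningManager is registered as a Spring bean; the sample class, bean wiring and RoleTO values are assumptions, only the create()/delete() signatures come from the diff.

// Illustrative only: class name, bean wiring and sample values are assumptions.
import java.util.List;
import java.util.Map;
import org.apache.syncope.common.lib.to.PropagationStatus;
import org.apache.syncope.common.lib.to.RoleTO;
import org.apache.syncope.core.provisioning.api.RoleProvisioningManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class SampleRoleLifecycle {

    @Autowired
    private RoleProvisioningManager roleProvisioningManager;

    public void createThenDelete() {
        RoleTO roleTO = new RoleTO();
        roleTO.setName("managers"); // sample data

        // create() returns the generated role key plus the per-resource propagation statuses
        Map.Entry<Long, List<PropagationStatus>> created = roleProvisioningManager.create(roleTO);
        Long roleKey = created.getKey();

        // delete() deprovisions the role (and its descendants) and returns the propagation statuses
        List<PropagationStatus> statuses = roleProvisioningManager.delete(roleKey);
    }
}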
http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultUserProvisioningManager.java
----------------------------------------------------------------------
diff --git a/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultUserProvisioningManager.java b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultUserProvisioningManager.java
new file mode 100644
index 0000000..b11cfe4
--- /dev/null
+++ b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/DefaultUserProvisioningManager.java
@@ -0,0 +1,370 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.core.provisioning.java;
+
+import java.util.AbstractMap;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.apache.syncope.common.lib.mod.MembershipMod;
+import org.apache.syncope.common.lib.mod.StatusMod;
+import org.apache.syncope.common.lib.mod.UserMod;
+import org.apache.syncope.common.lib.to.PropagationStatus;
+import org.apache.syncope.common.lib.to.UserTO;
+import org.apache.syncope.core.persistence.api.dao.UserDAO;
+import org.apache.syncope.core.persistence.api.entity.task.PropagationTask;
+import org.apache.syncope.core.persistence.api.entity.user.User;
+import org.apache.syncope.core.provisioning.api.UserProvisioningManager;
+import org.apache.syncope.core.provisioning.api.WorkflowResult;
+import org.apache.syncope.common.lib.types.PropagationByResource;
+import org.apache.syncope.core.provisioning.api.propagation.PropagationException;
+import org.apache.syncope.core.provisioning.api.propagation.PropagationManager;
+import org.apache.syncope.core.provisioning.api.propagation.PropagationReporter;
+import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskExecutor;
+import org.apache.syncope.core.provisioning.api.sync.ProvisioningResult;
+import org.apache.syncope.core.misc.spring.ApplicationContextProvider;
+import org.apache.syncope.core.workflow.api.UserWorkflowAdapter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+public class DefaultUserProvisioningManager implements UserProvisioningManager {
+
+    private static final Logger LOG = LoggerFactory.getLogger(UserProvisioningManager.class);
+
+    @Autowired
+    protected UserWorkflowAdapter uwfAdapter;
+
+    @Autowired
+    protected PropagationManager propagationManager;
+
+    @Autowired
+    protected PropagationTaskExecutor taskExecutor;
+
+    @Autowired
+    protected VirAttrHandler virtAttrHandler;
+
+    @Autowired
+    protected UserDAO userDAO;
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> create(final UserTO userTO) {
+        return create(userTO, true, false, null, Collections.<String>emptySet());
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> create(final UserTO userTO, final boolean storePassword) {
+        return create(userTO, storePassword, false, null, Collections.<String>emptySet());
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> create(final UserTO userTO, final boolean storePassword,
+            final boolean disablePwdPolicyCheck, final Boolean enabled, final Set<String> excludedResources) {
+
+        WorkflowResult<Map.Entry<Long, Boolean>> created;
+        try {
+            created = uwfAdapter.create(userTO, disablePwdPolicyCheck, enabled, storePassword);
+        } catch (PropagationException e) {
+            throw e;
+        }
+
+        List<PropagationTask> tasks = propagationManager.getUserCreateTaskIds(
+                created, userTO.getPassword(), userTO.getVirAttrs(), excludedResources, userTO.getMemberships());
+        PropagationReporter propagationReporter =
+                ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
+        try {
+            taskExecutor.execute(tasks, propagationReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propagationReporter.onPrimaryResourceFailure(tasks);
+        }
+
+        return new AbstractMap.SimpleEntry<>(created.getResult().getKey(), propagationReporter.getStatuses());
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> update(final UserMod userMod) {
+        return update(userMod, false);
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> update(final UserMod userMod, final boolean removeMemberships) {
+        WorkflowResult<Map.Entry<UserMod, Boolean>> updated = uwfAdapter.update(userMod);
+
+        List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(updated);
+        if (tasks.isEmpty()) {
+            // SYNCOPE-459: take care of user virtual attributes ...
+            final PropagationByResource propByResVirAttr = virtAttrHandler.fillVirtual(
+                    updated.getResult().getKey().getKey(),
+                    userMod.getVirAttrsToRemove(),
+                    userMod.getVirAttrsToUpdate());
+            // SYNCOPE-501: update only virtual attributes (if any of them changed), password propagation is
+            // not required, take care also of membership virtual attributes
+            boolean addOrUpdateMemberships = false;
+            for (MembershipMod membershipMod : userMod.getMembershipsToAdd()) {
+                if (!virtAttrHandler.fillMembershipVirtual(
+                        updated.getResult().getKey().getKey(),
+                        membershipMod.getRole(),
+                        null,
+                        membershipMod.getVirAttrsToRemove(),
+                        membershipMod.getVirAttrsToUpdate(),
+                        false).isEmpty()) {
+                    addOrUpdateMemberships = true;
+                }
+            }
+            tasks.addAll(!propByResVirAttr.isEmpty() || addOrUpdateMemberships || removeMemberships
+                    ? propagationManager.getUserUpdateTaskIds(updated, false, null)
+                    : Collections.<PropagationTask>emptyList());
+        }
+        PropagationReporter propagationReporter = ApplicationContextProvider.getApplicationContext().
+                getBean(PropagationReporter.class);
+        if (!tasks.isEmpty()) {
+            try {
+                taskExecutor.execute(tasks, propagationReporter);
+            } catch (PropagationException e) {
+                LOG.error("Error propagating to primary resource", e);
+                propagationReporter.onPrimaryResourceFailure(tasks);
+            }
+        }
+
+        Map.Entry<Long, List<PropagationStatus>> result = new AbstractMap.SimpleEntry<>(
+                updated.getResult().getKey().getKey(), propagationReporter.getStatuses());
+        return result;
+    }
+
+    @Override
+    public List<PropagationStatus> delete(final Long userKey) {
+        return delete(userKey, Collections.<String>emptySet());
+    }
+
+    @Override
+    public List<PropagationStatus> delete(final Long subjectId, final Set<String> excludedResources) {
+        // Note here that we can only notify about "delete", not any other
+        // task defined in the workflow process definition: this is because
+        // such information would only be available after uwfAdapter.delete(),
+        // which also effectively removes the user from the db, thus making it
+        // impossible for the NotificationManager to fetch the required user information
+        List<PropagationTask> tasks = propagationManager.getUserDeleteTaskIds(subjectId, excludedResources);
+
+        PropagationReporter propagationReporter =
+                ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
+        try {
+            taskExecutor.execute(tasks, propagationReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propagationReporter.onPrimaryResourceFailure(tasks);
+        }
+
+        try {
+            uwfAdapter.delete(subjectId);
+        } catch (PropagationException e) {
+            throw e;
+        }
+
+        return propagationReporter.getStatuses();
+    }
+
+    @Override
+    public Long unlink(final UserMod userMod) {
+        WorkflowResult<Map.Entry<UserMod, Boolean>> updated = uwfAdapter.update(userMod);
+        return updated.getResult().getKey().getKey();
+    }
+
+    @Override
+    public Long link(final UserMod subjectMod) {
+        return uwfAdapter.update(subjectMod).getResult().getKey().getKey();
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> activate(final User user, final StatusMod statusMod) {
+        WorkflowResult<Long> updated;
+        if (statusMod.isOnSyncope()) {
+            updated = uwfAdapter.activate(user.getKey(), statusMod.getToken());
+        } else {
+            updated = new WorkflowResult<>(user.getKey(), null, statusMod.getType().name().toLowerCase());
+        }
+
+        List<PropagationStatus> statuses = propagateStatus(user, statusMod);
+        return new AbstractMap.SimpleEntry<>(updated.getResult(), statuses);
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> reactivate(final User user, final StatusMod statusMod) {
+        WorkflowResult<Long> updated;
+        if (statusMod.isOnSyncope()) {
+            updated = uwfAdapter.reactivate(user.getKey());
+        } else {
+            updated = new WorkflowResult<>(user.getKey(), null, statusMod.getType().name().toLowerCase());
+        }
+
+        List<PropagationStatus> statuses = propagateStatus(user, statusMod);
+        return new AbstractMap.SimpleEntry<>(updated.getResult(), statuses);
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> suspend(final User user, final StatusMod statusMod) {
+        WorkflowResult<Long> updated;
+        if (statusMod.isOnSyncope()) {
+            updated = uwfAdapter.suspend(user.getKey());
+        } else {
+            updated = new WorkflowResult<>(user.getKey(), null, statusMod.getType().name().toLowerCase());
+        }
+
+        List<PropagationStatus> statuses = propagateStatus(user, statusMod);
+        return new AbstractMap.SimpleEntry<>(updated.getResult(), statuses);
+    }
+
+    protected List<PropagationStatus> propagateStatus(final User user, final StatusMod statusMod) {
+        Set<String> resourcesToBeExcluded = new HashSet<>(user.getResourceNames());
+        resourcesToBeExcluded.removeAll(statusMod.getResourceNames());
+
+        List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(
+                user, statusMod.getType() != StatusMod.ModType.SUSPEND, resourcesToBeExcluded);
+        PropagationReporter propReporter =
+                ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
+        try {
+            taskExecutor.execute(tasks, propReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propReporter.onPrimaryResourceFailure(tasks);
+        }
+
+        return propReporter.getStatuses();
+
+    }
+
+    @Override
+    public void innerSuspend(final User user, final boolean propagate) {
+        final WorkflowResult<Long> updated = uwfAdapter.suspend(user);
+
+        // propagate suspension if and only if it is required by policy
+        if (propagate) {
+            UserMod userMod = new UserMod();
+            userMod.setKey(updated.getResult());
+
+            final List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(
+                    new WorkflowResult<Map.Entry<UserMod, Boolean>>(
+                            new AbstractMap.SimpleEntry<>(userMod, Boolean.FALSE),
+                            updated.getPropByRes(), updated.getPerformedTasks()));
+
+            taskExecutor.execute(tasks);
+        }
+    }
+
+    @Override
+    public List<PropagationStatus> deprovision(final Long userKey, final Collection<String> resources) {
+        final User user = userDAO.authFetch(userKey);
+
+        final Set<String> noPropResourceName = user.getResourceNames();
+        noPropResourceName.removeAll(resources);
+
+        final List<PropagationTask> tasks =
+                propagationManager.getUserDeleteTaskIds(userKey, new HashSet<>(resources), noPropResourceName);
+        final PropagationReporter propagationReporter =
+                ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
+        try {
+            taskExecutor.execute(tasks, propagationReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propagationReporter.onPrimaryResourceFailure(tasks);
+        }
+
+        return propagationReporter.getStatuses();
+    }
+
+    @Override
+    public Map.Entry<Long, List<PropagationStatus>> update(final UserMod userMod, final Long key,
+            final ProvisioningResult result, final Boolean enabled, final Set<String> excludedResources) {
+
+        WorkflowResult<Map.Entry<UserMod, Boolean>> updated;
+        try {
+            updated = uwfAdapter.update(userMod);
+        } catch (Exception e) {
+            LOG.error("Update of user {} failed, trying to sync its status anyway (if configured)", key, e);
+
+            result.setStatus(ProvisioningResult.Status.FAILURE);
+            result.setMessage("Update failed, trying to sync status anyway (if configured)\n" + e.getMessage());
+
+            updated = new WorkflowResult<Map.Entry<UserMod, Boolean>>(
+                    new AbstractMap.SimpleEntry<>(userMod, false), new PropagationByResource(),
+                    new HashSet<String>());
+        }
+
+        if (enabled != null) {
+            User user = userDAO.find(key);
+
+            WorkflowResult<Long> enableUpdate = null;
+            if (user.isSuspended() == null) {
+                enableUpdate = uwfAdapter.activate(key, null);
+            } else if (enabled && user.isSuspended()) {
+                enableUpdate = uwfAdapter.reactivate(key);
+            } else if (!enabled && !user.isSuspended()) {
+                enableUpdate = uwfAdapter.suspend(key);
+            }
+
+            if (enableUpdate != null) {
+                if (enableUpdate.getPropByRes() != null) {
+                    updated.getPropByRes().merge(enableUpdate.getPropByRes());
+                    updated.getPropByRes().purge();
+                }
+                updated.getPerformedTasks().addAll(enableUpdate.getPerformedTasks());
+            }
+        }
+
+        PropagationReporter propagationReporter = ApplicationContextProvider.getApplicationContext().
+                getBean(PropagationReporter.class);
+
+        List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(
+                updated, updated.getResult().getKey().getPassword() != null, excludedResources);
+
+        try {
+            taskExecutor.execute(tasks, propagationReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propagationReporter.onPrimaryResourceFailure(tasks);
+        }
+
+        return new AbstractMap.SimpleEntry<>(updated.getResult().getKey().getKey(),
+                propagationReporter.getStatuses());
+
+    }
+
+    @Override
+    public void requestPasswordReset(final Long id) {
+        uwfAdapter.requestPasswordReset(id);
+    }
+
+    @Override
+    public void confirmPasswordReset(final User user, final String token, final String password) {
+        uwfAdapter.confirmPasswordReset(user.getKey(), token, password);
+
+        List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds(user, null, null);
+        PropagationReporter propReporter =
+                ApplicationContextProvider.getApplicationContext().getBean(PropagationReporter.class);
+        try {
+            taskExecutor.execute(tasks, propReporter);
+        } catch (PropagationException e) {
+            LOG.error("Error propagating to primary resource", e);
+            propReporter.onPrimaryResourceFailure(tasks);
+        }
+    }
+}
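Similarly, a minimal sketch of driving the user lifecycle through the UserProvisioningManager interface implemented above; the bean wiring, sample UserTO values and resource name are assumptions, only the method signatures come from the diff.

// Illustrative only: class name, bean wiring and sample values are assumptions.
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.syncope.common.lib.to.PropagationStatus;
import org.apache.syncope.common.lib.to.UserTO;
import org.apache.syncope.core.provisioning.api.UserProvisioningManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class SampleUserLifecycle {

    @Autowired
    private UserProvisioningManager userProvisioningManager;

    public void createThenDelete() {
        UserTO userTO = new UserTO();
        userTO.setUsername("jdoe");        // sample data
        userTO.setPassword("password123"); // sample data

        // create() returns the new user key plus the per-resource propagation statuses
        Map.Entry<Long, List<PropagationStatus>> created = userProvisioningManager.create(userTO);
        Long userKey = created.getKey();

        // deprovision() removes the user from the given external resources only (resource name is a sample)
        List<PropagationStatus> deprovisioned =
                userProvisioningManager.deprovision(userKey, Collections.singleton("resource-ldap"));

        // delete() removes the user and returns the statuses of the generated deprovisioning tasks
        List<PropagationStatus> deleted = userProvisioningManager.delete(userKey);
    }
}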
http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/UserSuspenderImpl.java
----------------------------------------------------------------------
diff --git a/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/UserSuspenderImpl.java b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/UserSuspenderImpl.java
new file mode 100644
index 0000000..bec5e28
--- /dev/null
+++ b/syncope620/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/UserSuspenderImpl.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.core.provisioning.java;
+
+import org.apache.syncope.core.provisioning.api.UserSuspender;
+import org.apache.syncope.core.persistence.api.entity.user.User;
+import org.apache.syncope.core.provisioning.api.UserProvisioningManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+@Component
+public class UserSuspenderImpl implements UserSuspender {
+
+    private static final Logger LOG = LoggerFactory.getLogger(UserSuspenderImpl.class);
+
+    @Autowired
+    private UserProvisioningManager provisioningManager;
+
+    @Override
+    public void suspend(final User user, final boolean suspend) {
+        try {
+            LOG.debug("User {}:{} is over the max number of failed logins", user.getKey(), user.getUsername());
+
+            // reduce the failed logins number to avoid multiple requests
+            user.setFailedLogins(user.getFailedLogins() - 1);
+
+            // disable the user and propagate suspension if and only if it is required by policy
+            provisioningManager.innerSuspend(user, suspend);
+        } catch (Exception e) {
+            LOG.error("Error during user suspension", e);
+        }
+    }
+}
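A hypothetical invocation site for the suspender above, for illustration only: the real caller and the account-policy lookup are outside this commit, and the threshold and propagation flag shown here are assumptions; only the suspend(User, boolean) signature comes from the diff.

// Illustrative only: the handler class, threshold and flag are assumptions.
import org.apache.syncope.core.persistence.api.entity.user.User;
import org.apache.syncope.core.provisioning.api.UserSuspender;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class SampleFailedLoginHandler {

    @Autowired
    private UserSuspender userSuspender;

    public void onFailedLogin(final User user, final int maxFailedLogins, final boolean propagateSuspension) {
        // suspend the user once the (assumed) policy threshold is exceeded; the second argument
        // tells UserSuspenderImpl whether to propagate the suspension to external resources
        if (user.getFailedLogins() != null && user.getFailedLogins() > maxFailedLogins) {
            userSuspender.suspend(user, propagateSuspension);
        }
    }
}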
