http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java new file mode 100644 index 0000000..7189d6e --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/TaskLogic.java @@ -0,0 +1,387 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.logic; + +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import org.apache.commons.lang3.ArrayUtils; +import org.apache.syncope.common.lib.SyncopeClientException; +import org.apache.syncope.common.lib.to.AbstractTaskTO; +import org.apache.syncope.common.lib.to.BulkAction; +import org.apache.syncope.common.lib.to.BulkActionResult; +import org.apache.syncope.common.lib.to.SchedTaskTO; +import org.apache.syncope.common.lib.to.SyncTaskTO; +import org.apache.syncope.common.lib.to.TaskExecTO; +import org.apache.syncope.common.lib.types.ClientExceptionType; +import org.apache.syncope.common.lib.types.PropagationMode; +import org.apache.syncope.common.lib.types.PropagationTaskExecStatus; +import org.apache.syncope.common.lib.types.TaskType; +import org.apache.syncope.core.persistence.api.dao.NotFoundException; +import org.apache.syncope.core.persistence.api.dao.TaskDAO; +import org.apache.syncope.core.persistence.api.dao.TaskExecDAO; +import org.apache.syncope.core.persistence.api.dao.search.OrderByClause; +import org.apache.syncope.core.persistence.api.entity.task.NotificationTask; +import org.apache.syncope.core.persistence.api.entity.task.PropagationTask; +import org.apache.syncope.core.persistence.api.entity.task.SchedTask; +import org.apache.syncope.core.persistence.api.entity.task.Task; +import org.apache.syncope.core.persistence.api.entity.task.TaskExec; +import org.apache.syncope.core.persistence.api.entity.task.TaskUtil; +import org.apache.syncope.core.persistence.api.entity.task.TaskUtilFactory; +import org.apache.syncope.core.provisioning.api.data.TaskDataBinder; +import org.apache.syncope.core.provisioning.api.job.JobNamer; +import org.apache.syncope.core.provisioning.api.job.TaskJob; +import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskExecutor; +import org.apache.syncope.core.provisioning.api.job.JobInstanceLoader; +import org.apache.syncope.core.logic.notification.NotificationJob; +import org.quartz.JobDataMap; +import org.quartz.JobKey; +import org.quartz.Scheduler; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.scheduling.quartz.SchedulerFactoryBean; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.stereotype.Component; + +@Component +public class TaskLogic extends AbstractTransactionalLogic<AbstractTaskTO> { + + @Autowired + private TaskDAO taskDAO; + + @Autowired + private TaskExecDAO taskExecDAO; + + @Autowired + private TaskDataBinder binder; + + @Autowired + private PropagationTaskExecutor taskExecutor; + + @Autowired + private NotificationJob notificationJob; + + @Autowired + private JobInstanceLoader jobInstanceLoader; + + @Autowired + private SchedulerFactoryBean scheduler; + + @Autowired + private TaskUtilFactory taskUtilFactory; + + @PreAuthorize("hasRole('TASK_CREATE')") + public <T extends SchedTaskTO> T createSchedTask(final T taskTO) { + TaskUtil taskUtil = taskUtilFactory.getInstance(taskTO); + + SchedTask task = binder.createSchedTask(taskTO, taskUtil); + task = taskDAO.save(task); + + try { + jobInstanceLoader.registerJob(task, task.getJobClassName(), task.getCronExpression()); + } catch (Exception e) { + LOG.error("While registering quartz job for task " + task.getKey(), e); + + SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling); + sce.getElements().add(e.getMessage()); + throw sce; + } + + return binder.getTaskTO(task, taskUtil); + } + + @PreAuthorize("hasRole('TASK_UPDATE')") + public SyncTaskTO updateSync(final SyncTaskTO taskTO) { + return updateSched(taskTO); + } + + @PreAuthorize("hasRole('TASK_UPDATE')") + public <T extends SchedTaskTO> T updateSched(final SchedTaskTO taskTO) { + SchedTask task = taskDAO.find(taskTO.getKey()); + if (task == null) { + throw new NotFoundException("Task " + taskTO.getKey()); + } + + TaskUtil taskUtil = taskUtilFactory.getInstance(task); + + binder.updateSchedTask(task, taskTO, taskUtil); + task = taskDAO.save(task); + + try { + jobInstanceLoader.registerJob(task, task.getJobClassName(), task.getCronExpression()); + } catch (Exception e) { + LOG.error("While registering quartz job for task " + task.getKey(), e); + + SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling); + sce.getElements().add(e.getMessage()); + throw sce; + } + + return binder.getTaskTO(task, taskUtil); + } + + @PreAuthorize("hasRole('TASK_LIST')") + public int count(final TaskType taskType) { + return taskDAO.count(taskType); + } + + @PreAuthorize("hasRole('TASK_LIST')") + @SuppressWarnings("unchecked") + public <T extends AbstractTaskTO> List<T> list(final TaskType taskType, + final int page, final int size, final List<OrderByClause> orderByClauses) { + + TaskUtil taskUtil = taskUtilFactory.getInstance(taskType); + + List<Task> tasks = taskDAO.findAll(page, size, orderByClauses, taskType); + List<T> taskTOs = new ArrayList<>(tasks.size()); + for (Task task : tasks) { + taskTOs.add((T) binder.getTaskTO(task, taskUtil)); + } + + return taskTOs; + } + + @PreAuthorize("hasRole('TASK_READ')") + public <T extends AbstractTaskTO> T read(final Long taskId) { + Task task = taskDAO.find(taskId); + if (task == null) { + throw new NotFoundException("Task " + taskId); + } + return binder.getTaskTO(task, taskUtilFactory.getInstance(task)); + } + + @PreAuthorize("hasRole('TASK_READ')") + public TaskExecTO readExecution(final Long executionId) { + TaskExec taskExec = taskExecDAO.find(executionId); + if (taskExec == null) { + throw new NotFoundException("Task execution " + executionId); + 
} + return binder.getTaskExecTO(taskExec); + } + + @PreAuthorize("hasRole('TASK_EXECUTE')") + public TaskExecTO execute(final Long taskId, final boolean dryRun) { + Task task = taskDAO.find(taskId); + if (task == null) { + throw new NotFoundException("Task " + taskId); + } + TaskUtil taskUtil = taskUtilFactory.getInstance(task); + + TaskExecTO result = null; + switch (taskUtil.getType()) { + case PROPAGATION: + final TaskExec propExec = taskExecutor.execute((PropagationTask) task); + result = binder.getTaskExecTO(propExec); + break; + + case NOTIFICATION: + final TaskExec notExec = notificationJob.executeSingle((NotificationTask) task); + result = binder.getTaskExecTO(notExec); + break; + + case SCHEDULED: + case SYNCHRONIZATION: + case PUSH: + try { + jobInstanceLoader.registerJob(task, + ((SchedTask) task).getJobClassName(), + ((SchedTask) task).getCronExpression()); + + JobDataMap map = new JobDataMap(); + map.put(TaskJob.DRY_RUN_JOBDETAIL_KEY, dryRun); + + scheduler.getScheduler().triggerJob( + new JobKey(JobNamer.getJobName(task), Scheduler.DEFAULT_GROUP), map); + } catch (Exception e) { + LOG.error("While executing task {}", task, e); + + SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.Scheduling); + sce.getElements().add(e.getMessage()); + throw sce; + } + + result = new TaskExecTO(); + result.setTask(taskId); + result.setStartDate(new Date()); + result.setStatus("JOB_FIRED"); + result.setMessage("Job fired; waiting for results..."); + break; + + default: + } + + return result; + } + + @PreAuthorize("hasRole('TASK_READ')") + public TaskExecTO report(final Long executionId, final PropagationTaskExecStatus status, final String message) { + TaskExec exec = taskExecDAO.find(executionId); + if (exec == null) { + throw new NotFoundException("Task execution " + executionId); + } + + SyncopeClientException sce = SyncopeClientException.build( + ClientExceptionType.InvalidPropagationTaskExecReport); + + TaskUtil taskUtil = taskUtilFactory.getInstance(exec.getTask()); + if (TaskType.PROPAGATION == taskUtil.getType()) { + PropagationTask task = (PropagationTask) exec.getTask(); + if (task.getPropagationMode() != PropagationMode.TWO_PHASES) { + sce.getElements().add("Propagation mode: " + task.getPropagationMode()); + } + } else { + sce.getElements().add("Task type: " + taskUtil); + } + + switch (status) { + case SUCCESS: + case FAILURE: + break; + + case CREATED: + case SUBMITTED: + case UNSUBMITTED: + sce.getElements().add("Execution status to be set: " + status); + break; + + default: + } + + if (!sce.isEmpty()) { + throw sce; + } + + exec.setStatus(status.toString()); + exec.setMessage(message); + return binder.getTaskExecTO(taskExecDAO.save(exec)); + } + + @PreAuthorize("hasRole('TASK_DELETE')") + public <T extends AbstractTaskTO> T delete(final Long taskId) { + Task task = taskDAO.find(taskId); + if (task == null) { + throw new NotFoundException("Task " + taskId); + } + TaskUtil taskUtil = taskUtilFactory.getInstance(task); + + T taskToDelete = binder.getTaskTO(task, taskUtil); + + if (TaskType.SCHEDULED == taskUtil.getType() + || TaskType.SYNCHRONIZATION == taskUtil.getType() + || TaskType.PUSH == taskUtil.getType()) { + + jobInstanceLoader.unregisterJob(task); + } + + taskDAO.delete(task); + return taskToDelete; + } + + @PreAuthorize("hasRole('TASK_DELETE')") + public TaskExecTO deleteExecution(final Long executionId) { + TaskExec taskExec = taskExecDAO.find(executionId); + if (taskExec == null) { + throw new NotFoundException("Task execution " + 
executionId); + } + + TaskExecTO taskExecutionToDelete = binder.getTaskExecTO(taskExec); + taskExecDAO.delete(taskExec); + return taskExecutionToDelete; + } + + @PreAuthorize("(hasRole('TASK_DELETE') and #bulkAction.operation == #bulkAction.operation.DELETE) or " + + "(hasRole('TASK_EXECUTE') and " + + "(#bulkAction.operation == #bulkAction.operation.EXECUTE or " + + "#bulkAction.operation == #bulkAction.operation.DRYRUN))") + public BulkActionResult bulk(final BulkAction bulkAction) { + BulkActionResult res = new BulkActionResult(); + + switch (bulkAction.getOperation()) { + case DELETE: + for (String taskId : bulkAction.getTargets()) { + try { + res.add(delete(Long.valueOf(taskId)).getKey(), BulkActionResult.Status.SUCCESS); + } catch (Exception e) { + LOG.error("Error performing delete for task {}", taskId, e); + res.add(taskId, BulkActionResult.Status.FAILURE); + } + } + break; + + case DRYRUN: + for (String taskId : bulkAction.getTargets()) { + try { + execute(Long.valueOf(taskId), true); + res.add(taskId, BulkActionResult.Status.SUCCESS); + } catch (Exception e) { + LOG.error("Error performing dryrun for task {}", taskId, e); + res.add(taskId, BulkActionResult.Status.FAILURE); + } + } + break; + + case EXECUTE: + for (String taskId : bulkAction.getTargets()) { + try { + execute(Long.valueOf(taskId), false); + res.add(taskId, BulkActionResult.Status.SUCCESS); + } catch (Exception e) { + LOG.error("Error performing execute for task {}", taskId, e); + res.add(taskId, BulkActionResult.Status.FAILURE); + } + } + break; + + default: + } + + return res; + } + + @Override + protected AbstractTaskTO resolveReference(final Method method, final Object... args) + throws UnresolvedReferenceException { + + Long key = null; + + if (ArrayUtils.isNotEmpty(args) + && !"deleteExecution".equals(method.getName()) && !"readExecution".equals(method.getName())) { + + for (int i = 0; key == null && i < args.length; i++) { + if (args[i] instanceof Long) { + key = (Long) args[i]; + } else if (args[i] instanceof AbstractTaskTO) { + key = ((AbstractTaskTO) args[i]).getKey(); + } + } + } + + if ((key != null) && !key.equals(0l)) { + try { + final Task task = taskDAO.find(key); + return binder.getTaskTO(task, taskUtilFactory.getInstance(task)); + } catch (Throwable ignore) { + LOG.debug("Unresolved reference", ignore); + throw new UnresolvedReferenceException(ignore); + } + } + + throw new UnresolvedReferenceException(); + } +}
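For orientation only, not part of the commit above: a minimal sketch of how the TaskLogic facade might be driven once wired by Spring. The client class name is hypothetical, and standard getters on TaskExecTO (matching the setters shown in the diff) are assumed.

package org.apache.syncope.core.logic;

import org.apache.syncope.common.lib.to.TaskExecTO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class TaskLogicUsageSketch {

    @Autowired
    private TaskLogic taskLogic;

    // Dry-runs a task: for SCHEDULED/SYNCHRONIZATION/PUSH tasks, TaskLogic.execute()
    // fires the corresponding Quartz job with DRY_RUN_JOBDETAIL_KEY set to true and
    // returns an execution whose status stays "JOB_FIRED" until the job completes.
    public String dryRun(final Long taskId) {
        TaskExecTO exec = taskLogic.execute(taskId, true);
        return exec.getStatus() + ": " + exec.getMessage();
    }
}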
http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UnresolvedReferenceException.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UnresolvedReferenceException.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UnresolvedReferenceException.java new file mode 100644 index 0000000..d863369 --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UnresolvedReferenceException.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.logic; + +/** + * Indicates unresolved bean reference. + */ +public class UnresolvedReferenceException extends Exception { + + private static final long serialVersionUID = -675489116009955632L; + + public UnresolvedReferenceException() { + super(); + } + + public UnresolvedReferenceException(final Throwable cause) { + super(cause); + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UserLogic.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UserLogic.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UserLogic.java new file mode 100644 index 0000000..17e577b --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UserLogic.java @@ -0,0 +1,522 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.syncope.core.logic; + +import org.apache.syncope.core.misc.security.UnauthorizedRoleException; +import java.lang.reflect.Method; +import java.security.AccessControlException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.commons.lang3.ArrayUtils; +import org.apache.syncope.common.lib.SyncopeClientException; +import org.apache.syncope.common.lib.mod.AttrMod; +import org.apache.syncope.common.lib.mod.StatusMod; +import org.apache.syncope.common.lib.mod.UserMod; +import org.apache.syncope.common.lib.to.BulkAction; +import org.apache.syncope.common.lib.to.BulkActionResult; +import org.apache.syncope.common.lib.to.BulkActionResult.Status; +import org.apache.syncope.common.lib.to.MembershipTO; +import org.apache.syncope.common.lib.to.PropagationStatus; +import org.apache.syncope.common.lib.to.UserTO; +import org.apache.syncope.common.lib.types.ClientExceptionType; +import org.apache.syncope.common.lib.types.SubjectType; +import org.apache.syncope.core.persistence.api.RoleEntitlementUtil; +import org.apache.syncope.core.persistence.api.dao.NotFoundException; +import org.apache.syncope.core.persistence.api.dao.RoleDAO; +import org.apache.syncope.core.persistence.api.dao.SubjectSearchDAO; +import org.apache.syncope.core.persistence.api.dao.UserDAO; +import org.apache.syncope.core.persistence.api.dao.search.OrderByClause; +import org.apache.syncope.core.persistence.api.dao.search.SearchCond; +import org.apache.syncope.core.persistence.api.entity.role.Role; +import org.apache.syncope.core.persistence.api.entity.user.User; +import org.apache.syncope.core.provisioning.api.AttributableTransformer; +import org.apache.syncope.core.provisioning.api.UserProvisioningManager; +import org.apache.syncope.core.provisioning.api.data.UserDataBinder; +import org.apache.syncope.core.provisioning.api.propagation.PropagationManager; +import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskExecutor; +import org.apache.syncope.core.provisioning.java.VirAttrHandler; +import org.apache.syncope.core.misc.security.AuthContextUtil; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.transaction.interceptor.TransactionInterceptor; + +/** + * Note that this controller does not extend {@link AbstractTransactionalLogic}, hence does not provide any + * Spring's Transactional logic at class level. 
+ */ +@Component +public class UserLogic extends AbstractSubjectLogic<UserTO, UserMod> { + + @Autowired + protected UserDAO userDAO; + + @Autowired + protected RoleDAO roleDAO; + + @Autowired + protected SubjectSearchDAO searchDAO; + + @Autowired + protected UserDataBinder binder; + + @Autowired + protected VirAttrHandler virtAttrHandler; + + @Autowired + protected PropagationManager propagationManager; + + @Autowired + protected PropagationTaskExecutor taskExecutor; + + @Autowired + protected AttributableTransformer attrTransformer; + + @Autowired + protected UserProvisioningManager provisioningManager; + + @Autowired + protected SyncopeLogic syncopeLogic; + + @PreAuthorize("hasRole('USER_READ')") + public String getUsername(final Long key) { + return binder.getUserTO(key).getUsername(); + } + + @PreAuthorize("hasRole('USER_READ')") + public Long getKey(final String username) { + return binder.getUserTO(username).getKey(); + } + + @PreAuthorize("hasRole('USER_LIST')") + @Transactional(readOnly = true, rollbackFor = { Throwable.class }) + @Override + public int count() { + return userDAO.count(RoleEntitlementUtil.getRoleKeys(AuthContextUtil.getOwnedEntitlementNames())); + } + + @PreAuthorize("hasRole('USER_LIST')") + @Transactional(readOnly = true, rollbackFor = { Throwable.class }) + @Override + public int searchCount(final SearchCond searchCondition) { + return searchDAO.count(RoleEntitlementUtil.getRoleKeys(AuthContextUtil.getOwnedEntitlementNames()), + searchCondition, SubjectType.USER); + } + + @PreAuthorize("hasRole('USER_LIST')") + @Transactional(readOnly = true, rollbackFor = { Throwable.class }) + @Override + public List<UserTO> list(final int page, final int size, final List<OrderByClause> orderBy) { + Set<Long> adminRoleIds = RoleEntitlementUtil.getRoleKeys(AuthContextUtil.getOwnedEntitlementNames()); + + List<User> users = userDAO.findAll(adminRoleIds, page, size, orderBy); + List<UserTO> userTOs = new ArrayList<>(users.size()); + for (User user : users) { + userTOs.add(binder.getUserTO(user)); + } + + return userTOs; + } + + @PreAuthorize("isAuthenticated() " + + "and not(hasRole(T(org.apache.syncope.common.lib.SyncopeConstants).ANONYMOUS_ENTITLEMENT))") + @Transactional(readOnly = true) + public UserTO readSelf() { + return binder.getAuthenticatedUserTO(); + } + + @PreAuthorize("hasRole('USER_READ')") + @Transactional(readOnly = true) + @Override + public UserTO read(final Long key) { + return binder.getUserTO(key); + } + + @PreAuthorize("hasRole('USER_LIST')") + @Transactional(readOnly = true) + @Override + public List<UserTO> search(final SearchCond searchCondition, final int page, final int size, + final List<OrderByClause> orderBy) { + + final List<User> matchingUsers = searchDAO.search( + RoleEntitlementUtil.getRoleKeys(AuthContextUtil.getOwnedEntitlementNames()), + searchCondition, page, size, orderBy, SubjectType.USER); + + final List<UserTO> result = new ArrayList<>(matchingUsers.size()); + for (User user : matchingUsers) { + result.add(binder.getUserTO(user)); + } + + return result; + } + + @PreAuthorize("isAnonymous() or hasRole(T(org.apache.syncope.common.lib.SyncopeConstants).ANONYMOUS_ENTITLEMENT)") + public UserTO createSelf(final UserTO userTO, final boolean storePassword) { + return doCreate(userTO, storePassword); + } + + @PreAuthorize("hasRole('USER_CREATE')") + public UserTO create(final UserTO userTO, final boolean storePassword) { + Set<Long> requestRoleIds = new HashSet<>(userTO.getMemberships().size()); + for (MembershipTO membership : 
userTO.getMemberships()) { + requestRoleIds.add(membership.getRoleId()); + } + Set<Long> adminRoleIds = RoleEntitlementUtil.getRoleKeys(AuthContextUtil.getOwnedEntitlementNames()); + requestRoleIds.removeAll(adminRoleIds); + if (!requestRoleIds.isEmpty()) { + throw new UnauthorizedRoleException(requestRoleIds); + } + + return doCreate(userTO, storePassword); + } + + protected UserTO doCreate(final UserTO userTO, final boolean storePassword) { + // Attributable transformation (if configured) + UserTO actual = attrTransformer.transform(userTO); + LOG.debug("Transformed: {}", actual); + + Map.Entry<Long, List<PropagationStatus>> created = provisioningManager.create(actual, storePassword); + + final UserTO savedTO = binder.getUserTO(created.getKey()); + savedTO.getPropagationStatusTOs().addAll(created.getValue()); + return savedTO; + } + + @PreAuthorize("isAuthenticated() " + + "and not(hasRole(T(org.apache.syncope.common.lib.SyncopeConstants).ANONYMOUS_ENTITLEMENT))") + public UserTO updateSelf(final UserMod userMod) { + UserTO userTO = binder.getAuthenticatedUserTO(); + + if (userTO.getKey() != userMod.getKey()) { + throw new AccessControlException("Not allowed for user with key " + userMod.getKey()); + } + + return update(userMod); + } + + @PreAuthorize("hasRole('USER_UPDATE')") + @Override + public UserTO update(final UserMod userMod) { + // AttributableMod transformation (if configured) + UserMod actual = attrTransformer.transform(userMod); + LOG.debug("Transformed: {}", actual); + + // SYNCOPE-501: check if there are memberships to be removed with virtual attributes assigned + boolean removeMemberships = false; + for (Long membershipId : actual.getMembershipsToRemove()) { + if (!virtAttrHandler.fillMembershipVirtual( + null, + null, + membershipId, + Collections.<String>emptySet(), + Collections.<AttrMod>emptySet(), + true).isEmpty()) { + + removeMemberships = true; + } + } + + Map.Entry<Long, List<PropagationStatus>> updated = provisioningManager.update(actual, removeMemberships); + + final UserTO updatedTO = binder.getUserTO(updated.getKey()); + updatedTO.getPropagationStatusTOs().addAll(updated.getValue()); + return updatedTO; + } + + protected Map.Entry<Long, List<PropagationStatus>> setStatusOnWfAdapter(final User user, + final StatusMod statusMod) { + Map.Entry<Long, List<PropagationStatus>> updated; + + switch (statusMod.getType()) { + case SUSPEND: + updated = provisioningManager.suspend(user, statusMod); + break; + + case REACTIVATE: + updated = provisioningManager.reactivate(user, statusMod); + break; + + case ACTIVATE: + default: + updated = provisioningManager.activate(user, statusMod); + break; + + } + + return updated; + } + + @PreAuthorize("hasRole('USER_UPDATE')") + @Transactional(rollbackFor = { Throwable.class }) + public UserTO status(final StatusMod statusMod) { + User user = userDAO.authFetch(statusMod.getKey()); + + Map.Entry<Long, List<PropagationStatus>> updated = setStatusOnWfAdapter(user, statusMod); + final UserTO savedTO = binder.getUserTO(updated.getKey()); + savedTO.getPropagationStatusTOs().addAll(updated.getValue()); + return savedTO; + } + + @PreAuthorize("isAnonymous() or hasRole(T(org.apache.syncope.common.lib.SyncopeConstants).ANONYMOUS_ENTITLEMENT)") + @Transactional + public void requestPasswordReset(final String username, final String securityAnswer) { + if (username == null) { + throw new NotFoundException("Null username"); + } + + User user = userDAO.find(username); + if (user == null) { + throw new NotFoundException("User " + username); + } + + 
if (syncopeLogic.isPwdResetRequiringSecurityQuestions() + && (securityAnswer == null || !securityAnswer.equals(user.getSecurityAnswer()))) { + + throw SyncopeClientException.build(ClientExceptionType.InvalidSecurityAnswer); + } + + provisioningManager.requestPasswordReset(user.getKey()); + } + + @PreAuthorize("isAnonymous() or hasRole(T(org.apache.syncope.common.lib.SyncopeConstants).ANONYMOUS_ENTITLEMENT)") + @Transactional + public void confirmPasswordReset(final String token, final String password) { + User user = userDAO.findByToken(token); + if (user == null) { + throw new NotFoundException("User with token " + token); + } + provisioningManager.confirmPasswordReset(user, token, password); + } + + @PreAuthorize("isAuthenticated() " + + "and not(hasRole(T(org.apache.syncope.common.lib.SyncopeConstants).ANONYMOUS_ENTITLEMENT))") + public UserTO deleteSelf() { + UserTO userTO = binder.getAuthenticatedUserTO(); + + return delete(userTO.getKey()); + } + + @PreAuthorize("hasRole('USER_DELETE')") + @Override + public UserTO delete(final Long key) { + List<Role> ownedRoles = roleDAO.findOwnedByUser(key); + if (!ownedRoles.isEmpty()) { + List<String> owned = new ArrayList<>(ownedRoles.size()); + for (Role role : ownedRoles) { + owned.add(role.getKey() + " " + role.getName()); + } + + SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.RoleOwnership); + sce.getElements().addAll(owned); + throw sce; + } + + List<PropagationStatus> statuses = provisioningManager.delete(key); + + final UserTO deletedTO; + User deleted = userDAO.find(key); + if (deleted == null) { + deletedTO = new UserTO(); + deletedTO.setKey(key); + } else { + deletedTO = binder.getUserTO(key); + } + deletedTO.getPropagationStatusTOs().addAll(statuses); + + return deletedTO; + } + + @PreAuthorize("(hasRole('USER_DELETE') and #bulkAction.operation == #bulkAction.operation.DELETE) or " + + "(hasRole('USER_UPDATE') and " + + "(#bulkAction.operation == #bulkAction.operation.REACTIVATE or " + + "#bulkAction.operation == #bulkAction.operation.SUSPEND))") + public BulkActionResult bulk(final BulkAction bulkAction) { + BulkActionResult res = new BulkActionResult(); + + switch (bulkAction.getOperation()) { + case DELETE: + for (String key : bulkAction.getTargets()) { + try { + res.add(delete(Long.valueOf(key)).getKey(), Status.SUCCESS); + } catch (Exception e) { + LOG.error("Error performing delete for user {}", key, e); + res.add(key, Status.FAILURE); + } + } + break; + + case SUSPEND: + for (String key : bulkAction.getTargets()) { + StatusMod statusMod = new StatusMod(); + statusMod.setKey(Long.valueOf(key)); + statusMod.setType(StatusMod.ModType.SUSPEND); + try { + res.add(status(statusMod).getKey(), Status.SUCCESS); + } catch (Exception e) { + LOG.error("Error performing suspend for user {}", key, e); + res.add(key, Status.FAILURE); + } + } + break; + + case REACTIVATE: + for (String key : bulkAction.getTargets()) { + StatusMod statusMod = new StatusMod(); + statusMod.setKey(Long.valueOf(key)); + statusMod.setType(StatusMod.ModType.REACTIVATE); + try { + res.add(status(statusMod).getKey(), Status.SUCCESS); + } catch (Exception e) { + LOG.error("Error performing reactivate for user {}", key, e); + res.add(key, Status.FAILURE); + } + } + break; + + default: + } + + return res; + } + + @PreAuthorize("hasRole('USER_UPDATE')") + @Transactional(rollbackFor = { Throwable.class }) + @Override + public UserTO unlink(final Long key, final Collection<String> resources) { + final UserMod userMod = new UserMod(); + 
userMod.setKey(key); + userMod.getResourcesToRemove().addAll(resources); + Long updatedId = provisioningManager.unlink(userMod); + + return binder.getUserTO(updatedId); + } + + @PreAuthorize("hasRole('USER_UPDATE')") + @Transactional(rollbackFor = { Throwable.class }) + @Override + public UserTO link(final Long key, final Collection<String> resources) { + final UserMod userMod = new UserMod(); + userMod.setKey(key); + userMod.getResourcesToAdd().addAll(resources); + return binder.getUserTO(provisioningManager.link(userMod)); + } + + @PreAuthorize("hasRole('USER_UPDATE')") + @Transactional(rollbackFor = { Throwable.class }) + @Override + public UserTO unassign(final Long key, final Collection<String> resources) { + final UserMod userMod = new UserMod(); + userMod.setKey(key); + userMod.getResourcesToRemove().addAll(resources); + return update(userMod); + } + + @PreAuthorize("hasRole('USER_UPDATE')") + @Transactional(rollbackFor = { Throwable.class }) + @Override + public UserTO assign( + final Long key, + final Collection<String> resources, + final boolean changepwd, + final String password) { + + final UserMod userMod = new UserMod(); + userMod.setKey(key); + userMod.getResourcesToAdd().addAll(resources); + + if (changepwd) { + StatusMod statusMod = new StatusMod(); + statusMod.setOnSyncope(false); + statusMod.getResourceNames().addAll(resources); + userMod.setPwdPropRequest(statusMod); + userMod.setPassword(password); + } + + return update(userMod); + } + + @PreAuthorize("hasRole('USER_UPDATE')") + @Transactional(rollbackFor = { Throwable.class }) + @Override + public UserTO deprovision(final Long key, final Collection<String> resources) { + final User user = userDAO.authFetch(key); + + List<PropagationStatus> statuses = provisioningManager.deprovision(key, resources); + + final UserTO updatedUserTO = binder.getUserTO(user); + updatedUserTO.getPropagationStatusTOs().addAll(statuses); + return updatedUserTO; + } + + @PreAuthorize("hasRole('USER_UPDATE')") + @Transactional(readOnly = true) + @Override + public UserTO provision( + final Long key, + final Collection<String> resources, + final boolean changePwd, + final String password) { + + final UserTO original = binder.getUserTO(key); + + //trick: assign and retrieve propagation statuses ... + original.getPropagationStatusTOs().addAll( + assign(key, resources, changePwd, password).getPropagationStatusTOs()); + + // .... rollback. + TransactionInterceptor.currentTransactionStatus().setRollbackOnly(); + return original; + } + + @Override + protected UserTO resolveReference(final Method method, final Object... args) throws UnresolvedReferenceException { + Object key = null; + + if (!"confirmPasswordReset".equals(method.getName()) && ArrayUtils.isNotEmpty(args)) { + for (int i = 0; key == null && i < args.length; i++) { + if (args[i] instanceof Long) { + key = (Long) args[i]; + } else if (args[i] instanceof String) { + key = (String) args[i]; + } else if (args[i] instanceof UserTO) { + key = ((UserTO) args[i]).getKey(); + } else if (args[i] instanceof UserMod) { + key = ((UserMod) args[i]).getKey(); + } + } + } + + if ((key != null) && !key.equals(0l)) { + try { + return key instanceof Long ? 
binder.getUserTO((Long) key) : binder.getUserTO((String) key); + } catch (Throwable ignore) { + LOG.debug("Unresolved reference", ignore); + throw new UnresolvedReferenceException(ignore); + } + } + + throw new UnresolvedReferenceException(); + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UserWorkflowLogic.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UserWorkflowLogic.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UserWorkflowLogic.java new file mode 100644 index 0000000..eb122fd --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/UserWorkflowLogic.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.logic; + +import java.lang.reflect.Method; +import java.util.AbstractMap; +import java.util.List; +import java.util.Map; +import org.apache.syncope.common.lib.mod.AbstractAttributableMod; +import org.apache.syncope.common.lib.mod.UserMod; +import org.apache.syncope.common.lib.to.UserTO; +import org.apache.syncope.common.lib.to.WorkflowFormTO; +import org.apache.syncope.core.persistence.api.dao.UserDAO; +import org.apache.syncope.core.persistence.api.entity.task.PropagationTask; +import org.apache.syncope.core.persistence.api.entity.user.User; +import org.apache.syncope.core.provisioning.api.propagation.PropagationManager; +import org.apache.syncope.core.provisioning.api.propagation.PropagationTaskExecutor; +import org.apache.syncope.core.workflow.api.UserWorkflowAdapter; +import org.apache.syncope.core.provisioning.api.WorkflowResult; +import org.apache.syncope.core.provisioning.api.data.UserDataBinder; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +@Component +public class UserWorkflowLogic extends AbstractTransactionalLogic<WorkflowFormTO> { + + @Autowired + private UserWorkflowAdapter uwfAdapter; + + @Autowired + private PropagationManager propagationManager; + + @Autowired + private PropagationTaskExecutor taskExecutor; + + @Autowired + private UserDataBinder binder; + + @Autowired + private UserDAO userDAO; + + @PreAuthorize("hasRole('WORKFLOW_FORM_CLAIM')") + @Transactional(rollbackFor = { Throwable.class }) + public WorkflowFormTO claimForm(final String taskId) { + return uwfAdapter.claimForm(taskId); + } + + @PreAuthorize("hasRole('USER_UPDATE')") + public UserTO executeWorkflowTask(final UserTO userTO, final String taskId) { + 
WorkflowResult<Long> updated = uwfAdapter.execute(userTO, taskId); + + UserMod userMod = new UserMod(); + userMod.setKey(userTO.getKey()); + + List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds( + new WorkflowResult<Map.Entry<UserMod, Boolean>>( + new AbstractMap.SimpleEntry<UserMod, Boolean>(userMod, null), + updated.getPropByRes(), updated.getPerformedTasks())); + + taskExecutor.execute(tasks); + + return binder.getUserTO(updated.getResult()); + } + + @PreAuthorize("hasRole('WORKFLOW_FORM_READ') and hasRole('USER_READ')") + @Transactional(rollbackFor = { Throwable.class }) + public WorkflowFormTO getFormForUser(final Long key) { + User user = userDAO.authFetch(key); + return uwfAdapter.getForm(user.getWorkflowId()); + } + + @PreAuthorize("hasRole('WORKFLOW_FORM_LIST')") + @Transactional(rollbackFor = { Throwable.class }) + public List<WorkflowFormTO> getForms() { + return uwfAdapter.getForms(); + } + + @PreAuthorize("hasRole('WORKFLOW_FORM_READ') and hasRole('USER_READ')") + @Transactional(rollbackFor = { Throwable.class }) + public List<WorkflowFormTO> getForms(final Long key, final String formName) { + User user = userDAO.authFetch(key); + return uwfAdapter.getForms(user.getWorkflowId(), formName); + } + + @PreAuthorize("hasRole('WORKFLOW_FORM_SUBMIT')") + @Transactional(rollbackFor = { Throwable.class }) + public UserTO submitForm(final WorkflowFormTO form) { + WorkflowResult<? extends AbstractAttributableMod> updated = uwfAdapter.submitForm(form); + + // propByRes can be made empty by the workflow definition if no propagation should occur + // (for example, with rejected users) + if (updated.getResult() instanceof UserMod + && updated.getPropByRes() != null && !updated.getPropByRes().isEmpty()) { + + List<PropagationTask> tasks = propagationManager.getUserUpdateTaskIds( + new WorkflowResult<Map.Entry<UserMod, Boolean>>( + new AbstractMap.SimpleEntry<UserMod, Boolean>((UserMod) updated.getResult(), Boolean.TRUE), + updated.getPropByRes(), + updated.getPerformedTasks())); + + taskExecutor.execute(tasks); + } + + return binder.getUserTO(updated.getResult().getKey()); + } + + @Override + protected WorkflowFormTO resolveReference(final Method method, final Object... args) + throws UnresolvedReferenceException { + + throw new UnresolvedReferenceException(); + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/WorkflowLogic.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/WorkflowLogic.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/WorkflowLogic.java new file mode 100644 index 0000000..c8dcc93 --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/WorkflowLogic.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.logic; + +import java.io.OutputStream; +import java.lang.reflect.Method; +import javax.ws.rs.core.MediaType; +import org.apache.syncope.common.lib.AbstractBaseBean; +import org.apache.syncope.core.workflow.api.RoleWorkflowAdapter; +import org.apache.syncope.core.workflow.api.UserWorkflowAdapter; +import org.apache.syncope.core.workflow.api.WorkflowAdapter; +import org.apache.syncope.core.workflow.api.WorkflowDefinitionFormat; +import org.apache.syncope.core.workflow.api.WorkflowException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +@Component +public class WorkflowLogic extends AbstractTransactionalLogic<AbstractBaseBean> { + + @Autowired + private UserWorkflowAdapter uwfAdapter; + + @Autowired + private RoleWorkflowAdapter rwfAdapter; + + private void exportDefinition( + final WorkflowAdapter adapter, final WorkflowDefinitionFormat format, final OutputStream os) + throws WorkflowException { + + adapter.exportDefinition(format, os); + } + + private WorkflowDefinitionFormat getFormat(final MediaType format) { + return format.equals(MediaType.APPLICATION_JSON_TYPE) + ? WorkflowDefinitionFormat.JSON + : WorkflowDefinitionFormat.XML; + } + + @PreAuthorize("hasRole('WORKFLOW_DEF_READ')") + @Transactional(readOnly = true) + public void exportUserDefinition(final MediaType format, final OutputStream os) + throws WorkflowException { + + exportDefinition(uwfAdapter, getFormat(format), os); + } + + @PreAuthorize("hasRole('WORKFLOW_DEF_READ')") + @Transactional(readOnly = true) + public void exportRoleDefinition(final MediaType format, final OutputStream os) + throws WorkflowException { + + exportDefinition(rwfAdapter, getFormat(format), os); + } + + private void exportDiagram(final WorkflowAdapter adapter, final OutputStream os) + throws WorkflowException { + adapter.exportDiagram(os); + } + + @PreAuthorize("hasRole('WORKFLOW_DEF_READ')") + @Transactional(readOnly = true) + public void exportUserDiagram(final OutputStream os) + throws WorkflowException { + + exportDiagram(uwfAdapter, os); + } + + @PreAuthorize("hasRole('WORKFLOW_DEF_READ')") + @Transactional(readOnly = true) + public void exportRoleDiagram(final OutputStream os) + throws WorkflowException { + + exportDiagram(rwfAdapter, os); + } + + private void importDefinition( + final WorkflowAdapter adapter, final WorkflowDefinitionFormat format, final String definition) { + + adapter.importDefinition(format, definition); + } + + @PreAuthorize("hasRole('WORKFLOW_DEF_UPDATE')") + public void importUserDefinition(final MediaType format, final String definition) { + importDefinition(uwfAdapter, getFormat(format), definition); + } + + @PreAuthorize("hasRole('WORKFLOW_DEF_UPDATE')") + public void importRoleDefinition(final MediaType format, final String definition) { + importDefinition(rwfAdapter, getFormat(format), definition); + } + + @Override + protected AbstractBaseBean 
resolveReference(final Method method, final Object... args) + throws UnresolvedReferenceException { + + throw new UnresolvedReferenceException(); + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/audit/AuditConnectionFactory.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/audit/AuditConnectionFactory.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/audit/AuditConnectionFactory.java new file mode 100644 index 0000000..727d0d5 --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/audit/AuditConnectionFactory.java @@ -0,0 +1,159 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.logic.audit; + +import java.io.InputStream; +import java.sql.Connection; +import java.util.Properties; +import javax.naming.Context; +import javax.naming.InitialContext; +import javax.rmi.PortableRemoteObject; +import javax.sql.DataSource; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpression; +import javax.xml.xpath.XPathFactory; +import org.apache.commons.dbcp2.BasicDataSource; +import org.apache.commons.io.IOUtils; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.PropertiesLoaderUtils; +import org.springframework.jdbc.datasource.DataSourceUtils; +import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator; +import org.springframework.jdbc.datasource.init.ScriptUtils; +import org.w3c.dom.Document; +import org.w3c.dom.bootstrap.DOMImplementationRegistry; +import org.w3c.dom.ls.DOMImplementationLS; +import org.w3c.dom.ls.LSInput; +import org.w3c.dom.ls.LSParser; + +/** + * LOG4J SQL connection factory that first attempts to obtain a {@link javax.sql.DataSource} from the JNDI name + * configured in Spring or, when not found, builds a new {@link javax.sql.DataSource DataSource} via Commons DBCP; if + * a datasource is found, the SQL init script is used to populate the database. + */ +public final class AuditConnectionFactory { + + private static DataSource datasource; + + private static final String PERSISTENCE_CONTEXT = "/persistenceContext.xml"; + + static { + // 1.
Attempts to lookup for configured JNDI datasource (if present and available) + InputStream springConf = AuditConnectionFactory.class.getResourceAsStream(PERSISTENCE_CONTEXT); + String primary = null; + String fallback = null; + try { + DOMImplementationRegistry reg = DOMImplementationRegistry.newInstance(); + DOMImplementationLS impl = (DOMImplementationLS) reg.getDOMImplementation("LS"); + LSParser parser = impl.createLSParser(DOMImplementationLS.MODE_SYNCHRONOUS, null); + LSInput lsinput = impl.createLSInput(); + lsinput.setByteStream(springConf); + Document source = parser.parse(lsinput); + + XPathFactory xPathfactory = XPathFactory.newInstance(); + XPath xpath = xPathfactory.newXPath(); + + XPathExpression expr = xpath.compile("//*[local-name()='bean' and @id='persistenceProperties']/" + + "child::*[local-name()='property' and @name='primary']/@value"); + primary = (String) expr.evaluate(source, XPathConstants.STRING); + expr = xpath.compile("//*[local-name()='bean' and @id='persistenceProperties']/" + + "child::*[local-name()='property' and @name='fallback']/@value"); + fallback = (String) expr.evaluate(source, XPathConstants.STRING); + + expr = xpath.compile("//*[local-name()='property' and @name='jndiName']/@value"); + String jndiName = (String) expr.evaluate(source, XPathConstants.STRING); + + Context ctx = new InitialContext(); + Object obj = ctx.lookup(jndiName); + + datasource = (DataSource) PortableRemoteObject.narrow(obj, DataSource.class); + } catch (Exception e) { + // ignore + } finally { + IOUtils.closeQuietly(springConf); + } + + // 2. Creates Commons DBCP datasource + String initSQLScript = null; + try { + Resource persistenceProperties = null; + if (primary != null) { + if (primary.startsWith("file:")) { + persistenceProperties = new FileSystemResource(primary.substring(5)); + } + if (primary.startsWith("classpath:")) { + persistenceProperties = new ClassPathResource(primary.substring(10)); + } + } + if ((persistenceProperties == null || !persistenceProperties.exists()) && fallback != null) { + if (fallback.startsWith("file:")) { + persistenceProperties = new FileSystemResource(fallback.substring(5)); + } + if (fallback.startsWith("classpath:")) { + persistenceProperties = new ClassPathResource(fallback.substring(10)); + } + } + Properties persistence = PropertiesLoaderUtils.loadProperties(persistenceProperties); + + initSQLScript = persistence.getProperty("audit.sql"); + + if (datasource == null) { + BasicDataSource bds = new BasicDataSource(); + bds.setDriverClassName(persistence.getProperty("jpa.driverClassName")); + bds.setUrl(persistence.getProperty("jpa.url")); + bds.setUsername(persistence.getProperty("jpa.username")); + bds.setPassword(persistence.getProperty("jpa.password")); + + bds.setLogAbandoned(true); + bds.setRemoveAbandonedOnBorrow(true); + bds.setRemoveAbandonedOnMaintenance(true); + + datasource = bds; + } + } catch (Exception e) { + throw new IllegalStateException("Audit datasource configuration failed", e); + } + + // 3. 
Initializes the chosen datasource + ResourceDatabasePopulator populator = new ResourceDatabasePopulator(); + populator.setScripts(new Resource[] { new ClassPathResource("/audit/" + initSQLScript) }); + // forces no statement separation + populator.setSeparator(ScriptUtils.EOF_STATEMENT_SEPARATOR); + Connection conn = DataSourceUtils.getConnection(datasource); + try { + populator.populate(conn); + } finally { + DataSourceUtils.releaseConnection(conn, datasource); + } + } + + public static Connection getConnection() { + if (datasource != null) { + return DataSourceUtils.getConnection(datasource); + } + + throw new IllegalStateException("Audit dataSource init failed: check logs"); + } + + private AuditConnectionFactory() { + // empty constructor for static utility class + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/ImplementationClassNamesLoader.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/ImplementationClassNamesLoader.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/ImplementationClassNamesLoader.java new file mode 100644 index 0000000..ff11801 --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/ImplementationClassNamesLoader.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.syncope.core.logic.init; + +import java.lang.reflect.Modifier; +import java.util.Collections; +import java.util.EnumMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import org.apache.commons.lang3.StringUtils; +import org.apache.syncope.core.provisioning.api.job.PushJob; +import org.apache.syncope.core.provisioning.api.job.SyncJob; +import org.apache.syncope.core.provisioning.api.job.TaskJob; +import org.apache.syncope.core.provisioning.api.propagation.PropagationActions; +import org.apache.syncope.core.provisioning.api.sync.PushActions; +import org.apache.syncope.core.provisioning.api.sync.SyncActions; +import org.apache.syncope.core.provisioning.api.sync.SyncCorrelationRule; +import org.apache.syncope.core.logic.report.Reportlet; +import org.apache.syncope.core.persistence.api.SyncopeLoader; +import org.apache.syncope.core.persistence.api.attrvalue.validation.Validator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; +import org.springframework.core.type.filter.AssignableTypeFilter; +import org.springframework.stereotype.Component; +import org.springframework.util.ClassUtils; + +/** + * Cache class names for all implementations of Syncope interfaces found in classpath, for later usage. + */ +@Component +public class ImplementationClassNamesLoader implements SyncopeLoader { + + public enum Type { + + REPORTLET, + TASKJOB, + PROPAGATION_ACTIONS, + SYNC_ACTIONS, + PUSH_ACTIONS, + SYNC_CORRELATION_RULE, + PUSH_CORRELATION_RULE, + VALIDATOR + + } + + /** + * Logger. + */ + private static final Logger LOG = LoggerFactory.getLogger(ImplementationClassNamesLoader.class); + + private Map<Type, Set<String>> classNames; + + @Override + public Integer getPriority() { + return 400; + } + + @Override + public void load() { + classNames = new EnumMap<>(Type.class); + for (Type type : Type.values()) { + classNames.put(type, new HashSet<String>()); + } + + ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(false); + scanner.addIncludeFilter(new AssignableTypeFilter(Reportlet.class)); + scanner.addIncludeFilter(new AssignableTypeFilter(TaskJob.class)); + scanner.addIncludeFilter(new AssignableTypeFilter(SyncActions.class)); + scanner.addIncludeFilter(new AssignableTypeFilter(PushActions.class)); + scanner.addIncludeFilter(new AssignableTypeFilter(SyncCorrelationRule.class)); + // TODO: SYNCOPE-631 + //scanner.addIncludeFilter(new AssignableTypeFilter(PushCorrelationRule.class)); + scanner.addIncludeFilter(new AssignableTypeFilter(PropagationActions.class)); + scanner.addIncludeFilter(new AssignableTypeFilter(Validator.class)); + + for (BeanDefinition bd : scanner.findCandidateComponents(StringUtils.EMPTY)) { + try { + Class<?> clazz = ClassUtils.resolveClassName( + bd.getBeanClassName(), ClassUtils.getDefaultClassLoader()); + boolean isAbsractClazz = Modifier.isAbstract(clazz.getModifiers()); + + if (Reportlet.class.isAssignableFrom(clazz) && !isAbsractClazz) { + classNames.get(Type.REPORTLET).add(clazz.getName()); + } + + if (TaskJob.class.isAssignableFrom(clazz) && !isAbsractClazz + && !SyncJob.class.isAssignableFrom(clazz) + && !PushJob.class.isAssignableFrom(clazz)) { + + classNames.get(Type.TASKJOB).add(bd.getBeanClassName()); + } + + if (SyncActions.class.isAssignableFrom(clazz) && !isAbsractClazz) { + 
classNames.get(Type.SYNC_ACTIONS).add(bd.getBeanClassName()); + } + + if (PushActions.class.isAssignableFrom(clazz) && !isAbsractClazz) { + classNames.get(Type.PUSH_ACTIONS).add(bd.getBeanClassName()); + } + + if (SyncCorrelationRule.class.isAssignableFrom(clazz) && !isAbsractClazz) { + classNames.get(Type.SYNC_CORRELATION_RULE).add(bd.getBeanClassName()); + } + + // TODO: SYNCOPE-631 + /* if (PushCorrelationRule.class.isAssignableFrom(clazz) && !isAbsractClazz) { + * classNames.get(Type.PUSH_CORRELATION_RULES).add(metadata.getClassName()); + * } */ + if (PropagationActions.class.isAssignableFrom(clazz) && !isAbsractClazz) { + classNames.get(Type.PROPAGATION_ACTIONS).add(bd.getBeanClassName()); + } + + if (Validator.class.isAssignableFrom(clazz) && !isAbsractClazz) { + classNames.get(Type.VALIDATOR).add(bd.getBeanClassName()); + } + } catch (Throwable t) { + LOG.warn("Could not inspect class {}", bd.getBeanClassName(), t); + } + } + classNames = Collections.unmodifiableMap(classNames); + + LOG.debug("Implementation classes found: {}", classNames); + } + + public Set<String> getClassNames(final Type type) { + return classNames.get(type); + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/JobInstanceLoaderImpl.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/JobInstanceLoaderImpl.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/JobInstanceLoaderImpl.java new file mode 100644 index 0000000..106ed36 --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/JobInstanceLoaderImpl.java @@ -0,0 +1,271 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.syncope.core.logic.init; + +import java.text.ParseException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import org.apache.commons.lang3.StringUtils; +import org.apache.syncope.common.lib.types.TaskType; +import org.apache.syncope.core.persistence.api.dao.ConfDAO; +import org.apache.syncope.core.persistence.api.dao.NotFoundException; +import org.apache.syncope.core.persistence.api.dao.ReportDAO; +import org.apache.syncope.core.persistence.api.dao.TaskDAO; +import org.apache.syncope.core.persistence.api.entity.Report; +import org.apache.syncope.core.persistence.api.entity.conf.CPlainAttr; +import org.apache.syncope.core.persistence.api.entity.task.PushTask; +import org.apache.syncope.core.persistence.api.entity.task.SchedTask; +import org.apache.syncope.core.persistence.api.entity.task.SyncTask; +import org.apache.syncope.core.persistence.api.entity.task.Task; +import org.apache.syncope.core.provisioning.api.job.JobInstanceLoader; +import org.apache.syncope.core.provisioning.api.job.JobNamer; +import org.apache.syncope.core.provisioning.api.job.SyncJob; +import org.apache.syncope.core.provisioning.api.job.TaskJob; +import org.apache.syncope.core.provisioning.api.sync.SyncActions; +import org.apache.syncope.core.logic.notification.NotificationJob; +import org.apache.syncope.core.logic.report.ReportJob; +import org.apache.syncope.core.misc.spring.ApplicationContextProvider; +import org.apache.syncope.core.persistence.api.SyncopeLoader; +import org.apache.syncope.core.provisioning.api.job.PushJob; +import org.apache.syncope.core.provisioning.java.sync.PushJobImpl; +import org.apache.syncope.core.provisioning.java.sync.SyncJobImpl; +import org.quartz.Job; +import org.quartz.JobExecutionContext; +import org.quartz.JobKey; +import org.quartz.Scheduler; +import org.quartz.SchedulerException; +import org.quartz.TriggerKey; +import org.quartz.impl.JobDetailImpl; +import org.quartz.impl.triggers.CronTriggerImpl; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.support.AbstractBeanDefinition; +import org.springframework.scheduling.quartz.SchedulerFactoryBean; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +@Component +public class JobInstanceLoaderImpl implements JobInstanceLoader, SyncopeLoader { + + private static final Logger LOG = LoggerFactory.getLogger(JobInstanceLoader.class); + + @Autowired + private SchedulerFactoryBean scheduler; + + @Autowired + private TaskDAO taskDAO; + + @Autowired + private ReportDAO reportDAO; + + @Autowired + private ConfDAO confDAO; + + private void registerJob(final String jobName, final Job jobInstance, final String cronExpression) + throws SchedulerException, ParseException { + + synchronized (scheduler.getScheduler()) { + boolean jobAlreadyRunning = false; + for (JobExecutionContext jobCtx : scheduler.getScheduler().getCurrentlyExecutingJobs()) { + if (jobName.equals(jobCtx.getJobDetail().getKey().getName()) + && Scheduler.DEFAULT_GROUP.equals(jobCtx.getJobDetail().getKey().getGroup())) { + + jobAlreadyRunning = true; + + LOG.debug("Job {} already running, cancel", jobCtx.getJobDetail().getKey()); + } + } + + if (jobAlreadyRunning) { + return; + } + } + + // 0. unregister job + unregisterJob(jobName); + + // 1. 
Job bean + ApplicationContextProvider.getBeanFactory().registerSingleton(jobName, jobInstance); + + // 2. JobDetail bean + JobDetailImpl jobDetail = new JobDetailImpl(); + jobDetail.setName(jobName); + jobDetail.setGroup(Scheduler.DEFAULT_GROUP); + jobDetail.setJobClass(jobInstance.getClass()); + + // 3. Trigger + if (cronExpression == null) { + // Jobs added with no trigger must be durable + jobDetail.setDurability(true); + scheduler.getScheduler().addJob(jobDetail, true); + } else { + CronTriggerImpl cronTrigger = new CronTriggerImpl(); + cronTrigger.setName(JobNamer.getTriggerName(jobName)); + cronTrigger.setCronExpression(cronExpression); + + scheduler.getScheduler().scheduleJob(jobDetail, cronTrigger); + } + } + + @SuppressWarnings("unchecked") + @Override + public void registerJob(final Task task, final String jobClassName, final String cronExpression) + throws ClassNotFoundException, SchedulerException, ParseException { + + Class<?> jobClass = Class.forName(jobClassName); + if (SyncJob.class.equals(jobClass)) { + jobClass = SyncJobImpl.class; + } else if (PushJob.class.equals(jobClass)) { + jobClass = PushJobImpl.class; + } + Job jobInstance = (Job) ApplicationContextProvider.getBeanFactory(). + createBean(jobClass, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false); + if (jobInstance instanceof TaskJob) { + ((TaskJob) jobInstance).setTaskId(task.getKey()); + } + + // In the case of a synchronization job/task, retrieve and set the synchronization actions: + // actions cannot be changed at runtime, unlike the connector and synchronization policies, which are reloaded at execution time. + if (jobInstance instanceof SyncJob && task instanceof SyncTask) { + final List<SyncActions> actions = new ArrayList<>(); + for (String className : ((SyncTask) task).getActionsClassNames()) { + try { + Class<?> actionsClass = Class.forName(className); + + final SyncActions syncActions = + (SyncActions) ApplicationContextProvider.getBeanFactory(). + createBean(actionsClass, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, true); + + actions.add(syncActions); + } catch (Exception e) { + LOG.info("Class '{}' not found", className, e); + } + } + + ((SyncJob) jobInstance).setActions(actions); + } + + registerJob(JobNamer.getJobName(task), jobInstance, cronExpression); + } + + @Transactional(readOnly = true) + @Override + public void registerTaskJob(final Long taskKey) + throws ClassNotFoundException, SchedulerException, ParseException { + + SchedTask task = taskDAO.find(taskKey); + if (task == null) { + throw new NotFoundException("Task " + taskKey); + } else { + registerJob(task, task.getJobClassName(), task.getCronExpression()); + } + } + + @Override + public void registerJob(final Report report) throws SchedulerException, ParseException { + Job jobInstance = (Job) ApplicationContextProvider.getBeanFactory(). 
+ createBean(ReportJob.class, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, false); + ((ReportJob) jobInstance).setReportKey(report.getKey()); + + registerJob(JobNamer.getJobName(report), jobInstance, report.getCronExpression()); + } + + @Transactional(readOnly = true) + @Override + public void registerReportJob(final Long reportKey) throws SchedulerException, ParseException { + Report report = reportDAO.find(reportKey); + if (report == null) { + throw new NotFoundException("Report " + reportKey); + } else { + registerJob(report); + } + } + + private void unregisterJob(final String jobName) { + try { + scheduler.getScheduler().unscheduleJob(new TriggerKey(jobName, Scheduler.DEFAULT_GROUP)); + scheduler.getScheduler().deleteJob(new JobKey(jobName, Scheduler.DEFAULT_GROUP)); + } catch (SchedulerException e) { + LOG.error("Could not remove job " + jobName, e); + } + + if (ApplicationContextProvider.getBeanFactory().containsSingleton(jobName)) { + ApplicationContextProvider.getBeanFactory().destroySingleton(jobName); + } + } + + @Override + public void unregisterJob(final Task task) { + unregisterJob(JobNamer.getJobName(task)); + } + + @Override + public void unregisterJob(final Report report) { + unregisterJob(JobNamer.getJobName(report)); + } + + @Override + public Integer getPriority() { + return 200; + } + + @Transactional + @Override + public void load() { + // 1. jobs for SchedTasks + Set<SchedTask> tasks = new HashSet<>(taskDAO.<SchedTask>findAll(TaskType.SCHEDULED)); + tasks.addAll(taskDAO.<SyncTask>findAll(TaskType.SYNCHRONIZATION)); + tasks.addAll(taskDAO.<PushTask>findAll(TaskType.PUSH)); + for (SchedTask task : tasks) { + try { + registerJob(task, task.getJobClassName(), task.getCronExpression()); + } catch (Exception e) { + LOG.error("While loading job instance for task " + task.getKey(), e); + } + } + + // 2. NotificationJob + CPlainAttr notificationJobCronExp = + confDAO.find("notificationjob.cronExpression", NotificationJob.DEFAULT_CRON_EXP); + if (StringUtils.isBlank(notificationJobCronExp.getValuesAsStrings().get(0))) { + LOG.debug("Empty value provided for NotificationJob's cron, not registering anything on Quartz"); + } else { + LOG.debug("NotificationJob's cron expression: {} - registering Quartz job and trigger", + notificationJobCronExp); + + try { + registerJob(null, NotificationJob.class.getName(), notificationJobCronExp.getValuesAsStrings().get(0)); + } catch (Exception e) { + LOG.error("While loading NotificationJob instance", e); + } + } + + // 3. ReportJobs + for (Report report : reportDAO.findAll()) { + try { + registerJob(report); + } catch (Exception e) { + LOG.error("While loading job instance for report " + report.getName(), e); + } + } + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/LoggerLoader.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/LoggerLoader.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/LoggerLoader.java new file mode 100644 index 0000000..447a92f --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/LoggerLoader.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.logic.init; + +import java.util.HashMap; +import java.util.Map; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.LoggerConfig; +import org.apache.syncope.common.lib.SyncopeConstants; +import org.apache.syncope.common.lib.types.LoggerLevel; +import org.apache.syncope.common.lib.types.LoggerType; +import org.apache.syncope.core.persistence.api.SyncopeLoader; +import org.apache.syncope.core.persistence.api.dao.LoggerDAO; +import org.apache.syncope.core.persistence.api.entity.EntityFactory; +import org.apache.syncope.core.persistence.api.entity.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +@Component +public class LoggerLoader implements SyncopeLoader { + + @Autowired + private LoggerDAO loggerDAO; + + @Autowired + private EntityFactory entityFactory; + + @Override + public Integer getPriority() { + return 300; + } + + @Transactional + @Override + public void load() { + Map<String, Logger> syncopeLoggers = new HashMap<>(); + for (Logger syncopeLogger : loggerDAO.findAll(LoggerType.LOG)) { + syncopeLoggers.put(syncopeLogger.getKey(), syncopeLogger); + } + + for (Logger syncopeLogger : loggerDAO.findAll(LoggerType.AUDIT)) { + syncopeLoggers.put(syncopeLogger.getKey(), syncopeLogger); + } + + LoggerContext ctx = (LoggerContext) LogManager.getContext(false); + + /* + * Traverse all defined log4j loggers: if there is a matching SyncopeLogger, set the log4j level accordingly, + * otherwise create a SyncopeLogger instance with the given name and level. + */ + for (LoggerConfig logConf : ctx.getConfiguration().getLoggers().values()) { + final String loggerName = LogManager.ROOT_LOGGER_NAME.equals(logConf.getName()) + ? SyncopeConstants.ROOT_LOGGER : logConf.getName(); + if (logConf.getLevel() != null) { + if (syncopeLoggers.containsKey(loggerName)) { + logConf.setLevel(syncopeLoggers.get(loggerName).getLevel().getLevel()); + syncopeLoggers.remove(loggerName); + } else if (!loggerName.equals(LoggerType.AUDIT.getPrefix())) { + Logger syncopeLogger = entityFactory.newEntity(Logger.class); + syncopeLogger.setKey(loggerName); + syncopeLogger.setLevel(LoggerLevel.fromLevel(logConf.getLevel())); + syncopeLogger.setType(loggerName.startsWith(LoggerType.AUDIT.getPrefix()) + ? LoggerType.AUDIT + : LoggerType.LOG); + loggerDAO.save(syncopeLogger); + } + } + } + + /* + * For each SyncopeLogger not found in log4j, create a new log4j logger with the given name and level. 
+ */ + for (Logger syncopeLogger : syncopeLoggers.values()) { + LoggerConfig logConf = ctx.getConfiguration().getLoggerConfig(syncopeLogger.getKey()); + logConf.setLevel(syncopeLogger.getLevel().getLevel()); + } + + ctx.updateLoggers(); + } +} http://git-wip-us.apache.org/repos/asf/syncope/blob/d30c8526/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/LogicInitializer.java ---------------------------------------------------------------------- diff --git a/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/LogicInitializer.java b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/LogicInitializer.java new file mode 100644 index 0000000..c784262 --- /dev/null +++ b/syncope620/core/logic/src/main/java/org/apache/syncope/core/logic/init/LogicInitializer.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.syncope.core.logic.init; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import org.apache.syncope.core.persistence.api.SyncopeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.aop.support.AopUtils; +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanFactory; +import org.springframework.beans.factory.BeanFactoryAware; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.stereotype.Component; + +/** + * Takes care of all initializations needed by the Syncope logic layer to start up safely. 
+ */ +@Component +public class LogicInitializer implements InitializingBean, BeanFactoryAware { + + private static final Logger LOG = LoggerFactory.getLogger(LogicInitializer.class); + + private DefaultListableBeanFactory beanFactory; + + @Override + public void setBeanFactory(final BeanFactory beanFactory) throws BeansException { + this.beanFactory = (DefaultListableBeanFactory) beanFactory; + } + + @Override + public void afterPropertiesSet() throws Exception { + Map<String, SyncopeLoader> loaderMap = beanFactory.getBeansOfType(SyncopeLoader.class); + + List<SyncopeLoader> loaders = new ArrayList<>(loaderMap.values()); + Collections.sort(loaders, new Comparator<SyncopeLoader>() { + + @Override + public int compare(final SyncopeLoader o1, final SyncopeLoader o2) { + return o1.getPriority().compareTo(o2.getPriority()); + } + }); + + LOG.debug("Starting initialization..."); + for (SyncopeLoader loader : loaders) { + LOG.debug("Invoking {} with priority {}", AopUtils.getTargetClass(loader).getName(), loader.getPriority()); + loader.load(); + } + LOG.debug("Initialization completed"); + } +}
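Editor's note: the loaders introduced in this commit plug into the chain driven by LogicInitializer with priorities 200 (JobInstanceLoaderImpl), 300 (LoggerLoader) and 400 (ImplementationClassNamesLoader), so lower values run first. As a rough illustration of the SyncopeLoader contract, here is a minimal sketch of an additional loader that LogicInitializer would discover and invoke; the class name, priority value and body are illustrative assumptions, not part of this commit.

package org.apache.syncope.core.logic.init;

import org.apache.syncope.core.persistence.api.SyncopeLoader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

// Hypothetical loader: any @Component implementing SyncopeLoader is found by
// LogicInitializer.afterPropertiesSet() via getBeansOfType(SyncopeLoader.class)
// and invoked in ascending getPriority() order.
@Component
public class ExampleCacheWarmupLoader implements SyncopeLoader {

    private static final Logger LOG = LoggerFactory.getLogger(ExampleCacheWarmupLoader.class);

    @Override
    public Integer getPriority() {
        // runs after JobInstanceLoaderImpl (200), LoggerLoader (300)
        // and ImplementationClassNamesLoader (400)
        return 500;
    }

    @Override
    public void load() {
        LOG.debug("Warming up caches - illustrative only");
    }
}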

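Editor's note on the Quartz pattern used by JobInstanceLoaderImpl above: when a task or report carries no cron expression, its JobDetail is added as a durable job (so it survives without any trigger and can only be fired on demand); otherwise a CronTriggerImpl is scheduled against it. The following is a self-contained sketch of that decision, assuming a plain org.quartz.Scheduler; the SomeJob class and the trigger name are stand-ins for the actual TaskJob/ReportJob beans and JobNamer.getTriggerName(), not part of this commit.

import java.text.ParseException;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.impl.JobDetailImpl;
import org.quartz.impl.triggers.CronTriggerImpl;

public final class QuartzRegistrationSketch {

    // Illustrative no-op job, standing in for the Spring-wired job beans above.
    public static class SomeJob implements Job {

        @Override
        public void execute(final JobExecutionContext context) {
            // no-op
        }
    }

    public static void register(final Scheduler scheduler, final String jobName, final String cronExpression)
            throws SchedulerException, ParseException {

        JobDetailImpl jobDetail = new JobDetailImpl();
        jobDetail.setName(jobName);
        jobDetail.setGroup(Scheduler.DEFAULT_GROUP);
        jobDetail.setJobClass(SomeJob.class);

        if (cronExpression == null) {
            // no trigger: the job must be durable and is only fired on demand
            jobDetail.setDurability(true);
            scheduler.addJob(jobDetail, true);
        } else {
            CronTriggerImpl cronTrigger = new CronTriggerImpl();
            // stand-in for JobNamer.getTriggerName(jobName)
            cronTrigger.setName(jobName + "Trigger");
            cronTrigger.setCronExpression(cronExpression);
            scheduler.scheduleJob(jobDetail, cronTrigger);
        }
    }

    private QuartzRegistrationSketch() {
        // utility class
    }
}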