This is an automated email from the ASF dual-hosted git repository.
ilgrosso pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/syncope.git
The following commit(s) were added to refs/heads/master by this push:
new db0703a2bc [SYNCOPE-1850] Checking concurrent task execution
db0703a2bc is described below
commit db0703a2bcec32cfc43a3a681b0e41e425a5e57e
Author: Francesco Chicchiriccò <[email protected]>
AuthorDate: Fri Jan 3 09:07:34 2025 +0100
[SYNCOPE-1850] Checking concurrent task execution
---
.github/workflows/codeql-analysis.yml | 2 +-
.github/workflows/crosschecks.yml | 2 +-
.github/workflows/dockerhub.yml | 2 +-
.github/workflows/fit_Elasticsearch.yml | 2 +-
.github/workflows/fit_OpenSearch.yml | 2 +-
.github/workflows/fit_Payara.yml | 2 +-
.github/workflows/fit_Tomcat_PostgreSQL_JSON.yml | 2 +-
.github/workflows/fit_Tomcat_PostgreSQL_XML.yml | 2 +-
.github/workflows/fit_Tomcat_PostgreSQL_YAML.yml | 2 +-
.github/workflows/fit_WA_OIDCC4UI.yml | 2 +-
.github/workflows/fit_WA_SAML2SP4UI.yml | 2 +-
.github/workflows/fit_WA_SRA_CASClient.yml | 2 +-
.github/workflows/fit_WA_SRA_OAuth2.yml | 2 +-
.github/workflows/fit_WA_SRA_OIDC.yml | 2 +-
.github/workflows/fit_WA_SRA_SAML2.yml | 2 +-
.github/workflows/fit_Wildfly.yml | 2 +-
.github/workflows/fit_Zookeeper.yml | 2 +-
.github/workflows/mariadb.yml | 2 +-
.github/workflows/mysql.yml | 2 +-
.github/workflows/neo4j.yml | 2 +-
.github/workflows/oracle.yml | 2 +-
.../provisioning/java/job/DefaultJobManager.java | 60 +++++++++++-------
.../syncope/core/provisioning/java/job/Job.java | 31 ++++++----
.../java/job/SyncopeTaskScheduler.java | 69 +++++++++++----------
.../apache/syncope/fit/core/SchedTaskITCase.java | 72 ++++++++++++++++------
pom.xml | 8 +--
26 files changed, 171 insertions(+), 111 deletions(-)
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 4544626ea8..2addc10811 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -54,7 +54,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/crosschecks.yml b/.github/workflows/crosschecks.yml
index 2fac41fbe8..1474e9d4ef 100644
--- a/.github/workflows/crosschecks.yml
+++ b/.github/workflows/crosschecks.yml
@@ -47,7 +47,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/dockerhub.yml b/.github/workflows/dockerhub.yml
index 3c2b3a8b10..5a10de05be 100644
--- a/.github/workflows/dockerhub.yml
+++ b/.github/workflows/dockerhub.yml
@@ -64,7 +64,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_Elasticsearch.yml b/.github/workflows/fit_Elasticsearch.yml
index 1a1d4ad937..b2f24b4e4e 100644
--- a/.github/workflows/fit_Elasticsearch.yml
+++ b/.github/workflows/fit_Elasticsearch.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_OpenSearch.yml b/.github/workflows/fit_OpenSearch.yml
index 90ed0cad66..25d5851d46 100644
--- a/.github/workflows/fit_OpenSearch.yml
+++ b/.github/workflows/fit_OpenSearch.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_Payara.yml b/.github/workflows/fit_Payara.yml
index aaf721c5e7..7b2d697106 100644
--- a/.github/workflows/fit_Payara.yml
+++ b/.github/workflows/fit_Payara.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_Tomcat_PostgreSQL_JSON.yml b/.github/workflows/fit_Tomcat_PostgreSQL_JSON.yml
index 4a6554b65c..2d9a5b8bcb 100644
--- a/.github/workflows/fit_Tomcat_PostgreSQL_JSON.yml
+++ b/.github/workflows/fit_Tomcat_PostgreSQL_JSON.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_Tomcat_PostgreSQL_XML.yml b/.github/workflows/fit_Tomcat_PostgreSQL_XML.yml
index cdb2d838e9..b3153d898c 100644
--- a/.github/workflows/fit_Tomcat_PostgreSQL_XML.yml
+++ b/.github/workflows/fit_Tomcat_PostgreSQL_XML.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_Tomcat_PostgreSQL_YAML.yml b/.github/workflows/fit_Tomcat_PostgreSQL_YAML.yml
index 2317b2db75..ba19bdd2d0 100644
--- a/.github/workflows/fit_Tomcat_PostgreSQL_YAML.yml
+++ b/.github/workflows/fit_Tomcat_PostgreSQL_YAML.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_WA_OIDCC4UI.yml b/.github/workflows/fit_WA_OIDCC4UI.yml
index 5e891b7a95..9b079f6490 100644
--- a/.github/workflows/fit_WA_OIDCC4UI.yml
+++ b/.github/workflows/fit_WA_OIDCC4UI.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_WA_SAML2SP4UI.yml b/.github/workflows/fit_WA_SAML2SP4UI.yml
index abba469d82..93881c49d3 100644
--- a/.github/workflows/fit_WA_SAML2SP4UI.yml
+++ b/.github/workflows/fit_WA_SAML2SP4UI.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_WA_SRA_CASClient.yml b/.github/workflows/fit_WA_SRA_CASClient.yml
index 9090b001c7..f32f124ff1 100644
--- a/.github/workflows/fit_WA_SRA_CASClient.yml
+++ b/.github/workflows/fit_WA_SRA_CASClient.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_WA_SRA_OAuth2.yml b/.github/workflows/fit_WA_SRA_OAuth2.yml
index 1a892f5ef9..e016731bf4 100644
--- a/.github/workflows/fit_WA_SRA_OAuth2.yml
+++ b/.github/workflows/fit_WA_SRA_OAuth2.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_WA_SRA_OIDC.yml b/.github/workflows/fit_WA_SRA_OIDC.yml
index 52d04df2b0..0360b68d72 100644
--- a/.github/workflows/fit_WA_SRA_OIDC.yml
+++ b/.github/workflows/fit_WA_SRA_OIDC.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_WA_SRA_SAML2.yml b/.github/workflows/fit_WA_SRA_SAML2.yml
index 2d6fe0030c..b60f464740 100644
--- a/.github/workflows/fit_WA_SRA_SAML2.yml
+++ b/.github/workflows/fit_WA_SRA_SAML2.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_Wildfly.yml b/.github/workflows/fit_Wildfly.yml
index 41bd06bb28..5e16517473 100644
--- a/.github/workflows/fit_Wildfly.yml
+++ b/.github/workflows/fit_Wildfly.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/fit_Zookeeper.yml b/.github/workflows/fit_Zookeeper.yml
index 5afa0226e5..c2ff40d05e 100644
--- a/.github/workflows/fit_Zookeeper.yml
+++ b/.github/workflows/fit_Zookeeper.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/mariadb.yml b/.github/workflows/mariadb.yml
index 482bca4558..8c2e2b7b1e 100644
--- a/.github/workflows/mariadb.yml
+++ b/.github/workflows/mariadb.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/mysql.yml b/.github/workflows/mysql.yml
index 4897ea71e4..30997151f9 100644
--- a/.github/workflows/mysql.yml
+++ b/.github/workflows/mysql.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/neo4j.yml b/.github/workflows/neo4j.yml
index 06af0eb436..d698236c3f 100644
--- a/.github/workflows/neo4j.yml
+++ b/.github/workflows/neo4j.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/.github/workflows/oracle.yml b/.github/workflows/oracle.yml
index e4958d6643..128a47b4d5 100644
--- a/.github/workflows/oracle.yml
+++ b/.github/workflows/oracle.yml
@@ -40,7 +40,7 @@ jobs:
- name: Setup Maven
uses: stCarolas/setup-maven@v5
with:
- maven-version: 3.9.9
+ maven-version: 3.9.6
- uses: actions/cache@v4
with:
path: ~/.m2/repository
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/DefaultJobManager.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/DefaultJobManager.java
index cb0064cf32..1b11094182 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/DefaultJobManager.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/DefaultJobManager.java
@@ -103,11 +103,13 @@ public class DefaultJobManager implements JobManager, SyncopeCoreLoader {
@Override
public boolean isRunning(final String jobName) {
- boolean locked = jobStatusDAO.lock(jobName);
- if (locked) {
- jobStatusDAO.unlock(jobName);
+ synchronized (jobName) {
+ boolean locked = jobStatusDAO.lock(jobName);
+ if (locked) {
+ jobStatusDAO.unlock(jobName);
+ }
+ return !locked;
}
- return !locked;
}
protected void registerJob(
@@ -247,28 +249,21 @@ public class DefaultJobManager implements JobManager, SyncopeCoreLoader {
@Transactional
@Override
public void load(final String domain) {
- String notificationJobCronExp = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN, () -> {
- String result = StringUtils.EMPTY;
-
- String conf = confParamOps.get(
- SyncopeConstants.MASTER_DOMAIN, "notificationjob.cronExpression", null, String.class);
- if (conf == null) {
- result = NotificationJob.DEFAULT_CRON_EXP;
- } else if (!StringUtils.EMPTY.equals(conf)) {
- result = conf;
- }
- return result;
- });
-
AuthContextUtils.runAsAdmin(domain, () -> {
// 1. jobs for SchedTasks
Set<SchedTask> tasks = new HashSet<>(taskDAO.<SchedTask>findAll(TaskType.SCHEDULED));
tasks.addAll(taskDAO.<SchedTask>findAll(TaskType.PULL));
tasks.addAll(taskDAO.<SchedTask>findAll(TaskType.PUSH));
+ tasks.addAll(taskDAO.<SchedTask>findAll(TaskType.MACRO));
+ tasks.addAll(taskDAO.<SchedTask>findAll(TaskType.LIVE_SYNC));
boolean loadException = false;
for (Iterator<SchedTask> it = tasks.iterator(); it.hasNext() && !loadException;) {
SchedTask task = it.next();
+
+ LOG.debug("Loading job for {} Task {} {}",
+ taskUtilsFactory.getInstance(task).getType(), task.getKey(), task.getName());
+
try {
register(domain, task, task.getStartAt(), securityProperties.getAdminUser(), false, Map.of());
} catch (Exception e) {
@@ -278,11 +273,14 @@ public class DefaultJobManager implements JobManager, SyncopeCoreLoader {
}
if (loadException) {
- LOG.debug("Errors while loading job instances for tasks, aborting");
+ LOG.error("Errors while loading job for tasks, aborting");
} else {
// 2. jobs for Reports
for (Iterator<? extends Report> it = reportDAO.findAll().iterator(); it.hasNext() && !loadException;) {
Report report = it.next();
+
+ LOG.debug("Loading job for Report {} {}", report.getKey(), report.getName());
+
try {
register(domain, report, null, securityProperties.getAdminUser(), false);
} catch (Exception e) {
@@ -292,12 +290,25 @@ public class DefaultJobManager implements JobManager, SyncopeCoreLoader {
}
if (loadException) {
- LOG.debug("Errors while loading job instances for reports, aborting");
+ LOG.error("Errors while loading job for reports, aborting");
}
}
});
if (SyncopeConstants.MASTER_DOMAIN.equals(domain)) {
+ String notificationJobCronExp = AuthContextUtils.callAsAdmin(SyncopeConstants.MASTER_DOMAIN, () -> {
+ String result = StringUtils.EMPTY;
+
+ String conf = confParamOps.get(
+ SyncopeConstants.MASTER_DOMAIN, "notificationjob.cronExpression", null, String.class);
+ if (conf == null) {
+ result = NotificationJob.DEFAULT_CRON_EXP;
+ } else if (!StringUtils.EMPTY.equals(conf)) {
+ result = conf;
+ }
+ return result;
+ });
+
// 3. NotificationJob
if (StringUtils.isBlank(notificationJobCronExp)) {
LOG.debug("Empty value provided for {}'s cron, not scheduling", NotificationJob.class.getSimpleName());
@@ -348,21 +359,28 @@ public class DefaultJobManager implements JobManager, SyncopeCoreLoader {
Set<SchedTask> tasks = new HashSet<>(taskDAO.<SchedTask>findAll(TaskType.SCHEDULED));
tasks.addAll(taskDAO.<SchedTask>findAll(TaskType.PULL));
tasks.addAll(taskDAO.<SchedTask>findAll(TaskType.PUSH));
+ tasks.addAll(taskDAO.<SchedTask>findAll(TaskType.MACRO));
+ tasks.addAll(taskDAO.<SchedTask>findAll(TaskType.LIVE_SYNC));
tasks.forEach(task -> {
+ LOG.debug("Unloading job for {} Task {} {}",
+ taskUtilsFactory.getInstance(task).getType(), task.getKey(), task.getName());
+
try {
unregister(task);
} catch (Exception e) {
- LOG.error("While unloading job instance for task {}", task.getKey(), e);
+ LOG.error("While unloading job for task {}", task.getKey(), e);
}
});
// 2. jobs for Reports
reportDAO.findAll().forEach(report -> {
+ LOG.debug("Unloading job for Report {} {}", report.getKey(), report.getName());
+
try {
unregister(report);
} catch (Exception e) {
- LOG.error("While unloading job instance for report {}", report.getName(), e);
+ LOG.error("While unloading job for report {}", report.getName(), e);
}
});
});
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/Job.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/Job.java
index 054530f619..a30be09722 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/Job.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/Job.java
@@ -57,18 +57,27 @@ public abstract class Job implements Runnable {
AuthContextUtils.getWho(),
false)));
- if (AuthContextUtils.callAsAdmin(context.getDomain(), () -> jobStatusDAO.lock(context.getJobName()))) {
- LOG.debug("Job {} locked, starting execution", context.getJobName());
+ boolean locked = false;
+ try {
+ locked = AuthContextUtils.callAsAdmin(context.getDomain(), () -> jobStatusDAO.lock(context.getJobName()));
+ } catch (Exception e) {
+ LOG.debug("While attempting to lock job {}", context.getJobName(), e);
+ }
+ if (!locked) {
+ LOG.debug("Could not lock job {}, skipping execution", context.getJobName());
+ return;
+ }
+
+ LOG.debug("Job {} locked, starting execution", context.getJobName());
+
+ try {
+ execute(context);
+ } catch (JobExecutionException e) {
+ LOG.error("While executing job {}", context.getJobName(), e);
+ } finally {
+ LOG.debug("Job {} execution completed", context.getJobName());
- try {
- execute(context);
- } catch (JobExecutionException e) {
- LOG.error("While executing job {}", context.getJobName(), e);
- } finally {
- AuthContextUtils.runAsAdmin(context.getDomain(), () -> jobStatusDAO.unlock(context.getJobName()));
- }
- } else {
- LOG.info("Could not lock job {}, skipping execution", context.getJobName());
+ AuthContextUtils.runAsAdmin(context.getDomain(), () -> jobStatusDAO.unlock(context.getJobName()));
}
}
}
diff --git a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/SyncopeTaskScheduler.java b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/SyncopeTaskScheduler.java
index c659c80c70..17a35ce8e0 100644
--- a/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/SyncopeTaskScheduler.java
+++ b/core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/job/SyncopeTaskScheduler.java
@@ -24,8 +24,10 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Future;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
+import org.apache.commons.lang3.tuple.MutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.syncope.core.persistence.api.dao.JobStatusDAO;
import org.apache.syncope.core.provisioning.api.job.StoppableSchedTaskJobDelegate;
@@ -38,78 +40,63 @@ import org.springframework.scheduling.support.CronTrigger;
public class SyncopeTaskScheduler {
+ public static final String CACHE = "jobCache";
+
protected static final Logger LOG = LoggerFactory.getLogger(SyncopeTaskScheduler.class);
protected final TaskScheduler scheduler;
protected final JobStatusDAO jobStatusDAO;
- protected final Map<Pair<String, String>, Pair<Job, ScheduledFuture<?>>> jobs = new ConcurrentHashMap<>();
+ protected final Map<Pair<String, String>, Pair<Job, Optional<ScheduledFuture<?>>>> jobs = new ConcurrentHashMap<>();
public SyncopeTaskScheduler(final TaskScheduler scheduler, final JobStatusDAO jobStatusDAO) {
this.scheduler = scheduler;
this.jobStatusDAO = jobStatusDAO;
}
- public void register(final Job job) {
- jobs.put(
+ protected void register(final Job job, final Optional<ScheduledFuture<?>> future) {
+ jobs.putIfAbsent(
Pair.of(job.getContext().getDomain(), job.getContext().getJobName()),
- Pair.of(job, null));
- }
-
- public void start(final String domain, final String jobName) {
- Optional.ofNullable(jobs.get(Pair.of(domain, jobName))).
- ifPresent(pair -> schedule(pair.getLeft(), Instant.now()));
+ MutablePair.of(job, future));
}
- public void start(final Job job) {
- schedule(job, Instant.now());
+ public void register(final Job job) {
+ register(job, Optional.empty());
}
public void schedule(final Job job, final CronTrigger trigger) {
- ScheduledFuture<?> future = scheduler.schedule(job, trigger);
- jobs.put(
- Pair.of(job.getContext().getDomain(), job.getContext().getJobName()),
- Pair.of(job, future));
+ register(job, Optional.of(scheduler.schedule(job, trigger)));
}
public void schedule(final Job job, final Instant startTime) {
- ScheduledFuture<?> future = scheduler.schedule(job, startTime);
- jobs.put(
- Pair.of(job.getContext().getDomain(), job.getContext().getJobName()),
- Pair.of(job, future));
+ register(job, Optional.of(scheduler.schedule(job, startTime)));
}
public boolean contains(final String domain, final String jobName) {
return jobs.containsKey(Pair.of(domain, jobName));
}
- public Optional<Class<?>> getJobClass(final String domain, final String jobName) {
- return Optional.ofNullable(jobs.get(Pair.of(domain, jobName))).
- map(pair -> AopUtils.getTargetClass(pair.getLeft()));
- }
-
- public Optional<OffsetDateTime> getNextTrigger(final String domain, final String jobName) {
- return Optional.ofNullable(jobs.get(Pair.of(domain, jobName))).
- filter(pair -> pair.getRight() != null).
- map(pair -> pair.getRight().getDelay(TimeUnit.SECONDS)).
- filter(delay -> delay > 0).
- map(delay -> OffsetDateTime.now().plusSeconds(delay));
+ public void start(final String domain, final String jobName) {
+ Optional.ofNullable(jobs.get(Pair.of(domain, jobName))).
+ filter(pair -> pair.getRight().map(Future::isDone).orElse(true)).
+ ifPresent(pair -> pair.setValue(Optional.of(scheduler.schedule(pair.getLeft(), Instant.now()))));
}
public void cancel(final String domain, final String jobName) {
Optional.ofNullable(jobs.get(Pair.of(domain, jobName))).ifPresent(pair -> {
- boolean mayInterruptIfRunning = true;
+ boolean mayInterruptIfRunning;
if (pair.getLeft() instanceof TaskJob taskJob
&& taskJob.getDelegate() instanceof StoppableSchedTaskJobDelegate stoppable) {
stoppable.stop();
mayInterruptIfRunning = false;
+ } else {
+ mayInterruptIfRunning = true;
}
- if (pair.getRight() != null) {
- pair.getRight().cancel(mayInterruptIfRunning);
- }
+ pair.getRight().ifPresent(f -> f.cancel(mayInterruptIfRunning));
+ pair.setValue(Optional.empty());
});
}
@@ -118,6 +105,20 @@ public class SyncopeTaskScheduler {
AuthContextUtils.runAsAdmin(domain, () -> jobStatusDAO.unlock(jobName));
}
+ public Optional<Class<?>> getJobClass(final String domain, final String jobName) {
+ return Optional.ofNullable(jobs.get(Pair.of(domain, jobName))).
+ map(pair -> AopUtils.getTargetClass(pair.getLeft()));
+ }
+
+ public Optional<OffsetDateTime> getNextTrigger(final String domain, final String jobName) {
+ return Optional.ofNullable(jobs.get(Pair.of(domain, jobName))).
+ filter(pair -> pair.getRight().map(f -> !f.isDone()).orElse(false)).
+ flatMap(Pair::getRight).
+ map(f -> f.getDelay(TimeUnit.SECONDS)).
+ filter(delay -> delay > 0).
+ map(delay -> OffsetDateTime.now().plusSeconds(delay));
+ }
+
public List<String> getJobNames(final String domain) {
return jobs.keySet().stream().filter(pair -> domain.equals(pair.getLeft())).map(Pair::getRight).toList();
}
diff --git a/fit/core-reference/src/test/java/org/apache/syncope/fit/core/SchedTaskITCase.java b/fit/core-reference/src/test/java/org/apache/syncope/fit/core/SchedTaskITCase.java
index b850f47cce..7a8eb99d63 100644
--- a/fit/core-reference/src/test/java/org/apache/syncope/fit/core/SchedTaskITCase.java
+++ b/fit/core-reference/src/test/java/org/apache/syncope/fit/core/SchedTaskITCase.java
@@ -28,11 +28,9 @@ import static org.junit.jupiter.api.Assertions.fail;
import jakarta.ws.rs.core.Response;
import java.time.OffsetDateTime;
-import java.util.Comparator;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Predicate;
import org.apache.syncope.common.lib.to.ExecTO;
import org.apache.syncope.common.lib.to.ImplementationTO;
@@ -41,10 +39,10 @@ import org.apache.syncope.common.lib.to.PagedResult;
import org.apache.syncope.common.lib.to.PullTaskTO;
import org.apache.syncope.common.lib.to.PushTaskTO;
import org.apache.syncope.common.lib.to.SchedTaskTO;
-import org.apache.syncope.common.lib.to.TaskTO;
import org.apache.syncope.common.lib.types.IdRepoImplementationType;
import org.apache.syncope.common.lib.types.JobAction;
import org.apache.syncope.common.lib.types.TaskType;
+import org.apache.syncope.common.rest.api.RESTHeaders;
import org.apache.syncope.common.rest.api.beans.ExecQuery;
import org.apache.syncope.common.rest.api.beans.ExecSpecs;
import org.apache.syncope.common.rest.api.beans.TaskQuery;
@@ -97,40 +95,74 @@ public class SchedTaskITCase extends AbstractTaskITCase {
SchedTaskTO task = new SchedTaskTO();
task.setActive(true);
- task.setName("deferred");
+ task.setName("deferred" + getUUIDString());
task.setJobDelegate(taskJobDelegate.getKey());
Response response = TASK_SERVICE.create(TaskType.SCHEDULED, task);
- task = getObject(response.getLocation(), TaskService.class, SchedTaskTO.class);
- assertNotNull(task);
- String taskKey = task.getKey();
- assertNotNull(task);
+ String taskKey = response.getHeaderString(RESTHeaders.RESOURCE_KEY);
+ assertNotNull(taskKey);
OffsetDateTime initial = OffsetDateTime.now();
OffsetDateTime later = initial.plusSeconds(2);
- AtomicReference<TaskTO> taskTO = new AtomicReference<>(task);
- int preSyncSize = taskTO.get().getExecutions().size();
- ExecTO execution = TASK_SERVICE.execute(new ExecSpecs.Builder().key(task.getKey()).startAt(later).build());
- assertNotNull(execution.getExecutor());
+ TASK_SERVICE.execute(new ExecSpecs.Builder().key(taskKey).startAt(later).build());
await().atMost(MAX_WAIT_SECONDS, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).until(() -> {
try {
- taskTO.set(TASK_SERVICE.read(TaskType.SCHEDULED, taskKey, true));
- return preSyncSize < taskTO.get().getExecutions().size();
+ return !TASK_SERVICE.read(TaskType.SCHEDULED, taskKey, true).getExecutions().isEmpty();
} catch (Exception e) {
return false;
}
});
- PagedResult<ExecTO> execs = TASK_SERVICE.listExecutions(new ExecQuery.Builder().key(task.getKey()).build());
- assertEquals(preSyncSize + 1, execs.getTotalCount());
+ PagedResult<ExecTO> execs = TASK_SERVICE.listExecutions(new ExecQuery.Builder().key(taskKey).build());
+ assertEquals(1, execs.getTotalCount());
- ExecTO exec = execs.getResult().stream().
- sorted(Comparator.comparing(ExecTO::getStart).reversed()).findFirst().orElseThrow();
- assertTrue(exec.getStart().isAfter(initial));
+ assertTrue(execs.getResult().get(0).getStart().isAfter(initial));
// round 1 sec for safety
- assertTrue(exec.getStart().plusSeconds(1).isAfter(later));
+ assertTrue(execs.getResult().get(0).getStart().plusSeconds(1).isAfter(later));
+ }
+
+ @Test
+ public void multistart() {
+ ImplementationTO taskJobDelegate = IMPLEMENTATION_SERVICE.read(
+ IdRepoImplementationType.TASKJOB_DELEGATE, TestSampleJobDelegate.class.getSimpleName());
+ assertNotNull(taskJobDelegate);
+
+ SchedTaskTO task = new SchedTaskTO();
+ task.setActive(true);
+ task.setName("multistart" + getUUIDString());
+ task.setJobDelegate(taskJobDelegate.getKey());
+
+ Response response = TASK_SERVICE.create(TaskType.SCHEDULED, task);
+ String taskKey = response.getHeaderString(RESTHeaders.RESOURCE_KEY);
+ assertNotNull(taskKey);
+
+ TASK_SERVICE.actionJob(taskKey, JobAction.START);
+
+ await().atMost(MAX_WAIT_SECONDS, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).until(() -> {
+ try {
+ return !TASK_SERVICE.read(TaskType.SCHEDULED, taskKey, true).getExecutions().isEmpty();
+ } catch (Exception e) {
+ return false;
+ }
+ });
+
+ PagedResult<ExecTO> execs = TASK_SERVICE.listExecutions(new ExecQuery.Builder().key(taskKey).build());
+ assertEquals(1, execs.getTotalCount());
+
+ TASK_SERVICE.actionJob(taskKey, JobAction.START);
+
+ await().atMost(MAX_WAIT_SECONDS, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).until(() -> {
+ try {
+ return TASK_SERVICE.read(TaskType.SCHEDULED, taskKey, true).getExecutions().size() >= 2;
+ } catch (Exception e) {
+ return false;
+ }
+ });
+
+ execs = TASK_SERVICE.listExecutions(new ExecQuery.Builder().key(taskKey).build());
+ assertEquals(2, execs.getTotalCount());
}
@Test
diff --git a/pom.xml b/pom.xml
index 22fe9069ce..0b42cde948 100644
--- a/pom.xml
+++ b/pom.xml
@@ -412,7 +412,7 @@ under the License.
<cxf.version>4.1.0</cxf.version>
<bouncycastle.version>1.79</bouncycastle.version>
- <nimbus-jose-jwt.version>9.48</nimbus-jose-jwt.version>
+ <nimbus-jose-jwt.version>10.0</nimbus-jose-jwt.version>
<spring-boot.version>3.3.7</spring-boot.version>
<spring-cloud-gateway.version>4.1.6</spring-cloud-gateway.version>
@@ -1741,7 +1741,7 @@ under the License.
<version>${targetJdk}</version>
</requireJavaVersion>
<requireMavenVersion>
- <version>3.9.9</version>
+ <version>3.9.5</version>
</requireMavenVersion>
</rules>
</configuration>
@@ -1816,7 +1816,7 @@ under the License.
<plugin>
<groupId>org.codehaus.gmavenplus</groupId>
<artifactId>gmavenplus-plugin</artifactId>
- <version>4.1.0</version>
+ <version>4.1.1</version>
<dependencies>
<dependency>
<groupId>org.apache.groovy</groupId>
@@ -2332,7 +2332,7 @@ under the License.
</activation>
<properties>
- <mockito.version>5.14.2</mockito.version>
+ <mockito.version>5.15.2</mockito.version>
<byte-buddy.version>1.15.11</byte-buddy.version>
</properties>