This is an automated email from the ASF dual-hosted git repository.

shuber pushed a commit to branch unomi-3-dev
in repository https://gitbox.apache.org/repos/asf/unomi.git


The following commit(s) were added to refs/heads/unomi-3-dev by this push:
     new c52449e87 Up-port latest changes to the merge branch. - Enhanced 
build.sh script for improved error handling and preflight validation. - Updated 
documentation to reflect changes in building and deploying Apache Unomi. - 
Added migration script (migration_es7-es9.sh) for reindexing data from 
Elasticsearch 7 to 9, including prerequisites and execution instructions. - 
Updated various files to reflect the new Elasticsearch migration process and 
improved build cache configuration. - Ad [...]
c52449e87 is described below

commit c52449e87b73a0fd0f7bc603ed291d5610f6b86c
Author: Serge Huber <[email protected]>
AuthorDate: Sat Dec 6 21:44:35 2025 +0100

    Up-port latest changes to the merge branch.
    - Enhanced build.sh script for improved error handling and preflight 
validation.
    - Updated documentation to reflect changes in building and deploying Apache 
Unomi.
    - Added migration script (migration_es7-es9.sh) for reindexing data from 
Elasticsearch 7 to 9, including prerequisites and execution instructions.
    - Updated various files to reflect the new Elasticsearch migration process 
and improved build cache configuration.
    - Adjusted copyright years in NOTICE files to 2025.
    - Cleaned up .gitignore and build configurations for better clarity and 
organization.
---
 .github/workflows/codeql-analysis-java.yml         |   3 +
 .github/workflows/unomi-ci-build-tests.yml         |   3 +
 .github/workflows/unomi-ci-docs-deploy.yml         |   3 +
 .gitignore                                         |   1 +
 .mvn/develocity.xml                                |  44 ++
 .mvn/extensions.xml                                |  45 +-
 .mvn/maven-build-cache-config.xml                  |  18 +-
 BUILDING                                           | 103 +--
 KEYS                                               | 176 +++--
 NOTICE                                             |   2 +-
 NOTICE.template                                    |   2 +-
 build.sh                                           | 105 ++-
 generate-manual-config.sh                          |  14 +-
 .../test/java/org/apache/unomi/itests/AllITs.java  |   2 +-
 .../org/apache/unomi/itests/ProgressListener.java  | 132 +++-
 .../org/apache/unomi/itests/ProgressSuite.java     | 105 +++
 .../java/org/apache/unomi/itests/SecurityIT.java   |  46 --
 .../src/main/asciidoc/building-and-deploying.adoc  | 248 +++++--
 manual/src/main/asciidoc/index.adoc                |   2 +
 manual/src/main/asciidoc/migrate-es7-to-es9.adoc   | 131 ++++
 manual/src/main/asciidoc/privacy.adoc              |   4 +-
 manual/src/main/asciidoc/whats-new.adoc            |   6 +-
 migration_es7-es9.sh                               | 750 +++++++++++++++++++++
 package/src/main/resources/NOTICE                  |  12 +-
 .../ConditionESQueryBuilderDispatcher.java         |  27 +-
 .../ConditionOSQueryBuilderDispatcher.java         |  33 +-
 ...t.java => ConditionQueryBuilderDispatcher.java} |  52 +-
 pom.xml                                            |  14 -
 28 files changed, 1616 insertions(+), 467 deletions(-)

diff --git a/.github/workflows/codeql-analysis-java.yml 
b/.github/workflows/codeql-analysis-java.yml
index 5fd21511c..0b8b7c427 100644
--- a/.github/workflows/codeql-analysis-java.yml
+++ b/.github/workflows/codeql-analysis-java.yml
@@ -20,6 +20,9 @@ on:
   schedule:
     - cron: '38 1 * * 0'
 
+env:
+  DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
+
 jobs:
   analyze:
     name: Analyze
diff --git a/.github/workflows/unomi-ci-build-tests.yml 
b/.github/workflows/unomi-ci-build-tests.yml
index 1a6e9e5ee..e629253cd 100644
--- a/.github/workflows/unomi-ci-build-tests.yml
+++ b/.github/workflows/unomi-ci-build-tests.yml
@@ -9,6 +9,9 @@ on:
   pull_request:
     types: [opened, reopened, synchronize]
 
+env:
+  DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
+
 jobs:
   unit-tests:
     name: Execute unit tests
diff --git a/.github/workflows/unomi-ci-docs-deploy.yml 
b/.github/workflows/unomi-ci-docs-deploy.yml
index 286404e0b..ad1c08dc7 100644
--- a/.github/workflows/unomi-ci-docs-deploy.yml
+++ b/.github/workflows/unomi-ci-docs-deploy.yml
@@ -7,6 +7,9 @@ on:
   push:
     branches: [master]
 
+env:
+  DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
+
 jobs:
   publish-docs-and-snapshots:
     name: Publish Javadoc and snapshots
diff --git a/.gitignore b/.gitignore
index d2a8b60d7..85c793ac4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,6 +18,7 @@ rest/.miredot-offline.json
 **/*.versionsBackup
 itests/src/main
 dependency_tree.txt
+.mvn/.develocity/develocity-workspace-id
 /.cursor/rules/always-read-existing-code.mdc
 /.cursor/rules/avoid-code-duplication.mdc
 /.cursor/rules/osgi-injection-frameworks.mdc
diff --git a/.mvn/develocity.xml b/.mvn/develocity.xml
new file mode 100644
index 000000000..d8d4e709b
--- /dev/null
+++ b/.mvn/develocity.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<develocity xmlns="https://www.gradle.com/develocity-maven"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="https://www.gradle.com/develocity-maven 
https://www.gradle.com/schema/develocity-maven.xsd";>
+    <projectId>unomi</projectId>
+    <server>
+        <url>https://develocity.apache.org</url>
+    </server>
+    <buildScan>
+        
<backgroundBuildScanUpload>#{isFalse(env['GITHUB_ACTIONS'])}</backgroundBuildScanUpload>
+        <publishing>
+            <onlyIf><![CDATA[authenticated]]></onlyIf>
+        </publishing>
+        <obfuscation>
+            <ipAddresses>#{{'0.0.0.0'}}</ipAddresses>
+        </obfuscation>
+    </buildScan>
+    <buildCache>
+        <local>
+            <enabled>false</enabled>
+        </local>
+        <remote>
+            <enabled>false</enabled>
+        </remote>
+    </buildCache>
+</develocity>
\ No newline at end of file
diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml
index f9a3dfca9..af06e17d7 100644
--- a/.mvn/extensions.xml
+++ b/.mvn/extensions.xml
@@ -1,23 +1,38 @@
-<!-- -
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
 
       http://www.apache.org/licenses/LICENSE-2.0
 
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
 <extensions>
     <extension>
         <groupId>org.apache.maven.extensions</groupId>
         <artifactId>maven-build-cache-extension</artifactId>
-        <version>1.2.0</version>
+        <version>1.2.1</version>
+    </extension>
+    <extension>
+        <groupId>com.gradle</groupId>
+        <artifactId>develocity-maven-extension</artifactId>
+        <version>2.1</version>
+    </extension>
+    <extension>
+        <groupId>com.gradle</groupId>
+        <artifactId>common-custom-user-data-maven-extension</artifactId>
+        <version>2.0.6</version>
     </extension>
-</extensions>
\ No newline at end of file
+</extensions>
diff --git a/.mvn/maven-build-cache-config.xml 
b/.mvn/maven-build-cache-config.xml
index 1e0eb9902..c88a0ca2c 100644
--- a/.mvn/maven-build-cache-config.xml
+++ b/.mvn/maven-build-cache-config.xml
@@ -17,9 +17,6 @@
 <cache xmlns="http://maven.apache.org/BUILD-CACHE-CONFIG/1.0.0";
     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
     xsi:schemaLocation="http://maven.apache.org/BUILD-CACHE-CONFIG/1.0.0 
https://maven.apache.org/xsd/build-cache-config-1.0.0.xsd";>
-    <!-- 
-        Template Maven build cache configuration
-      -->
     <configuration>
         <enabled>true</enabled>
         <hashAlgorithm>SHA-256</hashAlgorithm>
@@ -34,26 +31,13 @@
     </configuration>
     <input>
         <global>
-            <!--  If not defined, default glob is "*"  -->
-            <!--glob> 
{*.java,*.groovy,*.yaml,*.svcd,*.proto,*assembly.xml,assembly*.xml,*logback.xml,*.vm,*.ini,*.jks,*.properties,*.sh,*.bat}
 </glob-->
             <includes>
-                <!--  By default, project sources and resources directories 
are included
-                (src/main/java and src/main/resources)  -->
-                <!--  In this example, the goal is to include a wider range of 
src directories (like
-                src/main/assembly or src/main/groovy)  -->
                 <include>src/</include>
             </includes>
             <excludes>
-                <!--  We don't want a static "hash" pom resolution (it would 
conflict the will to
-                adjust the version in the manifest),  -->
-                <!--  we exclude this specific file (as it is already by 
default since it is not in
-                an include folder  -->
-                <!--  The need to rebuild a project based on the pom is 
already computed with some
-                intelligence by the extension.  -->
                 <exclude>pom.xml</exclude>
-                <!--  Also excluding everything located in this project 
specific folder  -->
                 <exclude>src/main/javagen/**</exclude>
             </excludes>
         </global>
     </input>
-</cache>
\ No newline at end of file
+</cache>
diff --git a/BUILDING b/BUILDING
index 1d460d507..03ff90681 100644
--- a/BUILDING
+++ b/BUILDING
@@ -18,105 +18,10 @@
 Building Apache Unomi
 =====================
 
-Initial Setup
--------------
+This file has been deprecated to avoid duplication. Please use the online 
manual for up-to-date instructions:
 
-1) Install J2SE 17 SDK (or later), which can be downloaded from 
http://www.oracle.com/technetwork/java/javase/downloads/index.html
+  Online manual (Building): 
https://unomi.apache.org/manual/latest/index.html#_building
 
-2) Make sure that your JAVA_HOME environment variable is set to the newly 
installed
-   JDK location, and that your PATH includes %JAVA_HOME%\bin (windows) or
-   $JAVA_HOME$/bin (unix).
+To propose edits to the build documentation, update the source Asciidoc here:
 
-3) Install Maven 3.0.3 (or later), which can be downloaded from
-   http://maven.apache.org/download.html. Make sure that your PATH includes
-   the MVN_HOME/bin directory.
-
-
-Building
---------
-
-1) Change to the top level directory of Apache Unomi source distribution.
-2) Run
-
-         $> mvn clean install
-
-   This will compile Apache Unomi and run all of the tests in the
-   Apache Unomi source distribution. Alternatively, you can run
-
-         $> mvn -P \!integration-tests clean install
-
-   This will compile Apache Unomi without running the tests and takes less
-   time to build.
-
-3) The distributions will be available under "package/target" directory.
-
-Building on Windows with PowerShell
----------------------------------
-
-For Windows users, we provide a PowerShell build script (build.ps1) that 
offers the same functionality
-as the Unix shell script with a more Windows-native experience.
-
-1) Open PowerShell and change to the Apache Unomi directory
-2) Run the build script:
-
-         PS> .\build.ps1
-
-   Available options include:
-   - -Help                    Show help message
-   - -SkipTests              Skip all tests
-   - -IntegrationTests       Run integration tests
-   - -Deploy                 Deploy after build
-   - -Debug                  Run Karaf in debug mode
-   - -UseOpenSearch         Use OpenSearch instead of ElasticSearch
-   - -KarafHome <path>       Set Karaf home directory
-   - -ResolverDebug          Enable Karaf Resolver debug logging for 
integration tests
-
-   Examples:
-   - Build with integration tests:
-         PS> .\build.ps1 -IntegrationTests
-
-   - Build and deploy to Karaf:
-         PS> .\build.ps1 -Deploy -KarafHome ~\apache-karaf
-
-   The script will automatically check for required tools (Java, Maven, 
GraphViz)
-   and provide installation instructions if any are missing.
-
-Updating the website
---------------------
-
-We provide two scripts to generate the website and upload the modifications
-to the live SVN repository. To generate the website and check if everything is 
in
-place simply launch:
-
-    ./generate-site.sh
-
-The generated site will then be in the target/site directory
-
-Once it is ready to be published to the live site, simply use the following
-command:
-
-    ./generate-site-and-upload.sh SVNusername SVNpassword
-
-Where the required username and password have the proper credentials for the 
SVN
-Apache repository.
-
-JGitFlow
---------
-
-We now can use the [JGitFlow Maven plugin](http://jgitflow.bitbucket.org) to 
make it easier to work with feature,
-hotfix and other types of branches.
-
-For example, to start a feature branch, simply use
-
-    mvn jgitflow:feature-start
-
-This will prompt you for the feature name, and then create a feature branch 
and update
-all the POMs to have a version that contains the feature name. This makes it 
then a lot
-easier to integrate with continuous integration systems to generate builds for 
the
-feature branch.
-
-Once the feature is completed you can use
-
-    mvn jgitflow:feature-finish
-
-To merge the branch into master.
+  manual/src/main/asciidoc/building-and-deploying.adoc
diff --git a/KEYS b/KEYS
index 6d17befb8..742bcd464 100644
--- a/KEYS
+++ b/KEYS
@@ -235,62 +235,128 @@ SIFgvTXN5InBSs+XEagL18j7gYxdJ64FxuZkqw8l5TxdNg==
 =WYQb
 -----END PGP PUBLIC KEY BLOCK-----
 
-pub   rsa4096 2024-09-02 [SC]
-      2518A55A61447F846B494C62D0EEBF15121B4740
-uid          [  ultime ] Jonathan Sinovassin-naik (CODE SIGNING KEY) 
<[email protected]>
-sig 3        D0EEBF15121B4740 2024-09-02  Jonathan Sinovassin-naik (CODE 
SIGNING KEY) <[email protected]>
-sub   rsa4096 2024-09-02 [E]
-sig          D0EEBF15121B4740 2024-09-02  Jonathan Sinovassin-naik (CODE 
SIGNING KEY) <[email protected]>
+pub   rsa4096/4B65ABFBFCD57B56 2025-10-31 [SC]
+      A5011B897E6D7C96EA8D97984B65ABFBFCD57B56
+uid                [  ultime ] Sinovassin-naik Jonathan (CODE SIGNING KEY) 
<[email protected]>
+sig 3        4B65ABFBFCD57B56 2025-10-31  [autosignature]
+sub   rsa4096/254B217A877B335F 2025-10-31 [E]
+sig          4B65ABFBFCD57B56 2025-10-31  [autosignature]
 
 -----BEGIN PGP PUBLIC KEY BLOCK-----
 
-mQINBGbVgHMBEACwoOXutUMrIfveN9tcO/Cn2I4Pu4kC2cPIafqh+IGOVp+b6KSi
-Zv+oqW09tcdH75NUoknGSuWMV37bZlsfmWw/ehf+PmvPKfXcPCiW45ff8QuF5gKt
-/GG2DzQLyNVvuE5gr9gu7CcpjZAnBvzBSCwdeDD9G2Bev2pPQzLT9jmu/STXsGZy
-fzV24nxZxivZWmAaSuWWhhggZa4aWVriSZjRRMbZlHf4XhV8AKBq+tTYUOreoER5
-ht/ADeBUXwZmWq0ocGEyA6TQPG4fpD8afWcgKVwLHyhi1+BbxlzQi6l7K8hTZ2Pe
-c5vWqPh61QE2Pes1GpcciTs42xqJe0RX2AbCxOb64lh3vLun1ipUenLtZBZXWDPW
-Ko3LW23FNOoucLnl8K1UJvnWMEl9IAC/qbfTjgVaHcvR3nwGi8+BIki7c8HcXYkU
-eGE+5/B0wqEZpaSFl0TrlpR827Q3lQJugd92wydkwFd1gGL0VWideRl2oiguMqVF
-qhX1XDyjUoA5f094YMa0y36EodqJo4HBQXf+eEnaUmQEW3FLoivWJAJ9/N7BmyeT
-pNAtHK+XuZZBGEkI0eMZ7yZlGES1G8p2Cro4imnCwZ9ZNbLcOCBlga9yFzjqLPN/
-U2aD0Lyy9wNsopMy9g9qTrG+MT1VCgx+pZLA/kLwiGDKCe2GZqmeajedwwARAQAB
-tEhKb25hdGhhbiBTaW5vdmFzc2luLW5haWsgKENPREUgU0lHTklORyBLRVkpIDxq
-c2lub3Zhc3Npbm5haWtAYXBhY2hlLm9yZz6JAk4EEwEIADgWIQQlGKVaYUR/hGtJ
-TGLQ7r8VEhtHQAUCZtWAcwIbAwULCQgHAgYVCgkICwIEFgIDAQIeAQIXgAAKCRDQ
-7r8VEhtHQHUUD/46MlLc7UYc31NzQ2Eaqp+39sC8jeENWM1ienvOIQMfX0Nl4exu
-FpSmD2hzb2AyDNapWpVH45zG0I2+pqtiktn8YafGadeTIWmTJ1+oU3FtHvb4bTMP
-cu9ICyKkIFD83giBCwoMCf5iqp6pEIekYNZ4TSq/64WncqRZUnDByabTI4BlmZLx
-DAYhi/ZBzqcJgjZM7E5aiP/A9Ee+50l5QzKg91F0Huoiy2aEvz5jY5F/Yjy7w2wU
-Bafyr9yrn5193OVEG7GTxZFYIlfS8kH7iw+F+PF06twU6Mt82cPdeQ/3ZfR3kJ/V
-1mWu7A0Usto/hoSa2B5E/VF4wp6hBHqIq/gaOSOfDDds5+s+hUKm7pVIe+NUbPOO
-oqajX33wfAfESSjePGR2LI7r2A1974xH0jHXVaO2DjBeVS9AHm2rK3bBef4wEA2H
-VHkry/ArQ0I0Tmfb2J0mW624qtGIgpO+QUljA0a3JCWjBtZBEisX59gCQWxSUOr1
-Undlazh8CMfMVZ6vQR577at+m6TSBqIGbi/rk7Vif1EblcE+fF4YN8XSkNX3PASo
-gHfmuj7ircra+7l4qwUNukbxRoByF/6U6+z4WJSBCxDP7DcsGNj+KG0TpOE3LiGF
-rp5S+NP+G7kDFJhuffzXKYY7qu8CV6etR5l82KWoJEZKVv4MMok5oHbnmbkCDQRm
-1YBzARAAxhRuUCge5/F7gEzaMbDB1xIR6gDckn76TKsmqHQ+/L16Ui12tuH03UNH
-N7YCdIFHc6uNu5ePDOoduNtCnrA5POTyKpgYb5UTz7t0Q+Z+zLtra6MgBndX8jX2
-/Itqu067mDGrYansQ6ylx6xtuSLXEihRC0KCShdUV1TYYc53XpwzgAsWdPjlKwJ+
-Oky2Fni6c0E5ScPXS4E+KCyHCId7oYSQzi6pwJEvAhWzfAOchI+avbckGGPHJJVM
-88T7Q25wODpHGkJUusI8TghadV242hFlG4JMHglnFxuEL1XXTeLAghI/qnFMs+Fx
-ug1dOl5QjJdXKx0m08/WJNbcXH5K+XBKv5cl8t6qXssrThFFVln8IXvweoSm+nIt
-AdtD3UjVJz2uwXlasWSljhewTmj9eLqe0xJPbCKn7zZzLcZPJmR5Bj8BFsZsc1Tk
-75+/Pcvp2kOjkbmwANEXZwngkjM4uIwypZKvujmdybJ0TpjT9hbu10diUT6pLZ84
-Ci94s6KrJykoca3OeG1ZDbNjZV6qRgrEe7OxAwlRDqWvS/cZVXz8y+38Xrsnyt00
-2WOsplnAMcJhMSBRJLMqL8Z7nN4jVwHUfxWgwWKP3IO08AFFn9EO9dl6gKtiWFbB
-RRV7WlTLW3ybvMekhQSqAgOoZQTtOC0MHPy1EmMNQPMza4dQWJUAEQEAAYkCNgQY
-AQgAIBYhBCUYpVphRH+Ea0lMYtDuvxUSG0dABQJm1YBzAhsMAAoJENDuvxUSG0dA
-oigP/RzKbFF+ur3TSJeiby/07AZvRQluRTtx2UpaRS9olmdBxaN6dubSaLohKkpa
-S9/2A2ottoIzH4K4KpSFE8E68vneRby6J2NlqHzutsoOdGNgaqlziao0b3CpWD6i
-sz4MLTs1htULFElmZm38dbvlhtk1XlZU7AZguoniiunx5vh+RycEcZo25+nwGYtZ
-WHFLRcVbVE+WKRz9hCGqcJEWIUOPFIXO+7hJheEDWG1iYRk+htwUAt3OoISrMQgr
-IQ3Z8Kbte9Q0wMYQ0ZFn1vlVV5k8Yfuep0e6Q51u8VYcHPpHhO859dPZwHcYZrK1
-1ZNp/TkCr2+hNEM0c2b9lVJJqEmh+6X6vrJB6ICHAyZ7mtpRcxvWPQ9mahYTlUJ4
-+7ysniMj39JyLUZ8indFmfq/rWKg/s5415AsxZw35agD3jgc78IDWSVdhlPUtt8w
-qFCppbaQlLYSqzIiUQjDPvNmoC7RfDAZrLrwee57xQD7GIobjd40YrdELRsLzfsK
-609ehv4N0FLTLa6UiWg5vxt9J17S4IEi9RhAloNyMxMD+Jw/gBCcDf7fVB2S0oPw
-rqVGM8K3VPqW02zTa3SB/At9a6Rfxr2tGdkrP3SC9ZCpLYlHkG9KAzKyxmqvNclU
-2E16HtpIg5kkrjvSV74m6QzZ8vx++R91x9OiEDfdfaghteZ6
-=Dslh
+mDMEaQSIixYJKwYBBAHaRw8BAQdAbf3KuJRUK4/UDZpFfr4B/9FJqzn64x7KJUyA
+aXRBC6u0NVNpbm92YXNzaW4tbmFpayBKb25hdGhhbiA8anNpbm92YXNzaW5uYWlr
+QGFwYWNoZS5vcmc+iJkEExYKAEEWIQQplSWGubIcFfqh/J3/z+LB1B38MwUCaQSI
+iwIbAwUJBaOagAULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIXgAAKCRD/z+LB1B38
+MwtcAP9F/0P+XqhSvgrTqaeCJo1q+GhFApDTuhUM5KTq9Ge22gEA4uFBWstU6hW4
+NZWz2RimQ0CiOtrM6kPeAk0lA3hRbAW4OARpBIiLEgorBgEEAZdVAQUBAQdA/d2z
+5MP0Po7x6SeNxQm6LlPGjZYnnsBcbEQbI9beRjcDAQgHiH4EGBYKACYWIQQplSWG
+ubIcFfqh/J3/z+LB1B38MwUCaQSIiwIbDAUJBaOagAAKCRD/z+LB1B38MyjoAQCP
+5Bujd6QdGCGjeqN9xJNZj/FjdwscMJhbvYnrs1onhgD/VkUjxXov6evzI+V6jwiY
+FYBO+ppehCLVrnZgITvnggGZAg0EaQTB+QEQAL2qfkyhfBeAANwGIUonH/fBA5sn
+52u5Ms5pGoyMfJpxk5fLSsSyDkSoqAqk4TRxvmsGJdM7x+CB/JbNGqukaibd9gQz
+xsJ7XgjqK1u+6/HECc+0uEWWHhet9ElDBKCpVEn+aZsqcXL1wA9l+8ceBNfFi3GZ
+z88wA19n6Y3O6vYB/dQv3i5sH6ms+53gU2PUBuANiRJh4qPiubvOB/TXTh7b5seJ
+5ZtzYItyeYIq8Lx16Oi00jafQVTkftAbBxhAMINqu/+xiIi5ekZkWOcr5CxpZrd7
+GA+elj9TMgGuhmxqSF4M3eXCVIOjj/bXP94AY2bSTew1DRnPkZbPmjDcP1qiR0IT
+X1yM4LkXRGBKzY30DSsMcplw3ge6OW6p2uhcWmspiod0bCnUYHQ907tcpts5AkZv
+It3xFV3gT+HlesNvH821HzC/7Cpg6XIILSVBI6coOqd8SZrhjKFq2tjpfKWfmnLp
+Vrhy04zi8dS9mNoXFSISmok5OI5VQ/uQI4KqMrq0QChL9zRal/M9biouyPShvDks
+CbJi6y643yVn316C4Vr7rU/ODCglpvogJIt+v88j4AMr27UawhzSYVkD9anhMR58
+nrBUW/aM5H0OzaaA8eZ5E+4dWRZbkHjDeK5FyW3I2CKB/LvGMbbWTEWnewqVhe6s
+UGuZcZDWjSb/TQbjABEBAAG0NUpvbmF0aGFuIFNpbm92YXNzaW4tbmFpayA8anNp
+bm92YXNzaW5uYWlrQGFwYWNoZS5vcmc+iQJRBBMBCAA7FiEEXjFAJvzaJVkPFLHI
+CHA1ycSXJi4FAmkEwfkCGwMFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQ
+CHA1ycSXJi7LIRAAq1lnhoAyViMSphVuIRTc218wKdszFujTw/FYcbeUaLHP7/qF
+Ds2sgOtkYs7pHhv7OURufuMhqf1eYE+Zb0Htid9UH/5DGlymdArt9Dve7DGj1qSt
+2L/fVNw5QQyqePw4xbT41ClYy17uUWDc032C+r0a/8A80AKjzRLQcedqxSjhS3Jj
+rkGdSnV9mWk65Ka5/9i5L+eM7yhdZ4Xy68q+l4wwWB+Smjp8+HCAAdJFbQOrGoxV
+fKf3QLTuFqmnwhiq755XKAOE2C/UpmknncZaLcmU2/7Pi3AAwDrcI70GjunXPXOp
+YXxM9tkEn2JLA/jaVY2eODpAM4OYodYAhkKKl0PNB4LPe+Inzn22FiETsY65GP42
+7oRLka25hqzOs3h+LXDriyw15jG9mbtpw3lanY1i20mqG31h7TP+/L3g8fsVOgYz
+SbiA0kk/76hcw3NuFbzpETC26u+RITxESfNDiSiT6mtyHWEqoreW6YZDAsQtCSaD
+lmVTYzCo5g9Ku3gwxZV5eCnlDWYZ1q46XEm4dw7EhWdnNkpQh4MPCvrZIYBm6bHB
+NSyX00noNfKiqgjSA2ecZ67WmpCm+TsLWgJQ2Ktf4pRlP4hXuIoMvt1Dmo83JR9v
+XsaGpJUvYCn3mdCtBI+iFJBC1JR/+Hwo+Z0vLQVJrwO7vK4gO0GXTyaEb2O5Ag0E
+aQTB+QEQAJ2ZtL2h4fFkAIwTHd3XnKdggL5Whf6181LBM8VRn+YvuPD48M1ksNYI
+SaJHiZDWCACJBQanQU5XQMdH1gOZ+OzPBszYnP/nC0lJFh6/TsgWWDnJAd4v0ZFs
+Y31Adcqdx8TvrmK4zJDahObFLmAvaIynLQeFWq80acYeuOQ1WmQMLANBZXG/FW0P
+rzRLspIKYlpovUgPzTohnIkweXdEi8bTsxi0MBESeLJmV5qVCd9B3omsYWLI6kjt
+EhDVHstxQJrjV83fMD9LJmfF+DnAcagD8geE4pRC7rOoGYavwbRVhpK2Ucxm9Pik
+RHI5IV7rgx2Z397mjTKcV5/xC3663gTBhFGPdeM4Y4YjewEan7ZfNOM3Jbt9t/uD
+Vs6yh7Cwo+kynlE33ZkniBfimZSuGrmlOIXa3ATUKz9SZp0e8+ErzEDvemQdNTzv
+xWATfCq5eidFkBr7AC6ppKI4Mm3t1/A/tikjZ9Yz+6s370J4eQd9ZJMaH8T+SwlN
+MrHYqYsoNrKPFGXzzBFPr/G7xmGgoAIEKIHoZzmiEMAqCSL2cc8fXh/9VtVdna9p
+tWnjFSZIprpVIR7MT9LZqj+Yf9qpybeorgeeXTT2C2JHucY/vMCAg7HUxr9Q1EBM
+xjwbC+JwmJeMsJejRy+jGqXktV1O1juD/mZvsUphBbkG5wcPU2n/ABEBAAGJAjYE
+GAEIACAWIQReMUAm/NolWQ8UscgIcDXJxJcmLgUCaQTB+QIbDAAKCRAIcDXJxJcm
+LgUUD/95o+JjC6ZN/Bnp4Fsj23Q13iHjam5nj3mx1opM39J19QpERiN1lifcVpHr
+9F1MincAX0CDbY24AECBf9ghPX+7goYB9bwyBsVjy/X/9iYpXP5XroU7eWu7OHuO
+hE0Yxre9eNN11/tJwwT7eRJN/sZUQ+Aj62tOXK/U1PWVUyrmJ+k2vchwNwKIYdIK
+PEtscYXdw99sNcprBXLEeCqz7heRdtvb5/CNiIZSMFMim7VDFbP5u3PUrZUSyXzV
+htR9Os0QGuTaTHnfjDaK98fTybg3usefRry3EBwAn6QmZomD80Itu/OeGp0OAZwG
+jwkk2UkU47W9S3r8W1qLHuYPPp8/ZFJnwUWk/WJlFaeYPFNXjI9xXwgifQIIAtMC
+TxmHU7XhJK16LVkW9s6ttb/cYRsz0ZP/8m8jfYhLGUJIG9Z6LSZlr4XdXVCt47mn
+EGhoNHMCrLOBtHmV06GsqAsU12LmdkIWtbCReZmm7aRHriKfbWgOyod4fvU+xtk9
+3tFPpdqyq1qG/kbdlmafjFvH9ZeUmsOtLzOUrSdir0NsrKw0rars45V0I+B0XXXi
+4OjCLHKCVtKe4KcyVfqOhCAFW6eDkeXdJRVzk8qglDpdTH1IddNA7n+bQmHyG0xf
+QhcTaRbFRBdsTVF2lQ0rGfNzdSZ0gVP5WNHjQ0+xUpn5SqEysZkCDQRpBMgwARAA
+uns7VrwB6kq58P2iVJQ7hlRpVAZXSfTZV3ydyMMW80G+Pm/KkfUjT3u9eV42ed/+
+00255gNP/cZ9WDkUHrjG7/ACj4UCItRaRJiwo/h1uyDe5FnlLwot7UnVU4nDqoHp
+sHfJN2izQ65P84LBXYXCMKVuF6nQdyTTOxXJRiBcPYJYFHIJCZinnRErAGt8iff0
+XewriqEBC1ppQ9KpzxygvJFZSt4CXrLS6JAaO1+LbTRBbNmSw4I5g6+e2YJljiNB
+rqlM1CQW+/LmvdDLV10SaONKYi+ihIPEFaf2aLQWQYJ4LLj4wuRBD2ijcghRZPR9
+zsK2JIXSm4eR7BKj8Yn9zAMX5o9srlyl92jNrDDVlmNxC95wjVgwyojfUKnsuzFz
+LRO+No7JeAoVSAFQgzAnIuFW/VpQq1Ffu/sVZE4QULhNmDvjC+koL6tSl/AVqLsV
+ypjKuQSIj+fi7RI5FMzWWgjr/1huxcm7XoMzFpeW6jZovW2CwKu6kvl5ehpTb2ra
+JECYDvBBQxM9OyWqWJKS23AHA6L9VA+1VFVErxmsilP7WVyMqXtI13mjn0REPZTA
+Ux4CiQcViOqPwjWYWPrvl+5lEeCail+8my/33P6G0BjWVM80lbltIiLIoVBIxITa
+crYrpq5sG71M/ryLoMItx7VHbPQvZAJbRFW3STdXVV0AEQEAAbRIU2lub3Zhc3Np
+bi1uYWlrIEpvbmF0aGFuIChDT0RFIFNJR05JTkcgS0VZKSA8anNpbm92YXNzaW5u
+YWlrQGFwYWNoZS5vcmc+iQJRBBMBCAA7FiEEpQEbiX5tfJbqjZeYS2Wr+/zVe1YF
+AmkEyDACGwMFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQS2Wr+/zVe1ba
+1w//Yp/M4opIBzvSzCHcaI1W1CtV09rDkYxEQJTs0er0QDBxwb0GwUA/q88vAaMk
+gCG2bPh/b5igNHeqo1An+0HvcVKr1w2IiHBgxEc1SaKvc6aaSYicEWM6Xs4HnpKg
+VsropgcLYLOqR68eBNWiHcQ2Rjd0VMIiqsWtuPN638mzpOlA7Bm2uRwy0Wf0YkFq
+nlTFjodxmOZji17sUdg9G6prtvW2GLquqVaHvhHvcyfp8QWH3NFNwidsBlBGlH5w
+3QxFi2Xf3XZGEmSete2SpOZ9PJ16f22vf9UsZf0KgEjmJ/twjXqaWcNYaqxkjoc4
+9opTeetG2CBfQrmAzubEypaTnpX5nByS77ti38ku7C8/fKvepdSjBq8TZ85tfXM6
+5TYOakOVtaG8iQs86r7jXfXdZBgxLDw3HvM1ed4UYVPNq250d61iAd1bvpK6Sajc
+3/lwv3ZcyKT/QO2zHuRwYBk82UcGmP8KQNAsdjmv/cyxV1an9NNpu7i7uBJAZdOW
+mTEBbnKgJpSVE+nae0HZL7lKyYn4mJvM4Tk8Ay3fDxf+U7ZIL5pLYFiHu167tVZJ
+ugA/jSqzKYhdhhFLmxnWT9cB3QiMwDLiotwEAXIT5Vj+mds6oH2oHsOVrvVXj0Wt
+PaZWRONG4uot+sudwBctQhgguIscHTGRa0uZCgZKzT/6L9a5Ag0EaQTIMAEQANhB
+JA/kkEJ8O71YPHYGheyZqHRsyvZ3vRjL6v1HRyey/cYa1Q0MBOqo70SpntMBM1M0
+57LMERwgjObuAe+gUqNnBzjax7BbkLqV6L630cnAYc0OlkZHFrFWEQRISr+Fhn3i
+avYMF2XjSQm7+7DoQ8fKp8rhLiEGygvQNwjR79l9e88IHtzqrH/wZezvixTEAJuT
+PbG/A0MZEFLUFpevjCv6NdoqESWS3GLjOIeZqzVvDoN4JN3Fbz+hh4P4aKh62uDY
+XOyM66cTTmrTEASG1j2D4OQ8wQmsgwe/JrYoCmGWedWbRlUomUUWNzcN3IvtFtXR
+URvbN7cAUodV7YeQBrv2qUtmN4SHP8Z3D46kQ2MreruF/CYt7P+tvKuEoQY1Wm/A
+12VFWtHB1uHDbAiyIamEFuW2ud97HfvkfsClB+4hjqj82zcfGVs/12VVzhwgyi+F
+XNij5M+z4BvjDRYJkLhQ7/OZ7YqZ8HlCSaIY9/OIa/fy8V2tVkjnBwufIpXhhgBv
+NooF9FhZa9YQjuNUH4flrI3WSbthpXUOOZK0bVwqAtbg1K/7KyEYYKCO+unmFHOo
+MaSf07Iml63MrJxRQgm5KDH6E8UlZPk7ksz7FuSKcurHNHCD4yQ9bu4J3zgjm3a0
+xjGnuheBoNWfoQ8Jf0ogphdIAIDsDisecuNf6AiTABEBAAGJAjYEGAEIACAWIQSl
+ARuJfm18luqNl5hLZav7/NV7VgUCaQTIMAIbDAAKCRBLZav7/NV7VgvxD/9ljI9o
+4NYSBh2qlkNcuqODip7n8sonif7R78q0shdHVPXs+ynDRSpysRqFPRC3Sw2gTf9J
+RQMomIzPMSEB9tizgn2yp07u9eg6WbzXYcRDJ1tCCLO7Oz8IlW/e+AWPTl1IlbsW
+9mSbKii6j9ONJ+PBH+GEy1ofDaCtAw2n/doi8bX8clsx3YWN9WBIZpR/OlpdrjrK
+uxjLMmh5ONyKfOFEeG6vLQERDn4g/kxPPVsHQ+uxZ+6WwFffz8Gr4X69mUDmEZ6w
+wDGncZqvbD2Er2qZbXi6zP1KqIGK7+GrKX112CLuQbD4WEGvOQRoiFZtCyqj8Vdj
+S/K93yHSEskdzitPjBw1jbRL2b+dOH6+K2hSmq3dPi1LyeHba3aV11D8eydyRlYS
+FgXAyQT7tmoIWtm8cytOsXriw+1UJZNHl8x2WW23ivWconugMk/rEepocASSXTlC
+f/HwmNmWzR7VCAnoq4GYsL3kmO4WprNoabwWtXQQWbhAqEdEpsT1OL6EtbcQT/+S
+MDtRnlk7QiAuVWIWpjnKDWX+oWvyBtJB+xMknWzXs01sAo51vGX2JJ+solQGH9E4
+AMi2rbgE2IVnBkaCtndxn4WhBrJ2JtYJUU5OHdGXwLUB7hOG+z/cL1Z9ZuR6ZY7C
+7hmSTsEeCwdIuun6IgTf994VX6WtMJeof9ckopgzBGkExe4WCSsGAQQB2kcPAQEH
+QCVoXd++PBklqgyh3Ukj6nF4GE3QM25vOXHBNgyPiWNttDVKb25hdGhhbiBTaW5v
+dmFzc2luLW5haWsgPGpzaW5vdmFzc2lubmFpa0BhcGFjaGUub3JnPoiZBBMWCgBB
+FiEE/QNvEJqwdYQsYQadoRk2F/k1npIFAmkExe4CGwMFCQWjmoAFCwkIBwICIgIG
+FQoJCAsCBBYCAwECHgcCF4AACgkQoRk2F/k1npJJzAD/bLS/RTxMbvpoqP2bi9dG
+dDTIPXb+DduvHsJ1hzezFKAA/R8MW7tj7uO/UkBwybT71AUrd+bTMm0nwQNFP6MK
+ifQJuDgEaQTF7hIKKwYBBAGXVQEFAQEHQJ+FoVKTA3dteV84NeYuA0wBVv4ABITp
+VkN51fVWSbxdAwEIB4h+BBgWCgAmFiEE/QNvEJqwdYQsYQadoRk2F/k1npIFAmkE
+xe4CGwwFCQWjmoAACgkQoRk2F/k1npKJqwEArlGZ0P/KwH8/tpUADZRhWUZ+UP+N
+cxkybBARo52TPegBANfyfdJhsZKRSCfWSqOWtvLKOdCOP9+V5k4cuSagldcP
+=S+4x
 -----END PGP PUBLIC KEY BLOCK-----
diff --git a/NOTICE b/NOTICE
index ff07edfe3..869a870f2 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,5 +1,5 @@
 Apache Unomi
-Copyright 2015-2023 The Apache Software Foundation
+Copyright 2015-2025 The Apache Software Foundation
 
 This product includes software developed at
 The Apache Software Foundation (http://www.apache.org/).
diff --git a/NOTICE.template b/NOTICE.template
index 2ae4cfc5d..f76a54ff9 100644
--- a/NOTICE.template
+++ b/NOTICE.template
@@ -1,5 +1,5 @@
 Apache Unomi
-Copyright 2015-2023 The Apache Software Foundation
+Copyright 2015-2025 The Apache Software Foundation
 
 This product includes software developed at
 The Apache Software Foundation (http://www.apache.org/).
diff --git a/build.sh b/build.sh
index bbc60f3b2..95ee78d51 100755
--- a/build.sh
+++ b/build.sh
@@ -149,7 +149,7 @@ print_section() {
 print_status() {
     local status=$1
     local message=$2
-    
+
     if [ "$HAS_COLORS" -eq 1 ]; then
         case $status in
             "success")
@@ -228,7 +228,7 @@ prompt_continue() {
     if [ -z "$prompt_text" ]; then
         prompt_text="Continue?"
     fi
-    
+
     read -p "$prompt_text (y/N) " -n 1 -r
     echo
     if [[ ! $REPLY =~ ^[Yy]$ ]]; then
@@ -339,61 +339,32 @@ EOF
 
     echo
     echo "Examples:"
-    if [ "$HAS_COLORS" -eq 1 ]; then
-        echo -e "  ${GRAY}# Build with integration tests using OpenSearch${NC}"
-        echo -e "  ${GRAY}$0 --integration-tests --use-opensearch${NC}"
-        echo -e
-        echo -e "  ${GRAY}# Build skipping unit tests but running integration 
tests${NC}"
-        echo -e "  ${GRAY}$0 --skip-unit-tests --integration-tests${NC}"
-        echo -e
-        echo -e "  ${GRAY}# Build in debug mode${NC}"
-        echo -e "  ${GRAY}$0 --debug --debug-port 5006 --debug-suspend${NC}"
-        echo -e
-        echo -e "  ${GRAY}# Deploy to specific Karaf instance${NC}"
-        echo -e "  ${GRAY}$0 --deploy --karaf-home ~/apache-karaf${NC}"
-        echo -e
-        echo -e "  ${GRAY}# Build without Karaf and auto-start OpenSearch${NC}"
-        echo -e "  ${GRAY}$0 --no-karaf --auto-start opensearch${NC}"
-        echo -e
-        echo -e "  ${GRAY}# Run a single integration test${NC}"
-        echo -e "  ${GRAY}$0 --integration-tests --single-test 
org.apache.unomi.itests.graphql.GraphQLEventIT${NC}"
-        echo -e
-        echo -e "  ${GRAY}# Debug a single integration test${NC}"
-        echo -e "  ${GRAY}$0 --integration-tests --single-test 
org.apache.unomi.itests.graphql.GraphQLEventIT --it-debug 
--it-debug-suspend${NC}"
-        echo -e
-        echo -e "  ${GRAY}# Run without colored output${NC}"
-        echo -e "  ${GRAY}NO_COLOR=1 $0${NC}"
-        echo -e "  ${GRAY}# or ${NC}"
-        echo -e "  ${GRAY}export NO_COLOR=1${NC}"
-        echo -e "  ${GRAY}$0${NC}"
-    else
-        echo "  # Build with integration tests using OpenSearch"
-        echo "  $0 --integration-tests --use-opensearch"
-        echo
-        echo "  # Build skipping unit tests but running integration tests"
-        echo "  $0 --skip-unit-tests --integration-tests"
-        echo
-        echo "  # Build in debug mode"
-        echo "  $0 --debug --debug-port 5006 --debug-suspend"
-        echo
-        echo "  # Deploy to specific Karaf instance"
-        echo "  $0 --deploy --karaf-home ~/apache-karaf"
-        echo
-        echo "  # Build without Karaf and auto-start OpenSearch"
-        echo "  $0 --no-karaf --auto-start opensearch"
-        echo
-        echo "  # Run a single integration test"
-        echo "  $0 --integration-tests --single-test 
org.apache.unomi.itests.graphql.GraphQLEventIT"
-        echo
-        echo "  # Debug a single integration test"
-        echo "  $0 --integration-tests --single-test 
org.apache.unomi.itests.graphql.GraphQLEventIT --it-debug --it-debug-suspend"
-        echo
-        echo "  # Run without colored output"
-        echo "  NO_COLOR=1 $0"
-        echo "  # or"
-        echo "  export NO_COLOR=1"
-        echo "  $0"
-    fi
+    echo "  # Build with integration tests using OpenSearch"
+    echo "  $0 --integration-tests --use-opensearch"
+    echo
+    echo "  # Build skipping unit tests but running integration tests"
+    echo "  $0 --skip-unit-tests --integration-tests"
+    echo
+    echo "  # Build in debug mode"
+    echo "  $0 --debug --debug-port 5006 --debug-suspend"
+    echo
+    echo "  # Deploy to specific Karaf instance"
+    echo "  $0 --deploy --karaf-home ~/apache-karaf"
+    echo
+    echo "  # Build without Karaf and auto-start OpenSearch"
+    echo "  $0 --no-karaf --auto-start opensearch"
+    echo
+    echo "  # Run a single integration test"
+    echo "  $0 --integration-tests --single-test 
org.apache.unomi.itests.graphql.GraphQLEventIT"
+    echo
+    echo "  # Debug a single integration test"
+    echo "  $0 --integration-tests --single-test 
org.apache.unomi.itests.graphql.GraphQLEventIT --it-debug --it-debug-suspend"
+    echo
+    echo "  # Run without colored output"
+    echo "  NO_COLOR=1 $0"
+    echo "  # or"
+    echo "  export NO_COLOR=1"
+    echo "  $0"
     exit 1
 }
 
@@ -512,7 +483,7 @@ check_requirements() {
     print_status "info" "Checking required tools..."
     local required_tools=("mvn" "java" "tar" "gzip" "dot")
     local missing_tools=()
-    
+
     echo "Required tools:"
     for tool in "${required_tools[@]}"; do
         if command_exists "$tool"; then
@@ -616,7 +587,7 @@ check_requirements() {
 
     # 3. System Resources Check
     print_status "info" "Checking system resources..."
-    
+
     # Memory check
     if command_exists free; then
         available_memory=$(free -m | awk '/^Mem:/{print $2}')
@@ -660,7 +631,7 @@ check_requirements() {
 
     # 4. Configuration Check
     print_status "info" "Checking configuration..."
-    
+
     # Maven settings check
     if [ ! -f ~/.m2/settings.xml ]; then
         print_status "warning" "✗ Maven settings.xml not found"
@@ -712,7 +683,7 @@ check_requirements() {
 
     # 5. Option Validation
     print_status "info" "Validating options..."
-    
+
     if [ "$SKIP_TESTS" = true ] && [ "$RUN_INTEGRATION_TESTS" = true ]; then
         print_status "error" "Cannot use --skip-tests and --integration-tests 
together"
         has_errors=true
@@ -844,7 +815,7 @@ fi
 check_integration_test_env_vars() {
     local detected_vars=()
     local script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-    
+
     # Check for Elasticsearch environment variables
     if [ -n "${UNOMI_ELASTICSEARCH_CLUSTERNAME+x}" ] || \
        [ -n "${UNOMI_ELASTICSEARCH_USERNAME+x}" ] || \
@@ -853,7 +824,7 @@ check_integration_test_env_vars() {
        [ -n "${UNOMI_ELASTICSEARCH_SSL_TRUST_ALL_CERTIFICATES+x}" ]; then
         detected_vars+=("Elasticsearch")
     fi
-    
+
     # Check for OpenSearch environment variables
     if [ -n "${UNOMI_OPENSEARCH_CLUSTERNAME+x}" ] || \
        [ -n "${UNOMI_OPENSEARCH_ADDRESSES+x}" ] || \
@@ -863,7 +834,7 @@ check_integration_test_env_vars() {
        [ -n "${UNOMI_OPENSEARCH_SSL_TRUST_ALL_CERTIFICATES+x}" ]; then
         detected_vars+=("OpenSearch")
     fi
-    
+
     if [ ${#detected_vars[@]} -gt 0 ]; then
         print_status "error" "Environment variables for ${detected_vars[*]} 
are set and will interfere with integration tests"
         echo ""
@@ -914,13 +885,13 @@ if [ "$RUN_INTEGRATION_TESTS" = true ]; then
         echo "Running integration tests with ElasticSearch"
     fi
     MVN_OPTS="$MVN_OPTS -P integration-tests"
-    
+
     # Add single test option if specified
     if [ ! -z "$SINGLE_TEST" ]; then
         MVN_OPTS="$MVN_OPTS -Dit.test=$SINGLE_TEST"
         echo "Running single integration test: $SINGLE_TEST"
     fi
-    
+
     # Add integration test debug options if enabled
     if [ "$IT_DEBUG" = true ]; then
         DEBUG_OPTS="port=$IT_DEBUG_PORT"
@@ -962,7 +933,7 @@ else
         MVN_OPTS="$MVN_OPTS -P skip-unit-tests"
         echo "Skipping unit tests"
     fi
-    
+
     # Warn if single test was specified but integration tests are not enabled
     if [ ! -z "$SINGLE_TEST" ]; then
         print_status "warning" "Single test specified but integration tests 
are not enabled. Use --integration-tests to run the test."
diff --git a/generate-manual-config.sh b/generate-manual-config.sh
index c8709fcf3..e02e5ef5e 100644
--- a/generate-manual-config.sh
+++ b/generate-manual-config.sh
@@ -25,13 +25,13 @@
 # === VERSION CONFIGURATION ===
 # Latest version (master branch)
 LATEST_BRANCH="master"
-LATEST_VERSION="3.1.0-SNAPSHOT"  
+LATEST_VERSION="3.1.0-SNAPSHOT"
 LATEST_DIR="latest"
 
-# Stable version (release branch)  
-STABLE_BRANCH="unomi-2.7.x"
-STABLE_VERSION="2.7.0"
-STABLE_DIR="2_7_x"
+# Stable version (release branch)
+STABLE_BRANCH="unomi-3.0.x"
+STABLE_VERSION="3.0.0"
+STABLE_DIR="3_0_x"
 
 # === INFRASTRUCTURE CONFIGURATION ===
 # Git repository URL
@@ -82,7 +82,7 @@ PROJECT_STAGING_DIR="target/staging"
 # === TIMEOUTS ===
 # Command timeouts in seconds
 MAVEN_TIMEOUT=1800  # 30 minutes
-GIT_TIMEOUT=300     # 5 minutes  
+GIT_TIMEOUT=300     # 5 minutes
 SVN_TIMEOUT=600     # 10 minutes
 
 # Export all configuration variables
@@ -95,4 +95,4 @@ export MAVEN_SIGN_PROFILE MAVEN_INTEGRATION_PROFILE
 export MAVEN_CLEAN_GOAL MAVEN_INSTALL_GOAL MAVEN_JAVADOC_GOAL
 export CLONE_MANUAL_SOURCE_DIR CLONE_MANUAL_TARGET_DIR CLONE_API_TARGET_DIR 
CLONE_STAGING_DIR
 export PROJECT_STAGING_DIR
-export MAVEN_TIMEOUT GIT_TIMEOUT SVN_TIMEOUT
\ No newline at end of file
+export MAVEN_TIMEOUT GIT_TIMEOUT SVN_TIMEOUT
diff --git a/itests/src/test/java/org/apache/unomi/itests/AllITs.java 
b/itests/src/test/java/org/apache/unomi/itests/AllITs.java
index b08561cca..e9ec8ab60 100644
--- a/itests/src/test/java/org/apache/unomi/itests/AllITs.java
+++ b/itests/src/test/java/org/apache/unomi/itests/AllITs.java
@@ -31,7 +31,7 @@ import org.junit.runners.Suite.SuiteClasses;
 @RunWith(ProgressSuite.class)
 @SuiteClasses({
         Migrate16xToCurrentVersionIT.class,
-        MigrationIT.class,  
+        MigrationIT.class,
         BasicIT.class,
         ConditionEvaluatorIT.class,
         ConditionQueryBuilderIT.class,
diff --git a/itests/src/test/java/org/apache/unomi/itests/ProgressListener.java 
b/itests/src/test/java/org/apache/unomi/itests/ProgressListener.java
index ca764ba06..3e3b58d11 100644
--- a/itests/src/test/java/org/apache/unomi/itests/ProgressListener.java
+++ b/itests/src/test/java/org/apache/unomi/itests/ProgressListener.java
@@ -24,42 +24,115 @@ import org.junit.runner.notification.RunListener;
 import java.util.PriorityQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 
+/**
+ * A comprehensive JUnit test run listener that provides enhanced progress 
reporting
+ * with visual elements, timing information, and motivational quotes during 
test execution.
+ *
+ * <p>This listener extends JUnit's {@link RunListener} to provide real-time 
feedback
+ * about test execution progress. It features:</p>
+ * <ul>
+ *   <li>ASCII art logo display at test suite startup</li>
+ *   <li>Real-time progress bar with percentage completion</li>
+ *   <li>Colorized output (when ANSI is supported)</li>
+ *   <li>Estimated time remaining calculations</li>
+ *   <li>Test success/failure counters</li>
+ *   <li>Top 10 slowest tests tracking and reporting</li>
+ *   <li>Motivational quotes displayed at progress milestones</li>
+ *   <li>CSV-formatted performance data output</li>
+ * </ul>
+ *
+ * <p>The listener automatically detects ANSI color support based on the 
terminal
+ * environment and adjusts output accordingly. When ANSI is not supported,
+ * plain text output is used instead.</p>
+ *
+ * <p>Example usage in test configuration:</p>
+ * <pre>{@code
+ * JUnitCore core = new JUnitCore();
+ * ProgressListener listener = new ProgressListener(totalTestCount, 
completedCounter);
+ * core.addListener(listener);
+ * core.run(testClasses);
+ * }</pre>
+ *
+ * <p>The listener tracks test execution times and maintains a priority queue
+ * of the slowest tests, which is reported at the end of the test run along
+ * with CSV-formatted data for further analysis.</p>
+ *
+ * @author Apache Unomi
+ * @since 3.0.0
+ * @see org.junit.runner.notification.RunListener
+ * @see org.junit.runner.Description
+ * @see org.junit.runner.Result
+ */
 public class ProgressListener extends RunListener {
 
+    /** ANSI escape code to reset text formatting */
     private static final String RESET = "\u001B[0m";
+    /** ANSI escape code for green text color */
     private static final String GREEN = "\u001B[32m";
+    /** ANSI escape code for yellow text color */
     private static final String YELLOW = "\u001B[33m";
+    /** ANSI escape code for red text color */
     private static final String RED = "\u001B[31m";
+    /** ANSI escape code for cyan text color */
     private static final String CYAN = "\u001B[36m";
+    /** ANSI escape code for blue text color */
     private static final String BLUE = "\u001B[34m";
 
+    /** Array of motivational quotes displayed at progress milestones */
     private static final String[] QUOTES = {
             "Success is not final, failure is not fatal: It is the courage to 
continue that counts. - Winston Churchill",
             "Believe you can and you're halfway there. - Theodore Roosevelt",
-            "Don’t watch the clock; do what it does. Keep going. - Sam 
Levenson",
+            "Don't watch the clock; do what it does. Keep going. - Sam 
Levenson",
             "It does not matter how slowly you go as long as you do not stop. 
- Confucius",
             "Hardships often prepare ordinary people for an extraordinary 
destiny. - C.S. Lewis"
     };
 
+    /**
+     * Inner class representing a test execution time record.
+     * Used to track individual test performance for reporting the slowest 
tests.
+     */
     private static class TestTime {
+        /** The display name of the test */
         String name;
+        /** The execution time in milliseconds */
         long time;
 
+        /**
+         * Creates a new test time record.
+         *
+         * @param name the display name of the test
+         * @param time the execution time in milliseconds
+         */
         TestTime(String name, long time) {
             this.name = name;
             this.time = time;
         }
     }
 
+    /** Total number of tests to be executed */
     private final int totalTests;
+    /** Thread-safe counter for completed tests */
     private final AtomicInteger completedTests;
+    /** Thread-safe counter for successful tests */
     private final AtomicInteger successfulTests = new AtomicInteger(0);
+    /** Thread-safe counter for failed tests */
     private final AtomicInteger failedTests = new AtomicInteger(0);
+    /** Priority queue to track the slowest tests (limited to top 10) */
     private final PriorityQueue<TestTime> slowTests;
+    /** Flag indicating whether ANSI color codes are supported in the terminal 
*/
     private final boolean ansiSupported;
+    /** Timestamp when the test suite started */
     private long startTime = System.currentTimeMillis();
+    /** Timestamp when the current individual test started */
     private long startTestTime = System.currentTimeMillis();
 
+    /**
+     * Creates a new ProgressListener instance.
+     *
+     * @param totalTests the total number of tests that will be executed
+     * @param completedTests a thread-safe counter that tracks the number of 
completed tests
+     *                       (this should be shared with the test runner for 
accurate progress tracking)
+     */
     public ProgressListener(int totalTests, AtomicInteger completedTests) {
         this.totalTests = totalTests;
         this.completedTests = completedTests;
@@ -67,11 +140,23 @@ public class ProgressListener extends RunListener {
         this.ansiSupported = isAnsiSupported();
     }
 
+    /**
+     * Determines if the current terminal supports ANSI color codes.
+     *
+     * @return true if ANSI colors are supported, false otherwise
+     */
     private boolean isAnsiSupported() {
         String term = System.getenv("TERM");
         return System.console() != null && term != null && 
term.contains("xterm");
     }
 
+    /**
+     * Applies ANSI color codes to text if the terminal supports them.
+     *
+     * @param text the text to colorize
+     * @param color the ANSI color code to apply
+     * @return the colorized text if ANSI is supported, otherwise the original 
text
+     */
     private String colorize(String text, String color) {
         if (ansiSupported) {
             return color + text + RESET;
@@ -79,6 +164,11 @@ public class ProgressListener extends RunListener {
         return text;
     }
 
+    /**
+     * Called when the test run starts. Displays an ASCII art logo and welcome 
message.
+     *
+     * @param description the description of the test run
+     */
     @Override
     public void testRunStarted(Description description) {
         startTime = System.currentTimeMillis();
@@ -121,11 +211,21 @@ public class ProgressListener extends RunListener {
         System.out.println(colorize(bottomBorder, CYAN));
     }
 
+    /**
+     * Called when an individual test starts. Records the start time for 
timing calculations.
+     *
+     * @param description the description of the test that started
+     */
     @Override
     public void testStarted(Description description) {
         startTestTime = System.currentTimeMillis();
     }
 
+    /**
+     * Called when an individual test finishes successfully. Updates counters 
and displays progress.
+     *
+     * @param description the description of the test that finished
+     */
     @Override
     public void testFinished(Description description) {
         long testDuration = System.currentTimeMillis() - startTestTime;
@@ -139,6 +239,11 @@ public class ProgressListener extends RunListener {
         displayProgress();
     }
 
+    /**
+     * Called when a test fails. Updates failure counters and displays the 
failure message.
+     *
+     * @param failure the failure information
+     */
     @Override
     public void testFailure(Failure failure) {
         successfulTests.decrementAndGet(); // Remove the previous success 
count for this test.
@@ -147,6 +252,11 @@ public class ProgressListener extends RunListener {
         displayProgress();
     }
 
+    /**
+     * Called when the entire test run finishes. Displays final statistics and 
performance data.
+     *
+     * @param result the final result of the test run
+     */
     @Override
     public void testRunFinished(Result result) {
         long elapsedTime = System.currentTimeMillis() - startTime;
@@ -190,6 +300,9 @@ public class ProgressListener extends RunListener {
 
     /**
      * Escapes special characters for CSV compatibility.
+     *
+     * @param value the string value to escape
+     * @return the escaped string suitable for CSV output
      */
     private String escapeCsv(String value) {
         if (value.contains(",") || value.contains("\"") || 
value.contains("\n")) {
@@ -198,6 +311,11 @@ public class ProgressListener extends RunListener {
         return value;
     }
 
+    /**
+     * Displays the current progress of the test run including progress bar,
+     * percentage completion, estimated time remaining, and success/failure 
counts.
+     * Also displays motivational quotes at progress milestones.
+     */
     private void displayProgress() {
         int completed = completedTests.get();
         long elapsedTime = System.currentTimeMillis() - startTime;
@@ -236,6 +354,12 @@ public class ProgressListener extends RunListener {
         }
     }
 
+    /**
+     * Formats a time duration in milliseconds into a human-readable string.
+     *
+     * @param timeInMillis the time duration in milliseconds
+     * @return a formatted time string (e.g., "1h 23m 45s" or "2m 30s")
+     */
     private String formatTime(long timeInMillis) {
         long seconds = timeInMillis / 1000;
         long hours = seconds / 3600;
@@ -259,6 +383,12 @@ public class ProgressListener extends RunListener {
         return timeBuilder.toString().trim(); // Trim any trailing spaces
     }
 
+    /**
+     * Generates a visual progress bar based on the completion percentage.
+     *
+     * @param progressPercentage the completion percentage (0.0 to 100.0)
+     * @return a string representation of the progress bar with appropriate 
colors
+     */
     private String generateProgressBar(double progressPercentage) {
         int totalBars = 30;
         int completedBars = (int) (progressPercentage / (100.0 / totalBars));
diff --git a/itests/src/test/java/org/apache/unomi/itests/ProgressSuite.java 
b/itests/src/test/java/org/apache/unomi/itests/ProgressSuite.java
index 219f2d73d..02d8da8e0 100644
--- a/itests/src/test/java/org/apache/unomi/itests/ProgressSuite.java
+++ b/itests/src/test/java/org/apache/unomi/itests/ProgressSuite.java
@@ -25,16 +25,85 @@ import org.junit.runners.model.InitializationError;
 import java.lang.reflect.Method;
 import java.util.concurrent.atomic.AtomicInteger;
 
+/**
+ * A custom JUnit test suite runner that provides enhanced progress reporting
+ * during test execution by integrating with the {@link ProgressListener}.
+ *
+ * <p>This suite extends JUnit's standard {@link Suite} runner to automatically
+ * count test methods across the entire class hierarchy and provide real-time
+ * progress feedback. It features:</p>
+ * <ul>
+ *   <li>Automatic test method counting across class hierarchies</li>
+ *   <li>Integration with {@link ProgressListener} for enhanced progress 
reporting</li>
+ *   <li>Thread-safe progress tracking using atomic counters</li>
+ *   <li>Support for nested test classes and inheritance</li>
+ * </ul>
+ *
+ * <p>The suite automatically counts all methods annotated with {@code @Test}
+ * in the specified test classes and their superclasses, providing an accurate
+ * total count for progress reporting.</p>
+ *
+ * <p>Example usage:</p>
+ * <pre>{@code
+ * @RunWith(ProgressSuite.class)
+ * @Suite.SuiteClasses({
+ *     TestClass1.class,
+ *     TestClass2.class,
+ *     TestClass3.class
+ * })
+ * public class AllTestsSuite {
+ *     // This class serves as a container for the test suite
+ * }
+ * }</pre>
+ *
+ * <p>The suite will automatically:</p>
+ * <ul>
+ *   <li>Count all test methods in the specified classes and their 
hierarchies</li>
+ *   <li>Create a {@link ProgressListener} with the accurate test count</li>
+ *   <li>Display real-time progress with visual elements and timing 
information</li>
+ *   <li>Provide detailed performance statistics at completion</li>
+ * </ul>
+ *
+ * @author Apache Unomi
+ * @since 3.0.0
+ * @see org.junit.runners.Suite
+ * @see org.apache.unomi.itests.ProgressListener
+ * @see org.junit.runner.RunWith
+ * @see org.junit.runners.Suite.SuiteClasses
+ */
 public class ProgressSuite extends Suite {
 
+    /** Total number of test methods across all classes in the suite */
     private final int totalTests;
+    /** Thread-safe counter for completed tests, shared with ProgressListener 
*/
     private final AtomicInteger completedTests = new AtomicInteger(0);
 
+    /**
+     * Creates a new ProgressSuite instance for the specified test suite class.
+     *
+     * <p>The constructor initializes the suite by:</p>
+     * <ul>
+     *   <li>Extracting test classes from the {@code @Suite.SuiteClasses} 
annotation</li>
+     *   <li>Counting all test methods across the class hierarchies</li>
+     *   <li>Initializing the progress tracking infrastructure</li>
+     * </ul>
+     *
+     * @param klass the test suite class that must be annotated with {@code 
@Suite.SuiteClasses}
+     * @throws InitializationError if the class is not properly annotated or 
if there are
+     *                             issues with the test class configuration
+     */
     public ProgressSuite(Class<?> klass) throws InitializationError {
         super(klass, getAnnotatedClasses(klass));
         this.totalTests = countTestMethods(getAnnotatedClasses(klass));
     }
 
+    /**
+     * Extracts the test classes from the {@code @Suite.SuiteClasses} 
annotation.
+     *
+     * @param klass the test suite class to examine
+     * @return an array of test classes specified in the annotation
+     * @throws InitializationError if the class is not annotated with {@code 
@Suite.SuiteClasses}
+     */
     private static Class<?>[] getAnnotatedClasses(Class<?> klass) throws 
InitializationError {
         Suite.SuiteClasses annotation = 
klass.getAnnotation(Suite.SuiteClasses.class);
         if (annotation == null) {
@@ -44,6 +113,12 @@ public class ProgressSuite extends Suite {
         return annotation.value();
     }
 
+    /**
+     * Counts the total number of test methods across all specified test 
classes.
+     *
+     * @param testClasses array of test classes to count methods in
+     * @return the total number of methods annotated with {@code @Test}
+     */
     private static int countTestMethods(Class<?>[] testClasses) {
         int count = 0;
         for (Class<?> testClass : testClasses) {
@@ -52,6 +127,16 @@ public class ProgressSuite extends Suite {
         return count;
     }
 
+    /**
+     * Recursively counts test methods in a class and its entire inheritance 
hierarchy.
+     *
+     * <p>This method traverses the class hierarchy upward from the given 
class,
+     * counting all methods annotated with {@code @Test} in each class. It 
stops
+     * at {@code Object.class} to avoid counting system methods.</p>
+     *
+     * @param clazz the class to count test methods in (including superclasses)
+     * @return the number of test methods found in this class and its hierarchy
+     */
     private static int countTestMethodsInClassHierarchy(Class<?> clazz) {
         int count = 0;
         if (clazz == null || clazz == Object.class) {
@@ -67,6 +152,26 @@ public class ProgressSuite extends Suite {
         return count;
     }
 
+    /**
+     * Executes the test suite with enhanced progress reporting.
+     *
+     * <p>This method overrides the standard suite execution to integrate
+     * the {@link ProgressListener} for real-time progress feedback. It:</p>
+     * <ul>
+     *   <li>Creates a {@link ProgressListener} with the accurate test 
count</li>
+     *   <li>Manually triggers the test run started event (since the listener
+     *       is registered after this event would normally be fired)</li>
+     *   <li>Registers the listener with the run notifier</li>
+     *   <li>Delegates to the parent suite execution</li>
+     * </ul>
+     *
+     * <p>Note: Two separate {@link ProgressListener} instances are created:
+     * one for manual event triggering and another for the notifier. This is
+     * necessary because the test run started event is fired before listeners
+     * can be registered.</p>
+     *
+     * @param notifier the run notifier to use for test execution notifications
+     */
     @Override
     public void run(RunNotifier notifier) {
         ProgressListener listener = new ProgressListener(totalTests, 
completedTests);
diff --git a/itests/src/test/java/org/apache/unomi/itests/SecurityIT.java 
b/itests/src/test/java/org/apache/unomi/itests/SecurityIT.java
index 734619be2..5e6b51847 100644
--- a/itests/src/test/java/org/apache/unomi/itests/SecurityIT.java
+++ b/itests/src/test/java/org/apache/unomi/itests/SecurityIT.java
@@ -55,52 +55,6 @@ public class SecurityIT extends BaseIT {
         objectMapper = CustomObjectMapper.getObjectMapper();
     }
 
-    @Test
-    public void testOGNLInjection() throws Exception {
-        ContextRequest contextRequest = new ContextRequest();
-        List<PersonalizationService.PersonalizationRequest> personalizations = 
new ArrayList<>();
-        PersonalizationService.PersonalizationRequest personalizationRequest = 
new PersonalizationService.PersonalizationRequest();
-        personalizationRequest.setId("vuln-test");
-        personalizationRequest.setStrategy("matching-first");
-        Map<String, Object> strategyOptions = new HashMap<>();
-        strategyOptions.put("fallback", "var2");
-        personalizationRequest.setStrategyOptions(strategyOptions);
-        List<PersonalizationService.PersonalizedContent> 
personalizationContents = new ArrayList<>();
-        PersonalizationService.PersonalizedContent var1Content = new 
PersonalizationService.PersonalizedContent();
-        var1Content.setId("var1");
-        List<PersonalizationService.Filter> filters = new ArrayList<>();
-        PersonalizationService.Filter filter = new 
PersonalizationService.Filter();
-        Condition condition = new Condition();
-        File vulnFile = new File("target/vuln-file.txt");
-        if (vulnFile.exists()) {
-            vulnFile.delete();
-        }
-        condition.setConditionTypeId("profilePropertyCondition");
-        condition.setParameter("propertyName", 
"@java.lang.Runtime@getRuntime().exec('touch " + vulnFile.getCanonicalPath() + 
"')");
-        condition.setParameter("comparisonOperator", "equals");
-        condition.setParameter("propertyValue",
-                "script::java.io.PrintWriter writer = new 
java.io.PrintWriter(new java.io.BufferedWriter(new java.io.FileWriter(\""
-                        + vulnFile.getCanonicalPath() + "\", 
true)));\nwriter.println(\"test\");\nwriter.close();");
-        filter.setCondition(condition);
-        filters.add(filter);
-        var1Content.setFilters(filters);
-        personalizationContents.add(var1Content);
-        PersonalizationService.PersonalizedContent var2Content = new 
PersonalizationService.PersonalizedContent();
-        var2Content.setId("var2");
-        personalizationContents.add(var2Content);
-        personalizationRequest.setContents(personalizationContents);
-
-        personalizations.add(personalizationRequest);
-        contextRequest.setPersonalizations(personalizations);
-
-        contextRequest.setSessionId(SESSION_ID);
-        HttpPost request = new HttpPost(getFullUrl("/cxs/context.json"));
-        request.setEntity(new 
StringEntity(objectMapper.writeValueAsString(contextRequest), 
ContentType.create("application/json")));
-
-        TestUtils.RequestResponse response = 
executeContextJSONRequest(request, SESSION_ID);
-        assertFalse("Vulnerability successfully executed ! File created at " + 
vulnFile.getCanonicalPath(), vulnFile.exists());
-    }
-
     @Test
     public void testSystemOperationsAndContext() throws Exception {
         SecurityService securityService = 
getOsgiService(SecurityService.class);
diff --git a/manual/src/main/asciidoc/building-and-deploying.adoc 
b/manual/src/main/asciidoc/building-and-deploying.adoc
index 57337fd44..a03249623 100644
--- a/manual/src/main/asciidoc/building-and-deploying.adoc
+++ b/manual/src/main/asciidoc/building-and-deploying.adoc
@@ -14,6 +14,11 @@
 
 === Building
 
+Apache Unomi 3.x provides a convenient `build.sh` script that simplifies the 
build process with
+enhanced error handling, preflight validation, and deployment options. See the
+<<Using the build.sh script>> section below for details. Alternatively, you 
can build using Maven
+directly as described in this section.
+
 ==== Initial Setup
 
 . Install J2SE 17 SDK (or later), which can be downloaded from
@@ -23,7 +28,7 @@
  JDK location, and that your PATH includes %JAVA_HOME%\bin (windows) or
  $JAVA_HOME$/bin (unix).
 
-. Install Maven 3.9.6 (or later), which can be downloaded from
+. Install Maven 3.9.8 (or later), which can be downloaded from
  http://maven.apache.org/download.html[http://maven.apache.org/download.html]. 
Make sure that your PATH includes
  the MVN_HOME/bin directory.
 
@@ -47,104 +52,211 @@ This will compile Apache Unomi and run all of the tests 
in the
 ----
      $> mvn -P \!integration-tests clean install
 ----
++
+This will compile Apache Unomi without running the tests and takes less
+ time to build.
++
+TIP: On a non-English Windows env, the Asciidoctor Maven Plugin may fail to
+     generate manuals due to an encoding conversion issue.
+     To solve this issue, we recommend setting the *file.encoding* system 
property
+     to _UTF-8_ like the following examples before issuing the commands shown 
above.
++
+[source]
+----
+     > set MAVEN_OPTS=-Dfile.encoding=UTF-8
+     or
+     > set MAVEN_OPTS=-Dfile.encoding=UTF-8 -Xmx2048m
+     ...
+----
++
+. The distributions will be available under "package/target" directory.
+
+==== Using the build.sh script
 
-===== Windows (PowerShell)
+Apache Unomi 3.x provides a unified `build.sh` script that simplifies the 
build, deployment, and execution process.
+This script replaces multiple legacy build scripts and provides enhanced error 
handling, colorized output, and
+comprehensive environment validation.
 
-For Windows users, Apache Unomi provides a PowerShell build script that offers 
enhanced functionality and a more native Windows experience.
+The `build.sh` script offers the following features:
 
-1. Get the code: `git clone https://github.com/apache/unomi.git`
-2. Open PowerShell and change to the Unomi directory
-3. Run the build script:
+* **Preflight validation**: Automatically checks for required tools (Java, 
Maven, GraphViz), system resources
+  (memory, disk space), Maven settings, and port availability
+* **Flexible build options**: Support for skipping tests, running integration 
tests, using OpenSearch, debug modes,
+  and more
+* **Deployment helpers**: Automatically copies KAR packages to Karaf deploy 
directory and purges caches
+* **Enhanced output**: Colorized, structured output with progress indicators 
(respects `NO_COLOR` environment variable)
+
+To use the script:
+
+. Get the code: `git clone https://github.com/apache/unomi.git`
+. Change to the top level directory of Apache Unomi source distribution.
+. Make the script executable (if needed): `chmod +x build.sh`
+. Run the script with your desired options:
 +
-[source,powershell]
+[source]
 ----
-.\build.ps1
+./build.sh --help
 ----
++
+This will display all available options and usage examples.
 
-The script supports various options:
+Common usage examples:
 
-[cols="1,2"]
-|===
-|Option |Description
+* Build with integration tests using OpenSearch:
++
+[source]
+----
+./build.sh --integration-tests --use-opensearch
+----
++
+* Build in debug mode:
++
+[source]
+----
+./build.sh --debug --debug-port 5006 --debug-suspend
+----
++
+* Deploy to specific Karaf instance:
++
+[source]
+----
+./build.sh --deploy --karaf-home ~/apache-karaf
+----
++
+* Build without Karaf and auto-start OpenSearch:
++
+[source]
+----
+./build.sh --no-karaf --auto-start opensearch
+----
++
+* Run a single integration test:
++
+[source]
+----
+./build.sh --integration-tests --single-test 
org.apache.unomi.itests.graphql.GraphQLEventIT
+----
++
+* Debug a single integration test:
++
+[source]
+----
+./build.sh --integration-tests --single-test 
org.apache.unomi.itests.graphql.GraphQLEventIT --it-debug --it-debug-suspend
+----
++
+* Run without colored output:
++
+[source]
+----
+NO_COLOR=1 ./build.sh
+----
++
+or
++
+[source]
+----
+export NO_COLOR=1
+./build.sh
+----
++
+For a complete list of options and examples, run `./build.sh --help`.
 
-|-Help
-|Show help message and available options
+==== Updating the website
 
-|-SkipTests
-|Skip all tests during build
+Use the top-level `generate-manual.sh` script to generate and publish the 
documentation website.
 
-|-IntegrationTests
-|Run integration tests
+[source]
+----
+./generate-manual.sh publish <svn_user> <svn_pass>
+./generate-manual.sh simulate <svn_user> <svn_pass>
+----
 
-|-Deploy
-|Deploy after build
+Modes:
 
-|-Debug
-|Run Karaf in debug mode
+* `publish`: generates all documentation and publishes to Apache SVN
+** Generates exactly 2 versions (latest + stable)
+** Publishes HTML manual to `$SVN_WEBSITE_BASE/manual` and API docs from master
+** Uploads release packages (PDF/ZIP) to Apache Dist SVN for non-master 
branches
+** Removes old versions automatically on the website
 
-|-DebugPort <port>
-|Set debug port (default: 5005)
+* `simulate`: dry-run; prints the commands without making changes
 
-|-DebugSuspend
-|Suspend JVM until debugger connects
+Requirements:
 
-|-KarafHome <path>
-|Set Karaf home directory for deployment
+* Java 17+, Maven 3.6+, Git, SVN client, `bc`
+* Access to the `master` and the stable branch configured in 
`generate-manual-config.sh`
 
-|-UseOpenSearch
-|Use OpenSearch instead of ElasticSearch
+Outputs and locations:
 
-|-NoKaraf
-|Build without starting Karaf
+* Staging directories under `target/generated-docs/` (created by the build)
+* Website content committed to `$SVN_WEBSITE_BASE/manual/<version>`
+* Release artifacts (PDF/ZIP + signatures/checksums) committed to Apache Dist 
SVN at `$SVN_DIST_BASE/<version>`
 
-|-AutoStart <engine>
-|Auto-start with specified engine (elasticsearch/opensearch)
+Notes:
 
-|-ResolverDebug
-|Enable Karaf Resolver debug logging for integration tests
-|===
+* The script sources optional `generate-manual-config.sh` and `shell-utils.sh` 
for configuration/utilities.
+* Javadoc aggregation is attempted; if it fails (toolchain mismatch), the rest 
still publishes.
 
-Examples:
+==== JGitFlow
 
-[source,powershell]
-----
-# Build with integration tests using OpenSearch
-.\build.ps1 -IntegrationTests -UseOpenSearch
+You can use the https://jgitflow.bitbucket.org[JGitFlow Maven plugin] to work 
with feature, hotfix and other types of branches.
 
-# Build in debug mode
-.\build.ps1 -Debug -DebugPort 5006 -DebugSuspend
+For example, to start a feature branch, simply use:
 
-# Deploy to specific Karaf instance
-.\build.ps1 -Deploy -KarafHome ~\apache-karaf
+[source]
+----
+mvn jgitflow:feature-start
 ----
 
-Features of the PowerShell script:
-
-* Automatic requirement checking for Java, Maven, and GraphViz
-* Detailed installation instructions if requirements are missing
-* Colorized output (when supported by the terminal)
-* Progress indicators for long-running operations
-* Enhanced error handling and reporting
-* Native Windows path handling
-* System resource validation (memory, disk space)
+This will prompt you for the feature name, and then create a feature branch 
and update all the POMs to have a version that contains the feature name. This 
makes it easier to integrate with continuous integration systems to generate 
builds for the feature branch.
 
-TIP: The script will automatically detect if your terminal supports ANSI 
colors and Unicode symbols, falling back to plain text if needed.
+Once the feature is completed you can use:
 
-+
-TIP: On a non-English Windows env, the Asciidoctor Maven Plugin may fail to
-     generate manuals due to an encoding conversion issue.
-     To solve this issue, we recommend setting the *file.encoding* system 
property
-     to _UTF-8_ like the following examples before issuing the commands shown 
above.
-+
 [source]
 ----
-     > set MAVEN_OPTS=-Dfile.encoding=UTF-8
-     or
-     > set MAVEN_OPTS=-Dfile.encoding=UTF-8 -Xmx2048m
-     ...
+mvn jgitflow:feature-finish
 ----
-+
-. The distributions will be available under "package/target" directory.
+
+To merge the branch into master.
+
+==== Maven Build Cache
+
+Apache Unomi uses the Maven Build Cache extension to significantly improve 
build efficiency by caching compiled artifacts and avoiding unnecessary 
recompilation (build time without/with cached artifacts ~2min/~3s).
+
+The build cache is enabled by default and configured in the 
`.mvn/maven-build-cache-config.xml` file. The cache configuration includes:
+
+* Local caching with up to 3 builds cached
+* SHA-256 hash algorithm for cache keys
+* Includes all source directories (`src/`)
+* Excludes `pom.xml` files and generated sources (`src/main/javagen/**`)
+
+Command line control:
+
+* Disable cache: `mvn -Dmaven.build.cache.enabled=false clean install`
+  ** Completely turns off the build cache functionality
+  ** Maven will not store or retrieve any build outputs from the cache
+  ** Performs a full build as if the cache were not present
+  ** Use this when you want to ensure no cache influence on the build
+
+* Skip cache (force rebuild): `mvn -Dmaven.build.cache.skipCache=true clean 
install`
+  ** Skips looking up artifacts in caches but still writes new results to cache
+  ** Forces Maven to rebuild everything without using cached artifacts
+  ** New build results will be stored in cache for future builds
+  ** Use this to force a complete rebuild while keeping cache functionality 
active
+
+* Enable cache (default): `mvn -Dmaven.build.cache.enabled=true clean install`
+  ** Enables full cache functionality (read and write)
+  ** Maven will use cached artifacts when available and store new results
+  ** This is the default behavior when the cache is enabled
+
+Purging the build cache:
+
+To completely reset the build cache and force a full rebuild, manually delete 
the cache directory: `rm -rf ~/.m2/build-cache` (Unix/Mac) or `rmdir /s 
%USERPROFILE%\.m2\build-cache` (Windows)
+
+For more information, see the official documentation:
+
+* https://maven.apache.org/extensions/maven-build-cache-extension/[Maven Build 
Cache Extension Overview]
+* 
https://maven.apache.org/extensions/maven-build-cache-extension/parameters.html[Build
 Cache Parameters Reference]
 
 ==== Installing a Search Engine
 
diff --git a/manual/src/main/asciidoc/index.adoc 
b/manual/src/main/asciidoc/index.adoc
index 99089917a..375255aab 100644
--- a/manual/src/main/asciidoc/index.adoc
+++ b/manual/src/main/asciidoc/index.adoc
@@ -144,3 +144,5 @@ include::shell-commands.adoc[]
 include::writing-plugins.adoc[]
 
 include::patches.adoc[]
+
+include::migrate-es7-to-es9.adoc[]
diff --git a/manual/src/main/asciidoc/migrate-es7-to-es9.adoc 
b/manual/src/main/asciidoc/migrate-es7-to-es9.adoc
new file mode 100644
index 000000000..d7a9c40ef
--- /dev/null
+++ b/manual/src/main/asciidoc/migrate-es7-to-es9.adoc
@@ -0,0 +1,131 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+=== Migrate from Elasticsearch 7 to Elasticsearch 9
+
+You can use the *remote reindex* API to upgrade directly from Elasticsearch 7 
to Elasticsearch 9. This approach runs both clusters in parallel and uses 
Elasticsearch's remote reindex feature.
+
+To execute the migration, you should have one Elasticsearch 7 running (your 
source) and one Elasticsearch 9 running (your target).
+
+This upgrade relies on a script. If you are sharing the Elasticsearch instance 
with other projects, it might need to be adjusted.
+
+The script `migration_es7-es9.sh` is located at the root of the project and handles:
+* Regular indices and rollover indices with their aliases
+* ILM policies migration
+* Data reindexing from ES7 to ES9
+* Validation and comparison reporting
+
+==== Prerequisites
+
+* `bash` shell
+* `jq` command-line JSON processor
+* `curl` for HTTP requests
+* Access to both ES7 (source) and ES9 (destination) clusters
+* *ES9 must have `reindex.remote.whitelist` configured* (see configuration 
below)
+* Ensure the machine where ES9 is running has access to the ES7 environment
+
+Install `jq` if not already installed:
+
+[source,bash]
+----
+# macOS
+brew install jq
+
+# Linux
+apt-get install jq
+# or
+yum install jq
+----
+
+==== Elasticsearch 9 Remote Reindex Configuration
+
+Before running the script, you must configure the remote reindex whitelist on 
your ES9 cluster. Add this to your `elasticsearch.yml` configuration file:
+
+[source,yaml]
+----
+reindex.remote.whitelist: "your-es7-host:9200"
+----
+
+==== Script Configuration
+
+The script uses environment variables for configuration. Export variables 
before running the script:
+
+[source,bash]
+----
+export ES7_HOST="http://your-es7-host:9200";
+export ES7_USER="elastic"
+export ES7_HOST_FROM_ES9="http://your-es7-host-viewed-from-es9:9200";
+export ES7_PASSWORD="your-es7-password"
+
+export ES9_HOST="http://your-es9-host:9200";
+export ES9_USER="elastic"
+export ES9_PASSWORD="your-es9-password"
+
+export INDEX_PREFIX="context-"
+export BATCH_SIZE="1000"
+----
+
+==== Configuration Variables
+
+[cols="1,3,1", options="header"]
+|===
+| Variable | Description | Default
+
+| ES7_HOST | Elasticsearch 7 URL | http://localhost:9200
+| ES7_HOST_FROM_ES9 | Elasticsearch 7 URL visible from Elasticsearch 9 | 
(value of ES7_HOST)
+| ES7_USER | ES7 username | elastic
+| ES7_PASSWORD | ES7 password | password
+| ES9_HOST | Elasticsearch 9 URL | http://localhost:9201
+| ES9_USER | ES9 username | elastic
+| ES9_PASSWORD | ES9 password | password
+| INDEX_PREFIX | Prefix for index names | context-
+| BATCH_SIZE | Reindex batch size | 1000
+|===
+
+==== Execution
+
+Make the script executable and run it:
+
+[source,bash]
+----
+chmod +x migration_es7-es9.sh
+./migration_es7-es9.sh
+----
+
+==== What the Script Does
+
+* Discovers indices matching the configured patterns on ES7
+* Collects source statistics (document count, size) for each index
+* Migrates ILM policies from ES7 to ES9 if they exist
+* Creates indices on ES9 with the same settings and mappings
+* Recreates aliases with proper write index flags for rollover indices
+* Reindexes data from ES7 to ES9 using the remote reindex API
+* Collects destination statistics after migration
+* Displays a comparison report showing document counts and any mismatches
+
+==== Output
+
+The script provides detailed logging with timestamps and a final comparison 
report:
+
+----
+==========================================
+MIGRATION COMPARISON REPORT
+==========================================
+Index                                    |    Source Docs |      Dest Docs |   
  Difference |     Status
+-----------------------------------------+----------------+----------------+----------------+-----------
+context-profile                          |          15420 |          15420 |   
          +0 |      ✓ OK
+context-session-000001                   |           3420 |           3420 |   
          +0 |      ✓ OK
+==========================================
+✓ All indices migrated successfully!
+==========================================
+----
diff --git a/manual/src/main/asciidoc/privacy.adoc 
b/manual/src/main/asciidoc/privacy.adoc
index 530af94c6..91021cb68 100644
--- a/manual/src/main/asciidoc/privacy.adoc
+++ b/manual/src/main/asciidoc/privacy.adoc
@@ -38,7 +38,7 @@ Here's an example of a request to anonymize a profile:
 
 [source]
 ----
-curl -X POST 
http://localhost:8181/cxs/profiles/{profileID}/anonymize?scope=ASCOPE
+curl -X POST 
http://localhost:8181/cxs/privacy/profiles/{profileID}/anonymize?scope=ASCOPE
 ----
 
 where `{profileID}` must be replaced by the actual identifier of a profile
@@ -69,7 +69,7 @@ session data will also be detached from the current profile 
and anonymized.
 
 [source]
 ----
-curl -X DELETE http://localhost:8181/cxs/profiles/{profileID}?withData=false 
--user karaf:karaf
+curl -X DELETE 
http://localhost:8181/cxs/privacy/profiles/{profileID}?withData=false --user 
karaf:karaf
 ----
 
 where `{profileID}` must be replaced by the actual identifier of a profile
diff --git a/manual/src/main/asciidoc/whats-new.adoc 
b/manual/src/main/asciidoc/whats-new.adoc
index 9051c2ab4..bafd19357 100644
--- a/manual/src/main/asciidoc/whats-new.adoc
+++ b/manual/src/main/asciidoc/whats-new.adoc
@@ -23,7 +23,11 @@ is not supported anymore.
 
 The documentation of the client can be found here: 
https://www.elastic.co/docs/reference/elasticsearch/clients/java
 
-==== Karaf upgrade
+=== Elasticsearch 7 data migration
+
+A procedure to migrate your data from Elasticsearch 7 to Elasticsearch 9 can 
be found in the <<Migrate from Elasticsearch 7 to Elasticsearch 9>> section.
+
+=== Karaf upgrade
 
 The Karaf version has been upgraded from 4.2.15 to 4.4.8 in order to support 
the latest versions of the dependencies.
 This upgrade also brings support for Java 17.
diff --git a/migration_es7-es9.sh b/migration_es7-es9.sh
new file mode 100644
index 000000000..bfb2c6a3a
--- /dev/null
+++ b/migration_es7-es9.sh
@@ -0,0 +1,750 @@
+#!/bin/bash
+
+################################################################################
+#
+#    Licensed to the Apache Software Foundation (ASF) under one or more
+#    contributor license agreements.  See the NOTICE file distributed with
+#    this work for additional information regarding copyright ownership.
+#    The ASF licenses this file to You under the Apache License, Version 2.0
+#    (the "License"); you may not use this file except in compliance with
+#    the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#
+################################################################################
+
+##############################################################################
+# Elasticsearch Reindex Script
+# This script reindexes data from Elasticsearch 7 to Elasticsearch 9
+# It handles both regular indices and rollover indices with their aliases
+##############################################################################
+
+# Ensure we're running with bash
+if [ -z "$BASH_VERSION" ]; then
+    echo "Error: This script requires bash. Please run with: bash $0"
+    exit 1
+fi
+
+set -e  # Exit on error
+
+##############################################################################
+# CONFIGURATION
+##############################################################################
+
+# Environment prefix (configurable for multiple environments)
+INDEX_PREFIX="${INDEX_PREFIX:-context-}"
+
+# Elasticsearch source (ES7) configuration
+ES7_HOST="${ES7_HOST:-http://localhost:9200}";
+ES7_HOST_FROM_ES9="${ES7_HOST_FROM_ES9:-${ES7_HOST}}"
+ES7_USER="${ES7_USER:-elastic}"
+ES7_PASSWORD="${ES7_PASSWORD:-password}"
+
+# Elasticsearch destination (ES9) configuration
+ES9_HOST="${ES9_HOST:-http://localhost:9201}";
+ES9_USER="${ES9_USER:-elastic}"
+ES9_PASSWORD="${ES9_PASSWORD:-password}"
+
+# Batch size for reindexing
+BATCH_SIZE="${BATCH_SIZE:-1000}"
+
+##############################################################################
+# INDICES CONFIGURATION
+##############################################################################
+
+# Regular indices (no rollover)
+REGULAR_INDICES=(
+    "profile"
+    "systemitems"
+    "personasession"
+    "profilealias"
+    "geonameentry"
+)
+
+# Rollover indices patterns
+# Format: "pattern:alias_name"
+# The script will automatically discover all matching indices
+ROLLOVER_PATTERNS=(
+    "session-*:session"
+    "event-*:event"
+)
+
+##############################################################################
+# FUNCTIONS
+##############################################################################
+
+# Function to log messages with timestamp
+log() {
+    echo "[$(date +'%Y-%m-%d %H:%M:%S')] $1"
+}
+
+# Function to log errors
+log_error() {
+    echo "[$(date +'%Y-%m-%d %H:%M:%S')] ERROR: $1" >&2
+}
+
+# Function to get index stats
+get_index_stats() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local index=$4
+
+    response=$(curl -s \
+        -u "${user}:${password}" \
+        "${host}/${index}/_stats")
+
+    if [ $? -eq 0 ]; then
+        echo "$response"
+        return 0
+    else
+        return 1
+    fi
+}
+
+# Function to extract key stats from index stats response
+extract_index_stats() {
+    local stats_json=$1
+    local index=$2
+
+    local doc_count=$(echo "$stats_json" | jq -r 
".indices[\"${index}\"].primaries.docs.count // 0")
+    local doc_deleted=$(echo "$stats_json" | jq -r 
".indices[\"${index}\"].primaries.docs.deleted // 0")
+    local store_size=$(echo "$stats_json" | jq -r 
".indices[\"${index}\"].primaries.store.size_in_bytes // 0")
+    local store_size_mb=$((store_size / 1024 / 1024))
+
+    echo "${doc_count}|${doc_deleted}|${store_size_mb}"
+}
+
+# Temporary files to store source and destination stats
+STATS_DIR=$(mktemp -d)
+SOURCE_STATS_FILE="${STATS_DIR}/source_stats.txt"
+DEST_STATS_FILE="${STATS_DIR}/dest_stats.txt"
+
+# Cleanup function
+cleanup() {
+    rm -rf "$STATS_DIR"
+}
+
+# Register cleanup on exit
+trap cleanup EXIT
+
+# Function to discover indices matching a pattern
+discover_indices() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local pattern=$4
+
+    response=$(curl -s \
+        -u "${user}:${password}" \
+        "${host}/_cat/indices/${pattern}?h=index&format=json")
+
+    # Extract index names
+    echo "$response" | jq -r '.[].index' 2>/dev/null || echo ""
+}
+
+# Function to check if index exists
+check_index_exists() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local index=$4
+
+    response=$(curl -s -o /dev/null -w "%{http_code}" \
+        -u "${user}:${password}" \
+        "${host}/${index}")
+
+    if [ "$response" == "200" ]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+# Function to get index settings
+get_index_settings() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local index=$4
+
+    curl -s -u "${user}:${password}" \
+        "${host}/${index}/_settings" | jq .
+}
+
+# Function to get index mappings
+get_index_mappings() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local index=$4
+
+    curl -s -u "${user}:${password}" \
+        "${host}/${index}/_mapping" | jq .
+}
+
+# Function to get index aliases
+get_index_aliases() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local index=$4
+
+    curl -s -u "${user}:${password}" \
+        "${host}/${index}/_alias" | jq .
+}
+
+# Function to get ILM policy
+get_ilm_policy() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local policy_name=$4
+
+    response=$(curl -s -w "\n%{http_code}" \
+        -u "${user}:${password}" \
+        "${host}/_ilm/policy/${policy_name}")
+
+    http_code=$(echo "$response" | tail -n 1)
+    response_body=$(echo "$response" | sed '$d')
+
+    if [ "$http_code" == "200" ]; then
+        echo "$response_body" | jq -r --arg policy "$policy_name" 
'.[$policy].policy'
+        return 0
+    else
+        return 1
+    fi
+}
+
+# Function to create ILM policy
+create_ilm_policy() {
+    local policy_name=$1
+    local policy_body=$2
+
+    log "Creating ILM policy ${policy_name} on ES9..."
+
+    response=$(curl -s -w "\n%{http_code}" \
+        -u "${ES9_USER}:${ES9_PASSWORD}" \
+        -X PUT "${ES9_HOST}/_ilm/policy/${policy_name}" \
+        -H 'Content-Type: application/json' \
+        -d "{\"policy\": ${policy_body}}")
+
+    http_code=$(echo "$response" | tail -n 1)
+    response_body=$(echo "$response" | sed '$d')
+
+    if [ "$http_code" == "200" ]; then
+        log "ILM policy ${policy_name} created successfully"
+        return 0
+    else
+        log_error "Failed to create ILM policy ${policy_name}: 
${response_body}"
+        return 1
+    fi
+}
+
+# Function to check if ILM policy exists
+check_ilm_policy_exists() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local policy_name=$4
+
+    response=$(curl -s -o /dev/null -w "%{http_code}" \
+        -u "${user}:${password}" \
+        "${host}/_ilm/policy/${policy_name}")
+
+    if [ "$response" == "200" ]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+# Function to get ILM policy name from index settings
+get_index_ilm_policy() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local index=$4
+
+    settings=$(get_index_settings "$host" "$user" "$password" "$index")
+    policy_name=$(echo "$settings" | jq -r 
".[\"${index}\"].settings.index.lifecycle.name // \"\"")
+
+    if [ -n "$policy_name" ] && [ "$policy_name" != "null" ] && [ 
"$policy_name" != "" ]; then
+        echo "$policy_name"
+        return 0
+    else
+        return 1
+    fi
+}
+
+# Function to create index on destination
+create_index() {
+    local index=$1
+    local settings=$2
+    local mappings=$3
+    local ilm_policy=${4:-""}
+
+    log "Creating index ${index} on ES9..."
+
+    # If ILM policy is provided, add it to settings
+    if [ -n "$ilm_policy" ] && [ "$ilm_policy" != "null" ]; then
+        settings=$(echo "$settings" | jq --arg policy "$ilm_policy" --arg 
rollover_alias "$(echo $index| sed -r "s/(.*)-[0-9]+/\1/")" '. + {"lifecycle": 
{"name": $policy,"rollover_alias": $rollover_alias}}')
+        log "Index will be associated with ILM policy: ${ilm_policy}"
+    fi
+
+    # Prepare the request body
+    local body=$(jq -n \
+        --argjson settings "$settings" \
+        --argjson mappings "$mappings" \
+        '{settings: $settings, mappings: $mappings}')
+
+    response=$(curl -s -w "\n%{http_code}" \
+        -u "${ES9_USER}:${ES9_PASSWORD}" \
+        -X PUT "${ES9_HOST}/${index}" \
+        -H 'Content-Type: application/json' \
+        -d "$body")
+
+    http_code=$(echo "$response" | tail -n 1)
+    response_body=$(echo "$response" | sed '$d')
+
+    if [ "$http_code" == "200" ] || [ "$http_code" == "201" ]; then
+        log "Index ${index} created successfully"
+        return 0
+    else
+        log_error "Failed to create index ${index}: ${response_body}"
+        return 1
+    fi
+}
+
+# Function to create alias
+create_alias() {
+    local index=$1
+    local alias=$2
+    local is_write_index=${3:-false}
+
+    log "Creating alias ${alias} for index ${index} 
(is_write_index=${is_write_index})..."
+
+    local alias_config="{\"index\": \"${index}\", \"alias\": \"${alias}\""
+
+    if [ "$is_write_index" == "true" ]; then
+        alias_config="${alias_config}, \"is_write_index\": true"
+    fi
+
+    alias_config="${alias_config}}"
+
+    response=$(curl -s -w "\n%{http_code}" \
+        -u "${ES9_USER}:${ES9_PASSWORD}" \
+        -X POST "${ES9_HOST}/_aliases" \
+        -H 'Content-Type: application/json' \
+        -d "{
+            \"actions\": [
+                {
+                    \"add\": ${alias_config}
+                }
+            ]
+        }")
+
+    http_code=$(echo "$response" | tail -n 1)
+
+    if [ "$http_code" == "200" ]; then
+        log "Alias ${alias} created successfully"
+        return 0
+    else
+        log_error "Failed to create alias ${alias}"
+        return 1
+    fi
+}
+
+# Function to check task status
+check_task_status() {
+    local task_id=$1
+
+    response=$(curl -s -w "\n%{http_code}" \
+        -u "${ES9_USER}:${ES9_PASSWORD}" \
+        "${ES9_HOST}/_tasks/${task_id}")
+
+    http_code=$(echo "$response" | tail -n 1)
+    response_body=$(echo "$response" | sed '$d')
+
+    if [ "$http_code" == "200" ]; then
+        echo "$response_body" | tr -d '\000-\037'
+        return 0
+    else
+        log_error "Failed to get task status: ${response_body}"
+        return 1
+    fi
+}
+
+# Function to wait for task completion
+wait_for_task() {
+    local task_id=$1
+    local check_interval=5  # Check every 5 seconds
+
+    log "Waiting for task ${task_id} to complete..."
+
+    while true; do
+        sleep $check_interval
+
+        task_status=$(check_task_status "$task_id")
+        if [ $? -ne 0 ]; then
+            log_error "Failed to check task status"
+            return 1
+        fi
+
+        completed=$(echo "$task_status" | jq -r '.completed')
+
+        if [ "$completed" == "true" ]; then
+            log "Task completed successfully"
+
+            # Get task response
+            response_data=$(echo "$task_status" | jq -r '.response')
+
+            # Parse and display reindex results
+            total=$(echo "$response_data" | jq -r '.total // 0')
+            created=$(echo "$response_data" | jq -r '.created // 0')
+            updated=$(echo "$response_data" | jq -r '.updated // 0')
+            failures=$(echo "$response_data" | jq -r '.failures | length')
+
+            log "Reindex results: Total=${total}, Created=${created}, 
Updated=${updated}, Failures=${failures}"
+
+            if [ "$failures" -gt 0 ]; then
+                log_error "Some documents failed to reindex:"
+                echo "$response_data" | jq '.failures'
+            fi
+
+            return 0
+        fi
+
+        # Display progress
+        task=$(echo "$task_status" | jq -r '.task')
+        status=$(echo "$task" | jq -r '.status')
+        total=$(echo "$status" | jq -r '.total // 0')
+        created=$(echo "$status" | jq -r '.created // 0')
+        updated=$(echo "$status" | jq -r '.updated // 0')
+
+        log "Progress: ${created}/${total} documents created, ${updated} 
updated"
+    done
+}
+
+# Function to reindex data
+reindex_data() {
+    local source_index=$1
+    local dest_index=$2
+
+    log "Starting reindex from ${source_index} to ${dest_index}..."
+
+    response=$(curl -s -w "\n%{http_code}" \
+        -u "${ES9_USER}:${ES9_PASSWORD}" \
+        -X POST "${ES9_HOST}/_reindex?wait_for_completion=false" \
+        -H 'Content-Type: application/json' \
+        -d "{
+            \"source\": {
+                \"remote\": {
+                    \"host\": \"${ES7_HOST_FROM_ES9}\",
+                    \"username\": \"${ES7_USER}\",
+                    \"password\": \"${ES7_PASSWORD}\"
+                },
+                \"index\": \"${source_index}\",
+                \"size\": ${BATCH_SIZE}
+            },
+            \"dest\": {
+                \"index\": \"${dest_index}\"
+            }
+        }")
+
+    http_code=$(echo "$response" | tail -n 1)
+    response_body=$(echo "$response" | sed '$d')
+
+    if [ "$http_code" == "200" ]; then
+        # Get task ID
+        task_id=$(echo "$response_body" | jq -r '.task')
+        log "Reindex task created with ID: ${task_id}"
+
+        # Wait for task completion
+        wait_for_task "$task_id"
+        return $?
+    else
+        log_error "Failed to start reindex: ${response_body}"
+        return 1
+    fi
+}
+
+# Function to process regular index
+process_regular_index() {
+    local index_name=$1
+    local full_index_name="${INDEX_PREFIX}${index_name}"
+
+    log "=========================================="
+    log "Processing regular index: ${full_index_name}"
+    log "=========================================="
+
+    # Check if source index exists
+    if ! check_index_exists "$ES7_HOST" "$ES7_USER" "$ES7_PASSWORD" 
"$full_index_name"; then
+        log_error "Source index ${full_index_name} does not exist on ES7. 
Skipping..."
+        return 1
+    fi
+
+    # Collect source stats
+    collect_index_stats "$ES7_HOST" "$ES7_USER" "$ES7_PASSWORD" 
"$full_index_name" "SOURCE"
+
+    # Get settings and mappings from source
+    log "Retrieving settings and mappings from source..."
+    settings=$(get_index_settings "$ES7_HOST" "$ES7_USER" "$ES7_PASSWORD" 
"$full_index_name" | \
+        jq ".[\"${full_index_name}\"].settings.index |
+            del(.creation_date, .uuid, .version, .provided_name, .lifecycle)")
+    mappings=$(get_index_mappings "$ES7_HOST" "$ES7_USER" "$ES7_PASSWORD" 
"$full_index_name" | \
+        jq ".[\"${full_index_name}\"].mappings")
+
+    # Check if index has an ILM policy
+    ilm_policy_name=""
+    if ilm_policy_name=$(get_index_ilm_policy "$ES7_HOST" "$ES7_USER" 
"$ES7_PASSWORD" "$full_index_name"); then
+        log "Index has ILM policy: ${ilm_policy_name}"
+
+        # Check if policy exists on ES9, if not, migrate it
+        if ! check_ilm_policy_exists "$ES9_HOST" "$ES9_USER" "$ES9_PASSWORD" 
"$ilm_policy_name"; then
+            log "ILM policy ${ilm_policy_name} does not exist on ES9, 
migrating it..."
+
+            if ilm_policy_body=$(get_ilm_policy "$ES7_HOST" "$ES7_USER" 
"$ES7_PASSWORD" "$ilm_policy_name"); then
+                create_ilm_policy "$ilm_policy_name" "$ilm_policy_body"
+            else
+                log_error "Failed to retrieve ILM policy ${ilm_policy_name} 
from ES7"
+                ilm_policy_name=""
+            fi
+        else
+            log "ILM policy ${ilm_policy_name} already exists on ES9"
+        fi
+    else
+        log "Index does not have an ILM policy"
+    fi
+
+    # Create index on destination if it doesn't exist
+    if ! check_index_exists "$ES9_HOST" "$ES9_USER" "$ES9_PASSWORD" 
"$full_index_name"; then
+        create_index "$full_index_name" "$settings" "$mappings" 
"$ilm_policy_name"
+    else
+        log "Index ${full_index_name} already exists on ES9. Skipping 
creation..."
+    fi
+
+    # Reindex data
+    reindex_data "$full_index_name" "$full_index_name"
+}
+
+# Function to collect stats for an index
+collect_index_stats() {
+    local host=$1
+    local user=$2
+    local password=$3
+    local index=$4
+    local array_name=$5
+
+    if check_index_exists "$host" "$user" "$password" "$index"; then
+        local stats_json=$(get_index_stats "$host" "$user" "$password" 
"$index")
+        local stats=$(extract_index_stats "$stats_json" "$index")
+
+        if [ "$array_name" == "SOURCE" ]; then
+            echo "${index}:${stats}" >> "$SOURCE_STATS_FILE"
+        else
+            echo "${index}:${stats}" >> "$DEST_STATS_FILE"
+        fi
+    fi
+}
+
+# Function to display comparison report
+display_comparison_report() {
+    log "=========================================="
+    log "MIGRATION COMPARISON REPORT"
+    log "=========================================="
+
+    printf "%-40s | %15s | %15s | %15s | %10s\n" "Index" "Source Docs" "Dest 
Docs" "Difference" "Status"
+    printf "%-40s-+-%15s-+-%15s-+-%15s-+-%10s\n" 
"----------------------------------------" "---------------" "---------------" 
"---------------" "----------"
+
+    local total_issues=0
+
+    # Read source stats file
+    if [ -f "$SOURCE_STATS_FILE" ]; then
+        while IFS=':' read -r index source_stats; do
+            # Find corresponding dest stats
+            dest_stats="0|0|0"
+            if [ -f "$DEST_STATS_FILE" ]; then
+                dest_line=$(grep "^${index}:" "$DEST_STATS_FILE" 2>/dev/null 
|| echo "")
+                if [ -n "$dest_line" ]; then
+                    dest_stats="${dest_line#*:}"
+                fi
+            fi
+
+            IFS='|' read -r source_docs source_deleted source_size <<< 
"$source_stats"
+            IFS='|' read -r dest_docs dest_deleted dest_size <<< "$dest_stats"
+
+            local diff=$((dest_docs - source_docs))
+            local status="✓ OK"
+
+            if [ "$dest_docs" -ne "$source_docs" ]; then
+                status="✗ MISMATCH"
+                ((total_issues++))
+            fi
+
+            printf "%-40s | %15s | %15s | %+15s | %10s\n" "$index" 
"$source_docs" "$dest_docs" "$diff" "$status"
+        done < "$SOURCE_STATS_FILE"
+    fi
+
+    log "=========================================="
+    if [ $total_issues -eq 0 ]; then
+        log "✓ All indices migrated successfully!"
+    else
+        log_error "✗ ${total_issues} index(es) have mismatched document counts"
+    fi
+    log "=========================================="
+}
+
+# Function to process rollover index
+process_rollover_index() {
+    local index_name=$1
+    local alias_name=$2
+
+    local full_index_name="${INDEX_PREFIX}${index_name}"
+    local full_alias_name="${INDEX_PREFIX}${alias_name}"
+
+    log "=========================================="
+    log "Processing rollover index: ${full_index_name}"
+    log "Alias: ${full_alias_name}"
+    log "=========================================="
+
+    # Check if source index exists
+    if ! check_index_exists "$ES7_HOST" "$ES7_USER" "$ES7_PASSWORD" 
"$full_index_name"; then
+        log_error "Source index ${full_index_name} does not exist on ES7. 
Skipping..."
+        return 1
+    fi
+
+    # Collect source stats
+    collect_index_stats "$ES7_HOST" "$ES7_USER" "$ES7_PASSWORD" 
"$full_index_name" "SOURCE"
+
+    # Get settings, mappings and aliases from source
+    log "Retrieving settings, mappings and aliases from source..."
+    settings=$(get_index_settings "$ES7_HOST" "$ES7_USER" "$ES7_PASSWORD" 
"$full_index_name" | \
+        jq ".[\"${full_index_name}\"].settings.index |
+            del(.creation_date, .uuid, .version, .provided_name, .lifecycle)")
+    mappings=$(get_index_mappings "$ES7_HOST" "$ES7_USER" "$ES7_PASSWORD" 
"$full_index_name" | \
+        jq ".[\"${full_index_name}\"].mappings")
+
+    # Get alias information from source to check if it's a write index
+    aliases_info=$(get_index_aliases "$ES7_HOST" "$ES7_USER" "$ES7_PASSWORD" 
"$full_index_name")
+    is_write_index=$(echo "$aliases_info" | jq -r 
".[\"${full_index_name}\"].aliases[\"${full_alias_name}\"].is_write_index // 
false")
+
+    log "Index ${full_index_name} is_write_index: ${is_write_index}"
+
+    # Check if index has an ILM policy
+    ilm_policy_name=""
+    if ilm_policy_name=$(get_index_ilm_policy "$ES7_HOST" "$ES7_USER" 
"$ES7_PASSWORD" "$full_index_name"); then
+        log "Index has ILM policy: ${ilm_policy_name}"
+
+        # Check if policy exists on ES9, if not, migrate it
+        if ! check_ilm_policy_exists "$ES9_HOST" "$ES9_USER" "$ES9_PASSWORD" 
"$ilm_policy_name"; then
+            log "ILM policy ${ilm_policy_name} does not exist on ES9, 
migrating it..."
+
+            if ilm_policy_body=$(get_ilm_policy "$ES7_HOST" "$ES7_USER" 
"$ES7_PASSWORD" "$ilm_policy_name"); then
+                create_ilm_policy "$ilm_policy_name" "$ilm_policy_body"
+            else
+                log_error "Failed to retrieve ILM policy ${ilm_policy_name} 
from ES7"
+                ilm_policy_name=""
+            fi
+        else
+            log "ILM policy ${ilm_policy_name} already exists on ES9"
+        fi
+    else
+        log "Index does not have an ILM policy"
+    fi
+
+    # Create index on destination if it doesn't exist
+    if ! check_index_exists "$ES9_HOST" "$ES9_USER" "$ES9_PASSWORD" 
"$full_index_name"; then
+        create_index "$full_index_name" "$settings" "$mappings" 
"$ilm_policy_name"
+    else
+        log "Index ${full_index_name} already exists on ES9. Skipping 
creation..."
+    fi
+
+    # Create alias with is_write_index parameter
+    create_alias "$full_index_name" "$full_alias_name" "$is_write_index"
+
+    # Reindex data
+    reindex_data "$full_index_name" "$full_index_name"
+}
+
+##############################################################################
+# MAIN EXECUTION
+##############################################################################
+
+main() {
+    log "=========================================="
+    log "Elasticsearch Reindex Script Started"
+    log "=========================================="
+    log "Configuration:"
+    log "  Index Prefix: ${INDEX_PREFIX}"
+    log "  ES7 Host: ${ES7_HOST}"
+    log "  ES7 Host From ES9: ${ES7_HOST_FROM_ES9}"
+    log "  ES9 Host: ${ES9_HOST}"
+    log "  Batch Size: ${BATCH_SIZE}"
+    log "=========================================="
+
+    # Check if jq is installed
+    if ! command -v jq &> /dev/null; then
+        log_error "jq is required but not installed. Please install jq."
+        exit 1
+    fi
+
+    # Process regular indices
+    log "Processing regular indices..."
+    for index_name in "${REGULAR_INDICES[@]}"; do
+        process_regular_index "$index_name"
+        echo ""
+    done
+
+    # Process rollover indices
+    log "Processing rollover indices..."
+    for pattern_config in "${ROLLOVER_PATTERNS[@]}"; do
+        IFS=':' read -r pattern alias_name <<< "$pattern_config"
+
+        # Add prefix to pattern
+        full_pattern="${INDEX_PREFIX}${pattern}"
+
+        log "Discovering indices matching pattern: ${full_pattern}"
+        discovered_indices=$(discover_indices "$ES7_HOST" "$ES7_USER" 
"$ES7_PASSWORD" "$full_pattern")
+
+        if [ -z "$discovered_indices" ]; then
+            log "No indices found matching pattern: ${full_pattern}"
+            continue
+        fi
+
+        # Process each discovered index
+        while IFS= read -r full_index_name; do
+            # Remove prefix to get the index name without prefix
+            index_name="${full_index_name#$INDEX_PREFIX}"
+            process_rollover_index "$index_name" "$alias_name"
+            echo ""
+        done <<< "$discovered_indices"
+    done
+
+    log "=========================================="
+    log "Collecting destination statistics..."
+    log "=========================================="
+
+    # Collect destination stats for all migrated indices
+    if [ -f "$SOURCE_STATS_FILE" ]; then
+        while IFS=':' read -r index stats; do
+            collect_index_stats "$ES9_HOST" "$ES9_USER" "$ES9_PASSWORD" 
"$index" "DEST"
+        done < "$SOURCE_STATS_FILE"
+    fi
+
+    # Display comparison report
+    display_comparison_report
+
+    log "=========================================="
+    log "Elasticsearch Reindex Script Completed"
+    log "=========================================="
+}
+
+# Run main function
+main
diff --git a/package/src/main/resources/NOTICE 
b/package/src/main/resources/NOTICE
index eedd211df..cc2423145 100644
--- a/package/src/main/resources/NOTICE
+++ b/package/src/main/resources/NOTICE
@@ -1,5 +1,5 @@
 Apache Unomi
-Copyright 2015-2023 The Apache Software Foundation
+Copyright 2015-2025 The Apache Software Foundation
 
 This product includes software developed at
 The Apache Software Foundation (http://www.apache.org/).
@@ -61,13 +61,11 @@ License: within 
bindings/java/src/org/hyperic/sigar/util/PrintfFormat.java
 Notice for Groovy
 
---------------------------------------------------------------------------------------------------
 
-Groovy Language
-   Copyright 2003-2014 The respective authors and developers
-   Developers and Contributors are listed in the project POM file
-   and Gradle build file
+Apache Groovy
+   Copyright 2003-2025 The Apache Software Foundation
 
-   This product includes software developed by
-   The Groovy community (http://groovy.codehaus.org/).
+   This product includes software developed at
+   The Apache Software Foundation (http://www.apache.org/).
 
 
 Notice for org.apache.ws.xmlschema
diff --git 
a/persistence-elasticsearch/core/src/main/java/org/apache/unomi/persistence/elasticsearch/ConditionESQueryBuilderDispatcher.java
 
b/persistence-elasticsearch/core/src/main/java/org/apache/unomi/persistence/elasticsearch/ConditionESQueryBuilderDispatcher.java
index 61ffab6a3..b7386e804 100644
--- 
a/persistence-elasticsearch/core/src/main/java/org/apache/unomi/persistence/elasticsearch/ConditionESQueryBuilderDispatcher.java
+++ 
b/persistence-elasticsearch/core/src/main/java/org/apache/unomi/persistence/elasticsearch/ConditionESQueryBuilderDispatcher.java
@@ -19,7 +19,8 @@ package org.apache.unomi.persistence.elasticsearch;
 
 import co.elastic.clients.elasticsearch._types.query_dsl.Query;
 import org.apache.unomi.api.conditions.Condition;
-import 
org.apache.unomi.persistence.spi.conditions.dispatcher.ConditionQueryBuilderDispatcherSupport;
+import org.apache.unomi.persistence.spi.conditions.ConditionContextHelper;
+import 
org.apache.unomi.persistence.spi.conditions.dispatcher.ConditionQueryBuilderDispatcher;
 import org.apache.unomi.scripting.ScriptExecutor;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -35,7 +36,7 @@ import java.util.concurrent.ConcurrentHashMap;
  * Responsibilities:
  * - Maintain a registry of available query builders by their IDs
  * - Resolve legacy queryBuilder IDs to the canonical IDs using centralized 
mapping in
- *   {@link 
org.apache.unomi.persistence.spi.conditions.dispatcher.ConditionQueryBuilderDispatcherSupport}
+ *   {@link 
org.apache.unomi.persistence.spi.conditions.dispatcher.ConditionQueryBuilderDispatcher}
  *   (with deprecation warnings)
  * - Build query fragments (filters) and full queries from {@link 
org.apache.unomi.api.conditions.Condition}
  * <p>
@@ -43,12 +44,11 @@ import java.util.concurrent.ConcurrentHashMap;
  * - Legacy mappings are centralized in SPI support; there is no runtime 
customization
  * - New IDs are always preferred; legacy IDs trigger a warning and are mapped 
transparently
  */
-public class ConditionESQueryBuilderDispatcher {
+public class ConditionESQueryBuilderDispatcher extends 
ConditionQueryBuilderDispatcher {
     private static final Logger LOGGER = 
LoggerFactory.getLogger(ConditionESQueryBuilderDispatcher.class.getName());
 
     private Map<String, ConditionESQueryBuilder> queryBuilders = new 
ConcurrentHashMap<>();
     private ScriptExecutor scriptExecutor;
-    private final ConditionQueryBuilderDispatcherSupport support = new 
ConditionQueryBuilderDispatcherSupport();
 
     public ConditionESQueryBuilderDispatcher() {
     }
@@ -100,15 +100,14 @@ public class ConditionESQueryBuilderDispatcher {
         }
 
         // Find the appropriate query builder key (new or legacy)
-        String finalQueryBuilderKey = support.findQueryBuilderKey(
+        String finalQueryBuilderKey = findQueryBuilderKey(
                 queryBuilderKey,
                 condition.getConditionTypeId(),
-                queryBuilders::containsKey,
-                LOGGER);
+                queryBuilders::containsKey);
 
         if (finalQueryBuilderKey != null) {
             ConditionESQueryBuilder queryBuilder = 
queryBuilders.get(finalQueryBuilderKey);
-            Condition contextualCondition = support.contextualize(condition, 
context, scriptExecutor);
+            Condition contextualCondition = 
ConditionContextHelper.getContextualCondition(condition, context, 
scriptExecutor);
             if (contextualCondition != null) {
                 return queryBuilder.buildQuery(contextualCondition, context, 
this);
             }
@@ -141,15 +140,14 @@ public class ConditionESQueryBuilderDispatcher {
         }
 
         // Find the appropriate query builder key (new or legacy)
-        String finalQueryBuilderKey = support.findQueryBuilderKey(
+        String finalQueryBuilderKey = findQueryBuilderKey(
                 queryBuilderKey,
                 condition.getConditionTypeId(),
-                queryBuilders::containsKey,
-                LOGGER);
+                queryBuilders::containsKey);
 
         if (finalQueryBuilderKey != null) {
             ConditionESQueryBuilder queryBuilder = 
queryBuilders.get(finalQueryBuilderKey);
-            Condition contextualCondition = support.contextualize(condition, 
context, scriptExecutor);
+            Condition contextualCondition = 
ConditionContextHelper.getContextualCondition(condition, context, 
scriptExecutor);
             if (contextualCondition != null) {
                 return queryBuilder.count(contextualCondition, context, this);
             }
@@ -161,4 +159,9 @@ public class ConditionESQueryBuilderDispatcher {
         throw new UnsupportedOperationException();
     }
 
+    @Override
+    protected Logger getLogger() {
+        return LOGGER;
+    }
+
 }
diff --git 
a/persistence-opensearch/core/src/main/java/org/apache/unomi/persistence/opensearch/ConditionOSQueryBuilderDispatcher.java
 
b/persistence-opensearch/core/src/main/java/org/apache/unomi/persistence/opensearch/ConditionOSQueryBuilderDispatcher.java
index 825b88b93..01ed4f973 100644
--- 
a/persistence-opensearch/core/src/main/java/org/apache/unomi/persistence/opensearch/ConditionOSQueryBuilderDispatcher.java
+++ 
b/persistence-opensearch/core/src/main/java/org/apache/unomi/persistence/opensearch/ConditionOSQueryBuilderDispatcher.java
@@ -18,7 +18,8 @@
 package org.apache.unomi.persistence.opensearch;
 
 import org.apache.unomi.api.conditions.Condition;
-import 
org.apache.unomi.persistence.spi.conditions.dispatcher.ConditionQueryBuilderDispatcherSupport;
+import org.apache.unomi.persistence.spi.conditions.ConditionContextHelper;
+import 
org.apache.unomi.persistence.spi.conditions.dispatcher.ConditionQueryBuilderDispatcher;
 import org.apache.unomi.scripting.ScriptExecutor;
 import org.opensearch.client.opensearch._types.query_dsl.Query;
 import org.slf4j.Logger;
@@ -35,7 +36,7 @@ import java.util.concurrent.ConcurrentHashMap;
  * Responsibilities:
  * - Maintain a registry of available query builders by their IDs
  * - Resolve legacy queryBuilder IDs to the canonical IDs using centralized 
mapping in
- *   {@link 
org.apache.unomi.persistence.spi.conditions.dispatcher.ConditionQueryBuilderDispatcherSupport}
+ *   {@link 
org.apache.unomi.persistence.spi.conditions.dispatcher.ConditionQueryBuilderDispatcher}
  *   (with deprecation warnings)
  * - Build query fragments (filters) and full queries from {@link 
org.apache.unomi.api.conditions.Condition}
  * <p>
@@ -43,12 +44,11 @@ import java.util.concurrent.ConcurrentHashMap;
  * - Legacy mappings are centralized in SPI support; there is no runtime 
customization
  * - New IDs are always preferred; legacy IDs trigger a warning and are mapped 
transparently
  */
-public class ConditionOSQueryBuilderDispatcher {
+public class ConditionOSQueryBuilderDispatcher extends 
ConditionQueryBuilderDispatcher {
     private static final Logger LOGGER = 
LoggerFactory.getLogger(ConditionOSQueryBuilderDispatcher.class.getName());
 
     private Map<String, ConditionOSQueryBuilder> queryBuilders = new 
ConcurrentHashMap<>();
     private ScriptExecutor scriptExecutor;
-    private final ConditionQueryBuilderDispatcherSupport support = new 
ConditionQueryBuilderDispatcherSupport();
 
     public ConditionOSQueryBuilderDispatcher() {
     }
@@ -80,11 +80,11 @@ public class ConditionOSQueryBuilderDispatcher {
     }
 
     public Query buildFilter(Condition condition) {
-        return buildFilter(condition, new HashMap<String, Object>());
+        return buildFilter(condition, new HashMap<>());
     }
 
     public Query buildFilter(Condition condition, Map<String, Object> context) 
{
-        if(condition == null || condition.getConditionType() == null) {
+        if (condition == null || condition.getConditionType() == null) {
             throw new IllegalArgumentException("Condition is null or doesn't 
have type, impossible to build filter");
         }
 
@@ -99,15 +99,14 @@ public class ConditionOSQueryBuilderDispatcher {
         }
 
         // Find the appropriate query builder key (new or legacy)
-        String finalQueryBuilderKey = support.findQueryBuilderKey(
+        String finalQueryBuilderKey = findQueryBuilderKey(
                 queryBuilderKey,
                 condition.getConditionTypeId(),
-                queryBuilders::containsKey,
-                LOGGER);
+                queryBuilders::containsKey);
 
         if (finalQueryBuilderKey != null) {
             ConditionOSQueryBuilder queryBuilder = 
queryBuilders.get(finalQueryBuilderKey);
-            Condition contextualCondition = support.contextualize(condition, 
context, scriptExecutor);
+            Condition contextualCondition = 
ConditionContextHelper.getContextualCondition(condition, context, 
scriptExecutor);
             if (contextualCondition != null) {
                 return queryBuilder.buildQuery(contextualCondition, context, 
this);
             }
@@ -127,7 +126,7 @@ public class ConditionOSQueryBuilderDispatcher {
     }
 
     public long count(Condition condition, Map<String, Object> context) {
-        if(condition == null || condition.getConditionType() == null) {
+        if (condition == null || condition.getConditionType() == null) {
             throw new IllegalArgumentException("Condition is null or doesn't 
have type, impossible to build filter");
         }
 
@@ -142,15 +141,14 @@ public class ConditionOSQueryBuilderDispatcher {
         }
 
         // Find the appropriate query builder key (new or legacy)
-        String finalQueryBuilderKey = support.findQueryBuilderKey(
+        String finalQueryBuilderKey = findQueryBuilderKey(
                 queryBuilderKey,
                 condition.getConditionTypeId(),
-                queryBuilders::containsKey,
-                LOGGER);
+                queryBuilders::containsKey);
 
         if (finalQueryBuilderKey != null) {
             ConditionOSQueryBuilder queryBuilder = 
queryBuilders.get(finalQueryBuilderKey);
-            Condition contextualCondition = support.contextualize(condition, 
context, scriptExecutor);
+            Condition contextualCondition = 
ConditionContextHelper.getContextualCondition(condition, context, 
scriptExecutor);
             if (contextualCondition != null) {
                 return queryBuilder.count(contextualCondition, context, this);
             }
@@ -164,4 +162,9 @@ public class ConditionOSQueryBuilderDispatcher {
         throw new UnsupportedOperationException();
     }
 
+    @Override
+    protected Logger getLogger() {
+        return LOGGER;
+    }
+
 }
diff --git 
a/persistence-spi/src/main/java/org/apache/unomi/persistence/spi/conditions/dispatcher/ConditionQueryBuilderDispatcherSupport.java
 
b/persistence-spi/src/main/java/org/apache/unomi/persistence/spi/conditions/dispatcher/ConditionQueryBuilderDispatcher.java
similarity index 58%
rename from 
persistence-spi/src/main/java/org/apache/unomi/persistence/spi/conditions/dispatcher/ConditionQueryBuilderDispatcherSupport.java
rename to 
persistence-spi/src/main/java/org/apache/unomi/persistence/spi/conditions/dispatcher/ConditionQueryBuilderDispatcher.java
index 0533f768f..95c3f2a40 100644
--- 
a/persistence-spi/src/main/java/org/apache/unomi/persistence/spi/conditions/dispatcher/ConditionQueryBuilderDispatcherSupport.java
+++ 
b/persistence-spi/src/main/java/org/apache/unomi/persistence/spi/conditions/dispatcher/ConditionQueryBuilderDispatcher.java
@@ -17,26 +17,20 @@
 
 package org.apache.unomi.persistence.spi.conditions.dispatcher;
 
-import org.apache.unomi.api.conditions.Condition;
-import org.apache.unomi.persistence.spi.conditions.ConditionContextHelper;
-import org.apache.unomi.scripting.ScriptExecutor;
 import org.slf4j.Logger;
 
 import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Predicate;
 
 /**
- * Shared helper for condition query builder dispatchers (ES/OS). Centralizes 
logic that is
- * backend-agnostic: contextualization, legacy ID mapping with logging, and 
queryBuilder key resolution.
+ * Abstract base class for condition query builder dispatchers (ES/OS). 
Centralizes logic that is
+ * backend-agnostic: legacy ID mapping with logging, and queryBuilder key 
resolution.
  * The legacy-to-new queryBuilder identifiers are centralized here in
- * {@link #LEGACY_TO_NEW_QUERY_BUILDER_IDS} and are used by the no-arg
- * {@link #resolveLegacyQueryBuilderId(String, String, org.slf4j.Logger)} and
- * {@link #findQueryBuilderKey(String, String, java.util.function.Predicate, 
org.slf4j.Logger)} methods.
- * This helper intentionally avoids any dependency on backend-specific query 
types.
+ * {@link #LEGACY_TO_NEW_QUERY_BUILDER_IDS} and are used by the
+ * {@link #findQueryBuilderKey(String, String, java.util.function.Predicate)} 
method.
+ * This abstract class intentionally avoids any dependency on backend-specific 
query types.
  */
-public class ConditionQueryBuilderDispatcherSupport {
+public abstract class ConditionQueryBuilderDispatcher {
 
     /**
      * Backend-agnostic legacy-to-new mapping of queryBuilder identifiers.
@@ -54,40 +48,23 @@ public class ConditionQueryBuilderDispatcherSupport {
     );
 
     /**
-     * Cache to track which condition type + queryBuilderId combinations have 
already logged deprecation warnings.
-     * Key format: "conditionTypeId:queryBuilderId"
+     * Returns the logger instance for the concrete dispatcher implementation.
+     *
+     * @return the logger instance
      */
-    private static final Set<String> loggedDeprecationWarnings = 
ConcurrentHashMap.newKeySet();
-
-    /**
-     * Returns a contextualized copy of the provided condition if any dynamic 
parameters are present,
-     * otherwise returns {@code null} to indicate that a default fallback 
should be used by callers.
-     */
-    public Condition contextualize(Condition condition, Map<String, Object> 
context, ScriptExecutor scriptExecutor) {
-        return ConditionContextHelper.getContextualCondition(condition, 
context, scriptExecutor);
-    }
+    protected abstract Logger getLogger();
 
     /**
      * Resolves a legacy queryBuilder identifier to its new canonical 
identifier and logs a deprecation warning.
      * Returns {@code null} if the provided identifier is not legacy-mapped.
-     * The deprecation warning is only logged once per unique condition type + 
queryBuilderId combination
-     * to avoid polluting logs with repeated warnings for the same condition.
      */
-    public String resolveLegacyQueryBuilderId(String queryBuilderId, String 
conditionTypeId, Logger logger) {
+    private String resolveLegacyQueryBuilderId(String queryBuilderId, String 
conditionTypeId) {
         if (!LEGACY_TO_NEW_QUERY_BUILDER_IDS.containsKey(queryBuilderId)) {
             return null;
         }
         String mappedId = LEGACY_TO_NEW_QUERY_BUILDER_IDS.get(queryBuilderId);
-        
-        // Create a unique key for this condition type + queryBuilderId 
combination
-        String warningKey = conditionTypeId + ":" + queryBuilderId;
-        
-        // Only log the warning once per unique combination
-        if (loggedDeprecationWarnings.add(warningKey)) {
-        logger.warn("DEPRECATED: Using legacy queryBuilderId '{}' for 
condition type '{}'. Please update your condition definition to use the new 
queryBuilderId '{}'. Legacy mappings are deprecated and may be removed in 
future versions.",
+        getLogger().warn("DEPRECATED: Using legacy queryBuilderId '{}' for 
condition type '{}'. Please update your condition definition to use the new 
queryBuilderId '{}'. Legacy mappings are deprecated and may be removed in 
future versions.",
                 queryBuilderId, conditionTypeId, mappedId);
-        }
-        
         return mappedId;
     }
 
@@ -96,11 +73,11 @@ public class ConditionQueryBuilderDispatcherSupport {
      * The {@code hasBuilder} predicate is used to test the presence of a 
builder for a given key.
      */
     public String findQueryBuilderKey(String queryBuilderKey, String 
conditionTypeId,
-                                      Predicate<String> hasBuilder, Logger 
logger) {
+                                      Predicate<String> hasBuilder) {
         if (hasBuilder.test(queryBuilderKey)) {
             return queryBuilderKey;
         }
-        String legacyMappedId = resolveLegacyQueryBuilderId(queryBuilderKey, 
conditionTypeId, logger);
+        String legacyMappedId = resolveLegacyQueryBuilderId(queryBuilderKey, 
conditionTypeId);
         if (legacyMappedId != null && hasBuilder.test(legacyMappedId)) {
             return legacyMappedId;
         }
@@ -108,4 +85,3 @@ public class ConditionQueryBuilderDispatcherSupport {
     }
 }
 
-
diff --git a/pom.xml b/pom.xml
index f38c68222..8be830a89 100644
--- a/pom.xml
+++ b/pom.xml
@@ -143,7 +143,6 @@
         <maven-javadoc.plugin.version>3.11.2</maven-javadoc.plugin.version>
         <maven-surefire.plugin.version>3.5.3</maven-surefire.plugin.version>
         <maven-failsafe.plugin.version>3.5.2</maven-failsafe.plugin.version>
-        <maven-source.plugin.version>3.3.1</maven-source.plugin.version>
         <maven-assembly.plugin.version>3.7.1</maven-assembly.plugin.version>
         
<maven-dependency.plugin.version>3.8.1</maven-dependency.plugin.version>
         
<maven-scm-publish.plugin.version>3.2.1</maven-scm-publish.plugin.version>
@@ -889,19 +888,6 @@
                     <artifactId>maven-failsafe-plugin</artifactId>
                     <version>${maven-failsafe.plugin.version}</version>
                 </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-source-plugin</artifactId>
-                    <version>${maven-source.plugin.version}</version>
-                    <executions>
-                        <execution>
-                            <id>attach-sources</id>
-                            <goals>
-                                <goal>jar</goal>
-                            </goals>
-                        </execution>
-                    </executions>
-                </plugin>
                 <plugin>
                     <artifactId>maven-assembly-plugin</artifactId>
                     <version>${maven-assembly.plugin.version}</version>

Reply via email to