This is an automated email from the ASF dual-hosted git repository.
markt pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tomcat-jakartaee-migration.git
The following commit(s) were added to refs/heads/main by this push:
new bc2c118 Add a shared, optional, file-based jar cache to improve
performance of follow-on runs of the tool (#87)
bc2c118 is described below
commit bc2c1184b787f328550fe92f1356b5cc656e1a5e
Author: Aaron Cosand <[email protected]>
AuthorDate: Thu Nov 20 12:28:26 2025 -0500
Add a shared, optional, file-based jar cache to improve performance of
follow-on runs of the tool (#87)
---
.../org/apache/tomcat/jakartaee/CacheEntry.java | 118 +++++
.../org/apache/tomcat/jakartaee/Migration.java | 79 +++-
.../org/apache/tomcat/jakartaee/MigrationCLI.java | 38 +-
.../apache/tomcat/jakartaee/MigrationCache.java | 481 +++++++++++++++++++++
.../tomcat/jakartaee/LocalStrings.properties | 35 +-
.../tomcat/jakartaee/MigrationCacheTest.java | 269 ++++++++++++
.../org/apache/tomcat/jakartaee/MigrationTest.java | 151 +++++++
7 files changed, 1160 insertions(+), 11 deletions(-)
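As a minimal sketch of how the new, optional cache can be wired up when the tool
is used programmatically (mirroring the Migration.setCache() API and the
MigrationCache(File, int) constructor introduced below, and the usage in
MigrationTest.java; the paths are illustrative only):

    import java.io.File;

    import org.apache.tomcat.jakartaee.Migration;
    import org.apache.tomcat.jakartaee.MigrationCache;

    public class CachedMigrationExample {
        public static void main(String[] args) throws Exception {
            // Shared, file-based cache; entries not accessed for 30 days are pruned
            File cacheDir = new File(System.getProperty("user.home"), ".migration-cache");
            MigrationCache cache = new MigrationCache(cacheDir, 30);

            Migration migration = new Migration();
            migration.setSource(new File("app.war"));               // illustrative source
            migration.setDestination(new File("app-jakarta.war"));  // illustrative destination
            migration.setCache(cache);                               // optional; omit to run without caching
            migration.execute();                                     // saves cache metadata and prunes expired entries
        }
    }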
diff --git a/src/main/java/org/apache/tomcat/jakartaee/CacheEntry.java
b/src/main/java/org/apache/tomcat/jakartaee/CacheEntry.java
new file mode 100644
index 0000000..710de8d
--- /dev/null
+++ b/src/main/java/org/apache/tomcat/jakartaee/CacheEntry.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.tomcat.jakartaee;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.commons.io.IOUtils;
+
+/**
+ * Represents a single cache entry with operations for reading and writing.
+ * Package-private - only created by MigrationCache.
+ */
+class CacheEntry {
+ private final String hash;
+ private final boolean exists;
+ private final File cacheFile;
+ private final File tempFile;
+
+ CacheEntry(String hash, boolean exists, File cacheFile, File tempFile) {
+ this.hash = hash;
+ this.exists = exists;
+ this.cacheFile = cacheFile;
+ this.tempFile = tempFile;
+ }
+
+ /**
+ * Check if this entry exists in the cache.
+ * @return true if cached
+ */
+ public boolean exists() {
+ return exists;
+ }
+
+ /**
+ * Get the hash for this cache entry.
+ * @return the hash string
+ */
+ public String getHash() {
+ return hash;
+ }
+
+ /**
+ * Copy cached content to destination output stream.
+ * @param dest the destination output stream
+ * @throws IOException if an I/O error occurs
+ */
+ public void copyToDestination(OutputStream dest) throws IOException {
+ if (!exists) {
+ throw new IllegalStateException("Cannot copy - cache entry does
not exist");
+ }
+ try (FileInputStream fis = new FileInputStream(cacheFile)) {
+ IOUtils.copy(fis, dest);
+ }
+ }
+
+ /**
+ * Begin storing to cache - returns an output stream to a temp file.
+ * @return output stream to write converted content to
+ * @throws IOException if an I/O error occurs
+ */
+ public OutputStream beginStore() throws IOException {
+ return new FileOutputStream(tempFile);
+ }
+
+ /**
+ * Commit the store operation - move temp file to final cache location.
+ * @throws IOException if an I/O error occurs
+ */
+ public void commitStore() throws IOException {
+ if (!tempFile.exists()) {
+ throw new IOException("Temp file does not exist: " + tempFile);
+ }
+ // Ensure parent directory exists
+ File parentDir = cacheFile.getParentFile();
+ if (!parentDir.exists()) {
+ parentDir.mkdirs();
+ }
+ // Atomic rename
+ if (!tempFile.renameTo(cacheFile)) {
+ throw new IOException("Failed to rename temp file to cache file: "
+ tempFile + " -> " + cacheFile);
+ }
+ }
+
+ /**
+ * Get the size of the cached file in bytes.
+ * @return the file size in bytes
+ */
+ public long getFileSize() {
+ return cacheFile.length();
+ }
+
+ /**
+ * Rollback the store operation - delete temp file.
+ */
+ public void rollbackStore() {
+ if (tempFile.exists()) {
+ tempFile.delete();
+ }
+ }
+}
diff --git a/src/main/java/org/apache/tomcat/jakartaee/Migration.java
b/src/main/java/org/apache/tomcat/jakartaee/Migration.java
index 00fa3eb..9f15698 100644
--- a/src/main/java/org/apache/tomcat/jakartaee/Migration.java
+++ b/src/main/java/org/apache/tomcat/jakartaee/Migration.java
@@ -107,6 +107,7 @@ public class Migration {
private File destination;
private final List<Converter> converters;
private final Set<String> excludes = new HashSet<>();
+ private MigrationCache cache;
/**
* Construct a new migration tool instance.
@@ -211,6 +212,14 @@ public class Migration {
this.destination = destination;
}
+ /**
+ * Set the migration cache for storing pre-converted archives.
+ * @param cache the migration cache instance (null to disable caching)
+ */
+ public void setCache(MigrationCache cache) {
+ this.cache = cache;
+ }
+
/**
* <b>NOTE</b>:
@@ -257,6 +266,12 @@ public class Migration {
}
}
state = State.COMPLETE;
+
+ // Finalize cache operations (save metadata and prune expired entries)
+ if (cache != null) {
+ cache.finalizeCacheOperations();
+ }
+
logger.log(Level.INFO, sm.getString("migration.done",
Long.valueOf(TimeUnit.MILLISECONDS.convert(System.nanoTime() -
t1, TimeUnit.NANOSECONDS))));
}
@@ -415,14 +430,62 @@ public class Migration {
Util.copy(src, dest);
logger.log(Level.INFO, sm.getString("migration.skip", name));
} else if (isArchive(name)) {
- if (zipInMemory) {
- logger.log(Level.INFO,
sm.getString("migration.archive.memory", name));
- convertedStream = migrateArchiveInMemory(src, dest);
- logger.log(Level.INFO,
sm.getString("migration.archive.complete", name));
- } else {
- logger.log(Level.INFO,
sm.getString("migration.archive.stream", name));
- convertedStream = migrateArchiveStreaming(src, dest);
- logger.log(Level.INFO,
sm.getString("migration.archive.complete", name));
+ // Only cache nested archives (e.g., JARs inside WARs), not
top-level files
+ // Top-level files will have absolute paths starting with "/"
+ boolean isNestedArchive = !name.startsWith("/") &&
!name.startsWith("\\");
+
+ CacheEntry cacheEntry = null;
+ if (isNestedArchive && cache != null) {
+ // Buffer source to compute hash and check cache
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ IOUtils.copy(src, buffer);
+ byte[] sourceBytes = buffer.toByteArray();
+
+ // Get cache entry (computes hash and marks as accessed)
+ cacheEntry = cache.getCacheEntry(sourceBytes, profile);
+
+ if (cacheEntry.exists()) {
+ // Cache hit! Copy cached result to dest and return
+ logger.log(Level.INFO, sm.getString("cache.hit", name,
cacheEntry.getHash()));
+ cacheEntry.copyToDestination(dest);
+ return true;
+ }
+
+ // Cache miss - use buffered source for conversion
+ logger.log(Level.FINE, sm.getString("cache.miss", name,
cacheEntry.getHash()));
+ src = new ByteArrayInputStream(sourceBytes);
+ }
+
+ // Process archive - stream directly to destination (and cache if
needed)
+ OutputStream targetOutputStream = dest;
+ if (cacheEntry != null) {
+ // Tee output to both destination and cache temp file
+ targetOutputStream = new
org.apache.commons.io.output.TeeOutputStream(dest, cacheEntry.beginStore());
+ }
+
+ try {
+ if (zipInMemory) {
+ logger.log(Level.INFO,
sm.getString("migration.archive.memory", name));
+ convertedStream = migrateArchiveInMemory(src,
targetOutputStream);
+ logger.log(Level.INFO,
sm.getString("migration.archive.complete", name));
+ } else {
+ logger.log(Level.INFO,
sm.getString("migration.archive.stream", name));
+ convertedStream = migrateArchiveStreaming(src,
targetOutputStream);
+ logger.log(Level.INFO,
sm.getString("migration.archive.complete", name));
+ }
+
+ // Commit to cache on success
+ if (cacheEntry != null) {
+ cacheEntry.commitStore();
+ logger.log(Level.FINE, sm.getString("cache.store",
cacheEntry.getHash(),
+ Long.valueOf(cacheEntry.getFileSize())));
+ }
+ } catch (IOException e) {
+ // Rollback cache on error
+ if (cacheEntry != null) {
+ cacheEntry.rollbackStore();
+ }
+ throw e;
}
} else {
for (Converter converter : converters) {
diff --git a/src/main/java/org/apache/tomcat/jakartaee/MigrationCLI.java
b/src/main/java/org/apache/tomcat/jakartaee/MigrationCLI.java
index ca10baf..6f1a117 100644
--- a/src/main/java/org/apache/tomcat/jakartaee/MigrationCLI.java
+++ b/src/main/java/org/apache/tomcat/jakartaee/MigrationCLI.java
@@ -38,6 +38,9 @@ public class MigrationCLI {
private static final String PROFILE_ARG = "-profile=";
private static final String ZIPINMEMORY_ARG = "-zipInMemory";
private static final String MATCHEXCLUDESPATH_ARG
="-matchExcludesAgainstPathName";
+ private static final String CACHE_ARG = "-cache";
+ private static final String CACHE_LOCATION_ARG = "-cacheLocation=";
+ private static final String CACHE_RETENTION_ARG = "-cacheRetention=";
/**
* Build the migration tool CLI instance.
@@ -55,7 +58,12 @@ public class MigrationCLI {
System.setProperty("java.util.logging.SimpleFormatter.format",
"%5$s%n");
Migration migration = new Migration();
- // Process arguments
+ // Cache settings - caching is opt-in (disabled by default)
+ File cacheDir = null;
+ boolean enableCache = false;
+ int cacheRetentionDays = 30; // Default retention period
+
+ // Process arguments
List<String> arguments = new ArrayList<>(Arrays.asList(args));
// Process the custom log level if present
@@ -95,6 +103,28 @@ public class MigrationCLI {
} else if (argument.equals(MATCHEXCLUDESPATH_ARG)) {
iter.remove();
migration.setMatchExcludesAgainstPathName(true);
+ } else if (argument.equals(CACHE_ARG)) {
+ iter.remove();
+ enableCache = true;
+ // Use default cache directory if not specified via
-cacheLocation
+ if (cacheDir == null) {
+ cacheDir = new File(System.getProperty("user.home"),
".migration-cache");
+ }
+ } else if (argument.startsWith(CACHE_LOCATION_ARG)) {
+ iter.remove();
+ String cachePath =
argument.substring(CACHE_LOCATION_ARG.length());
+ cacheDir = new File(cachePath);
+ } else if (argument.startsWith(CACHE_RETENTION_ARG)) {
+ iter.remove();
+ String retentionStr =
argument.substring(CACHE_RETENTION_ARG.length());
+ try {
+ cacheRetentionDays = Integer.parseInt(retentionStr);
+ if (cacheRetentionDays < 1) {
+ invalidArguments();
+ }
+ } catch (NumberFormatException e) {
+ invalidArguments();
+ }
}
}
@@ -108,6 +138,12 @@ public class MigrationCLI {
migration.setSource(new File(source));
migration.setDestination(new File(dest));
+ // Only enable cache if -cache argument was provided
+ if (enableCache) {
+ MigrationCache migrationCache = new MigrationCache(cacheDir,
cacheRetentionDays);
+ migration.setCache(migrationCache);
+ }
+
migration.execute();
}
diff --git a/src/main/java/org/apache/tomcat/jakartaee/MigrationCache.java
b/src/main/java/org/apache/tomcat/jakartaee/MigrationCache.java
new file mode 100644
index 0000000..d7bde18
--- /dev/null
+++ b/src/main/java/org/apache/tomcat/jakartaee/MigrationCache.java
@@ -0,0 +1,481 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.tomcat.jakartaee;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.time.LocalDate;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeParseException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * Cache for storing and retrieving pre-converted archive files.
+ *
+ * <h2>Cache Structure</h2>
+ * <p>The cache organizes files in a directory structure based on hash
values:</p>
+ * <pre>
+ * {cacheDir}/
+ * ├── cache-metadata.txt # Metadata file tracking access times
+ * ├── {XX}/ # Subdirectory named by first 2 chars of hash
+ * │ └── {hash}.jar # Cached converted archive (full SHA-256 hash)
+ * ├── {YY}/
+ * │ └── {hash}.jar
+ * └── temp-{uuid}.tmp # Temporary files during conversion
+ * </pre>
+ *
+ * <h2>Cache Key</h2>
+ * <p>Each cache entry is keyed by a SHA-256 hash computed from:</p>
+ * <ul>
+ * <li>The migration profile name (e.g., "TOMCAT", "EE")</li>
+ * <li>The pre-conversion archive content (as bytes)</li>
+ * </ul>
+ * <p>This ensures that the same archive converted with different profiles
+ * produces different cache entries.</p>
+ *
+ * <h2>Metadata Format</h2>
+ * <p>The {@code cache-metadata.txt} file tracks access times for cache
pruning:</p>
+ * <pre>
+ * # Migration cache metadata - hash|last_access_date
+ * {hash}|{YYYY-MM-DD}
+ * {hash}|{YYYY-MM-DD}
+ * </pre>
+ *
+ * <h2>Temporary Files</h2>
+ * <p>During conversion, output is written to temporary files named {@code
temp-{uuid}.tmp}.
+ * These files are cleaned up on startup to handle crashes or unexpected
shutdowns.</p>
+ */
+public class MigrationCache {
+
+ private static final Logger logger =
Logger.getLogger(MigrationCache.class.getCanonicalName());
+ private static final StringManager sm =
StringManager.getManager(MigrationCache.class);
+ private static final String METADATA_FILE = "cache-metadata.txt";
+ private static final DateTimeFormatter DATE_FORMATTER =
DateTimeFormatter.ISO_LOCAL_DATE;
+
+ private final File cacheDir;
+ private final int retentionDays;
+ private final Map<String, LocalDate> cacheMetadata;
+ private final File metadataFile;
+
+ /**
+ * Construct a new migration cache.
+ *
+ * @param cacheDir the directory to store cached files (null to disable
caching)
+ * @param retentionDays the number of days to retain cached files
+ * @throws IOException if the cache directory cannot be created
+ */
+ public MigrationCache(File cacheDir, int retentionDays) throws IOException
{
+ this.retentionDays = retentionDays;
+ this.cacheMetadata = new HashMap<>();
+ this.cacheDir = cacheDir;
+ this.metadataFile = cacheDir == null ? null : new File(cacheDir,
METADATA_FILE);
+
+ if (cacheDir != null) {
+ // Create cache directory if it doesn't exist
+ if (!cacheDir.exists()) {
+ if (!cacheDir.mkdirs()) {
+ throw new IOException(sm.getString("cache.cannotCreate",
cacheDir.getAbsolutePath()));
+ }
+ }
+
+ if (!cacheDir.isDirectory()) {
+ throw new IOException(sm.getString("cache.notDirectory",
cacheDir.getAbsolutePath()));
+ }
+
+ // Load existing metadata
+ loadMetadata();
+
+ // Clean up any orphaned temp files from previous crashes
+ cleanupTempFiles();
+
+ logger.log(Level.INFO, sm.getString("cache.enabled",
cacheDir.getAbsolutePath(), retentionDays));
+ }
+ }
+
+ /**
+ * Clean up any temporary files left over from previous crashes or
unexpected shutdowns.
+ * Scans the cache directory for temp-*.tmp files and deletes them.
+ */
+ private void cleanupTempFiles() {
+ File[] files = cacheDir.listFiles();
+ if (files != null) {
+ int cleanedCount = 0;
+ for (File file : files) {
+ if (file.isFile() && file.getName().startsWith("temp-") &&
file.getName().endsWith(".tmp")) {
+ if (file.delete()) {
+ cleanedCount++;
+ logger.log(Level.FINE,
sm.getString("cache.tempfile.cleaned", file.getName()));
+ } else {
+ logger.log(Level.WARNING,
sm.getString("cache.tempfile.cleanFailed", file.getName()));
+ }
+ }
+ }
+ if (cleanedCount > 0) {
+ logger.log(Level.INFO, sm.getString("cache.tempfiles.cleaned",
cleanedCount));
+ }
+ }
+ }
+
+ /**
+ * Load cache metadata from disk.
+ * Format: hash|YYYY-MM-DD
+ * If file doesn't exist or is corrupt, assumes all existing cached jars
were accessed today.
+ */
+ private void loadMetadata() {
+ LocalDate today = LocalDate.now();
+
+ if (!metadataFile.exists()) {
+ // Metadata file doesn't exist - scan cache directory and assume
all files accessed today
+ logger.log(Level.FINE, sm.getString("cache.metadata.notFound"));
+ scanCacheDirectory(today);
+ return;
+ }
+
+ try (BufferedReader reader = new BufferedReader(new
FileReader(metadataFile))) {
+ String line;
+ while ((line = reader.readLine()) != null) {
+ line = line.trim();
+ if (line.isEmpty() || line.startsWith("#")) {
+ continue;
+ }
+
+ String[] parts = line.split("\\|");
+ if (parts.length == 2) {
+ String hash = parts[0];
+ try {
+ LocalDate lastAccessed = LocalDate.parse(parts[1],
DATE_FORMATTER);
+ cacheMetadata.put(hash, lastAccessed);
+ } catch (DateTimeParseException e) {
+ logger.log(Level.WARNING,
sm.getString("cache.metadata.invalidDate", line));
+ }
+ } else {
+ logger.log(Level.WARNING,
sm.getString("cache.metadata.invalidLine", line));
+ }
+ }
+
+ // Check for any cached files not in metadata and add them with
today's date
+ Set<String> existingHashes = scanCacheDirectory(null);
+ for (String hash : existingHashes) {
+ if (!cacheMetadata.containsKey(hash)) {
+ cacheMetadata.put(hash, today);
+ }
+ }
+
+ logger.log(Level.FINE, sm.getString("cache.metadata.loaded",
cacheMetadata.size()));
+ } catch (IOException e) {
+ // Corrupt or unreadable - assume all cached files accessed today
+ logger.log(Level.WARNING,
sm.getString("cache.metadata.loadError"), e);
+ cacheMetadata.clear();
+ scanCacheDirectory(today);
+ }
+ }
+
+ /**
+ * Scan cache directory for existing cache files and return their hashes.
+ * If accessDate is not null, adds all found hashes to metadata with that
date.
+ *
+ * @param accessDate the date to use for all found files (null to not
update metadata)
+ * @return set of hashes found in cache directory
+ */
+ private Set<String> scanCacheDirectory(LocalDate accessDate) {
+ Set<String> hashes = new HashSet<>();
+
+ File[] subdirs = cacheDir.listFiles();
+ if (subdirs != null) {
+ for (File subdir : subdirs) {
+ if (subdir.isDirectory()) {
+ File[] files = subdir.listFiles();
+ if (files != null) {
+ for (File file : files) {
+ if (file.isFile() &&
file.getName().endsWith(".jar")) {
+ String hash = file.getName().substring(0,
file.getName().length() - 4);
+ hashes.add(hash);
+ if (accessDate != null) {
+ cacheMetadata.put(hash, accessDate);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return hashes;
+ }
+
+ /**
+ * Get a cache entry for the given source bytes and profile.
+ * This computes the hash, checks if cached, and marks the entry as
accessed.
+ *
+ * @param sourceBytes the pre-conversion content
+ * @param profile the migration profile being used
+ * @return a CacheEntry object with all operations for this entry
+ * @throws IOException if an I/O error occurs
+ */
+ public CacheEntry getCacheEntry(byte[] sourceBytes, EESpecProfile profile)
throws IOException {
+ if (cacheDir == null) {
+ throw new IllegalStateException("Cache is not enabled");
+ }
+
+ // Compute hash once (includes profile)
+ String hash = computeHash(sourceBytes, profile);
+
+ // Get cache file location
+ File cachedFile = getCacheFile(hash);
+ boolean exists = cachedFile.exists();
+
+ // Create temp file for storing
+ File tempFile = new File(cacheDir, "temp-" + UUID.randomUUID() +
".tmp");
+
+ // Mark as accessed now
+ updateAccessTime(hash);
+
+ return new CacheEntry(hash, exists, cachedFile, tempFile);
+ }
+
+
+ /**
+ * Get the cache file for a given hash.
+ *
+ * @param hash the hash string
+ * @return the cache file
+ */
+ private File getCacheFile(String hash) {
+ // Use subdirectories based on first 2 chars of hash to avoid too many
files in one directory
+ String subdir = hash.substring(0, 2);
+ File subdirFile = new File(cacheDir, subdir);
+ if (!subdirFile.exists()) {
+ subdirFile.mkdirs();
+ }
+ return new File(subdirFile, hash + ".jar");
+ }
+
+ /**
+ * Compute SHA-256 hash of the given bytes combined with the profile name.
+ * The profile is included to ensure different profiles produce different
cache entries.
+ *
+ * @param bytes the bytes to hash
+ * @param profile the migration profile
+ * @return the hash as a hex string
+ * @throws IOException if hashing fails
+ */
+ private String computeHash(byte[] bytes, EESpecProfile profile) throws
IOException {
+ try {
+ MessageDigest digest = MessageDigest.getInstance("SHA-256");
+ // Include profile name in hash to differentiate between profiles
+
digest.update(profile.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8));
+ digest.update(bytes);
+ byte[] hashBytes = digest.digest();
+
+ // Convert to hex string
+ StringBuilder sb = new StringBuilder();
+ for (byte b : hashBytes) {
+ sb.append(String.format("%02x", b));
+ }
+ return sb.toString();
+ } catch (NoSuchAlgorithmException e) {
+ throw new IOException(sm.getString("cache.hashError"), e);
+ }
+ }
+
+ /**
+ * Clear the cache directory.
+ *
+ * @throws IOException if an I/O error occurs
+ */
+ public void clear() throws IOException {
+ if (cacheDir == null) {
+ return;
+ }
+
+ deleteDirectory(cacheDir);
+ cacheDir.mkdirs();
+ logger.log(Level.INFO, sm.getString("cache.cleared"));
+ }
+
+ /**
+ * Recursively delete a directory.
+ *
+ * @param dir the directory to delete
+ * @throws IOException if an I/O error occurs
+ */
+ private void deleteDirectory(File dir) throws IOException {
+ if (dir.isDirectory()) {
+ File[] files = dir.listFiles();
+ if (files != null) {
+ for (File file : files) {
+ deleteDirectory(file);
+ }
+ }
+ }
+ if (!Files.deleteIfExists(dir.toPath()) && dir.exists()) {
+ throw new IOException(sm.getString("cache.deleteFailed",
dir.getAbsolutePath()));
+ }
+ }
+
+ /**
+ * Update the access time for a cache entry.
+ *
+ * @param hash the hash of the cache entry
+ */
+ private void updateAccessTime(String hash) {
+ cacheMetadata.put(hash, LocalDate.now());
+ }
+
+ /**
+ * Save cache metadata to disk.
+ * Format: hash|YYYY-MM-DD
+ *
+ * @throws IOException if an I/O error occurs
+ */
+ private void saveMetadata() throws IOException {
+ if (cacheDir == null) {
+ return;
+ }
+
+ try (BufferedWriter writer = new BufferedWriter(new
FileWriter(metadataFile))) {
+ writer.write("# Migration cache metadata -
hash|last_access_date\n");
+ for (Map.Entry<String, LocalDate> entry :
cacheMetadata.entrySet()) {
+ writer.write(entry.getKey());
+ writer.write("|");
+ writer.write(entry.getValue().format(DATE_FORMATTER));
+ writer.write("\n");
+ }
+ }
+
+ logger.log(Level.FINE, sm.getString("cache.metadata.saved",
cacheMetadata.size()));
+ }
+
+ /**
+ * Prune cache entries that haven't been accessed within the retention
period.
+ * This should be called after migration completes.
+ *
+ * @throws IOException if an I/O error occurs
+ */
+ public void pruneCache() throws IOException {
+ if (cacheDir == null) {
+ return;
+ }
+
+ LocalDate cutoffDate = LocalDate.now().minusDays(retentionDays);
+ int prunedCount = 0;
+ long prunedSize = 0;
+
+ Set<String> toRemove = new HashSet<>();
+
+ for (Map.Entry<String, LocalDate> entry : cacheMetadata.entrySet()) {
+ String hash = entry.getKey();
+ LocalDate lastAccessed = entry.getValue();
+
+ if (lastAccessed.isBefore(cutoffDate)) {
+ File cachedFile = getCacheFile(hash);
+ if (cachedFile.exists()) {
+ long fileSize = cachedFile.length();
+ if (cachedFile.delete()) {
+ prunedSize += fileSize;
+ prunedCount++;
+ toRemove.add(hash);
+ logger.log(Level.FINE,
sm.getString("cache.pruned.entry", hash, lastAccessed));
+ } else {
+ logger.log(Level.WARNING,
sm.getString("cache.pruned.failed", hash));
+ }
+ } else {
+ // File doesn't exist, remove from metadata anyway
+ toRemove.add(hash);
+ }
+ }
+ }
+
+ // Remove pruned entries from metadata
+ for (String hash : toRemove) {
+ cacheMetadata.remove(hash);
+ }
+
+ // Save updated metadata
+ saveMetadata();
+
+ if (prunedCount > 0) {
+ logger.log(Level.INFO, sm.getString("cache.pruned.summary",
prunedCount, prunedSize / 1024 / 1024, retentionDays));
+ } else {
+ logger.log(Level.FINE, sm.getString("cache.pruned.none",
retentionDays));
+ }
+ }
+
+ /**
+ * Finalize cache operations - save metadata and perform cleanup.
+ * Should be called after migration completes.
+ *
+ * @throws IOException if an I/O error occurs
+ */
+ public void finalizeCacheOperations() throws IOException {
+ if (cacheDir == null) {
+ return;
+ }
+
+ // Save updated metadata
+ saveMetadata();
+
+ // Prune expired entries
+ pruneCache();
+ }
+
+ /**
+ * Get cache statistics.
+ *
+ * @return a string describing cache size and entry count
+ */
+ public String getStats() {
+ if (cacheDir == null) {
+ return sm.getString("cache.disabled");
+ }
+
+ long totalSize = 0;
+ int entryCount = 0;
+
+ File[] subdirs = cacheDir.listFiles();
+ if (subdirs != null) {
+ for (File subdir : subdirs) {
+ if (subdir.isDirectory()) {
+ File[] files = subdir.listFiles();
+ if (files != null) {
+ for (File file : files) {
+ if (file.isFile()) {
+ totalSize += file.length();
+ entryCount++;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return sm.getString("cache.stats", entryCount, totalSize / 1024 /
1024);
+ }
+}
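Taken together, CacheEntry and MigrationCache implement a small store-on-miss /
replay-on-hit protocol. Below is a minimal, self-contained sketch of that
lifecycle, modeled on MigrationCacheTest further down; it assumes it lives in
the org.apache.tomcat.jakartaee package (CacheEntry is package-private), and the
byte arrays and cache directory are illustrative only.

    package org.apache.tomcat.jakartaee;

    import java.io.ByteArrayOutputStream;
    import java.io.File;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;

    public class CacheEntryLifecycleSketch {
        public static void main(String[] args) throws Exception {
            MigrationCache cache = new MigrationCache(new File("target/cache-sketch"), 30);
            byte[] source = "pre-conversion archive bytes".getBytes(StandardCharsets.UTF_8);

            CacheEntry entry = cache.getCacheEntry(source, EESpecProfiles.TOMCAT);
            if (!entry.exists()) {
                // Cache miss: write the converted bytes to the entry's temp file, then publish it
                try (OutputStream os = entry.beginStore()) {
                    os.write("converted archive bytes".getBytes(StandardCharsets.UTF_8));
                }
                entry.commitStore();   // atomic rename into {cacheDir}/{first 2 hash chars}/{hash}.jar
            }

            // The next lookup for the same bytes and profile is a hit and replays the cached result
            CacheEntry again = cache.getCacheEntry(source, EESpecProfiles.TOMCAT);
            ByteArrayOutputStream replay = new ByteArrayOutputStream();
            again.copyToDestination(replay);

            cache.finalizeCacheOperations();   // save metadata and prune entries past the retention window
        }
    }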
diff --git
a/src/main/resources/org/apache/tomcat/jakartaee/LocalStrings.properties
b/src/main/resources/org/apache/tomcat/jakartaee/LocalStrings.properties
index 89c373b..89c0395 100644
--- a/src/main/resources/org/apache/tomcat/jakartaee/LocalStrings.properties
+++ b/src/main/resources/org/apache/tomcat/jakartaee/LocalStrings.properties
@@ -55,7 +55,17 @@ where options includes:\n\
\ -matchExcludesAgainstPathName\n\
\ By default, exclusions are matched against file name. If
this\n\
\ option is enabled, exclusions will be matched against the
full\n\
-\ path.
+\ path.\n\
+\ -cache\n\
+\ Enable caching of converted archives. This avoids
re-processing\n\
+\ unchanged bundled libraries. Cache is stored in
~/.migration-cache\n\
+\ by default, or use -cacheLocation to specify a custom
directory.\n\
+\ -cacheLocation=<directory path>\n\
+\ Specify a custom directory for caching converted archives.\n\
+\ Implies -cache.\n\
+\ -cacheRetention=<days>\n\
+\ Number of days to retain cached files (default: 30, minimum:
1).\n\
+\ Cache entries not accessed within this period will be removed.
migration.warnSignatureRemoval=Removed cryptographic signature from JAR file
@@ -68,4 +78,25 @@ manifestConverter.converted=Migrated manifest file [{0}]
manifestConverter.updated=Updated manifest file [{0}]
manifestConverter.updatedVersion=Updated manifest version to [{0}]
manifestConverter.removeSignature=Remove cryptographic signature for [{0}]
-manifestConverter.noConversion=No manifest conversion necessary for [{0}]
\ No newline at end of file
+manifestConverter.noConversion=No manifest conversion necessary for [{0}]
+
+cache.cannotCreate=Cannot create cache directory [{0}]
+cache.notDirectory=[{0}] is not a directory
+cache.enabled=Migration cache enabled at [{0}] with {1} day retention period
+cache.disabled=Cache is disabled
+cache.hit=Cache hit for archive [{0}] (hash: {1})
+cache.miss=Cache miss for archive [{0}] (hash: {1})
+cache.store=Stored converted archive in cache (hash: {0}, size: {1} bytes)
+cache.hashError=Error computing hash for cache
+cache.cleared=Cache cleared successfully
+cache.stats=Cache contains {0} entries, total size: {1} MB
+cache.metadata.notFound=Cache metadata file not found, initializing all cached
files with current date
+cache.metadata.loaded=Loaded {0} entries from cache metadata
+cache.metadata.saved=Saved {0} entries to cache metadata
+cache.metadata.loadError=Error loading cache metadata, assuming all cached
files accessed today
+cache.metadata.invalidLine=Invalid line in cache metadata: {0}
+cache.metadata.invalidDate=Invalid date in cache metadata: {0}
+cache.pruned.entry=Pruned cache entry {0} (last accessed: {1})
+cache.pruned.failed=Failed to delete cache entry {0}
+cache.pruned.summary=Pruned {0} cache entries totaling {1} MB (retention
period: {2} days)
+cache.pruned.none=No cache entries to prune (retention period: {0} days)
\ No newline at end of file
diff --git a/src/test/java/org/apache/tomcat/jakartaee/MigrationCacheTest.java
b/src/test/java/org/apache/tomcat/jakartaee/MigrationCacheTest.java
new file mode 100644
index 0000000..331edb8
--- /dev/null
+++ b/src/test/java/org/apache/tomcat/jakartaee/MigrationCacheTest.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tomcat.jakartaee;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class MigrationCacheTest {
+
+ private File tempCacheDir;
+
+ @Before
+ public void setUp() throws Exception {
+ // Create a temporary cache directory for each test
+ tempCacheDir =
Files.createTempDirectory("migration-cache-test").toFile();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ // Clean up the temporary cache directory
+ if (tempCacheDir != null && tempCacheDir.exists()) {
+ FileUtils.deleteDirectory(tempCacheDir);
+ }
+ }
+
+ @Test
+ public void testCacheEnabledWithValidDirectory() throws Exception {
+ new MigrationCache(tempCacheDir, 30);
+ assertTrue("Cache directory should exist", tempCacheDir.exists());
+ }
+
+ @Test
+ public void testCacheCreatesDirectory() throws Exception {
+ File newCacheDir = new File(tempCacheDir, "new-cache");
+ assertFalse("Cache directory should not exist yet",
newCacheDir.exists());
+
+ new MigrationCache(newCacheDir, 30);
+ assertTrue("Cache directory should be created", newCacheDir.exists());
+ }
+
+ @Test
+ public void testCacheMiss() throws Exception {
+ MigrationCache cache = new MigrationCache(tempCacheDir, 30);
+
+ byte[] sourceData = "test source
content".getBytes(StandardCharsets.UTF_8);
+
+ // Get cache entry - should not exist
+ CacheEntry entry = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ assertFalse("Cache entry should not exist", entry.exists());
+ assertNotNull("Hash should be computed", entry.getHash());
+ }
+
+ @Test
+ public void testCacheHit() throws Exception {
+ MigrationCache cache = new MigrationCache(tempCacheDir, 30);
+
+ byte[] sourceData = "test source
content".getBytes(StandardCharsets.UTF_8);
+ byte[] convertedData = "converted
content".getBytes(StandardCharsets.UTF_8);
+
+ // Store in cache
+ CacheEntry entry1 = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ assertFalse("Entry should not exist initially", entry1.exists());
+
+ try (OutputStream os = entry1.beginStore()) {
+ os.write(convertedData);
+ }
+ entry1.commitStore();
+
+ // Now check for cache hit
+ CacheEntry entry2 = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ assertTrue("Entry should exist now", entry2.exists());
+
+ ByteArrayOutputStream destOutput = new ByteArrayOutputStream();
+ entry2.copyToDestination(destOutput);
+ assertArrayEquals("Cached content should match",
+ convertedData, destOutput.toByteArray());
+ }
+
+ @Test
+ public void testCacheStoresAndRetrieves() throws Exception {
+ MigrationCache cache = new MigrationCache(tempCacheDir, 30);
+
+ byte[] sourceData = "original jar
content".getBytes(StandardCharsets.UTF_8);
+ byte[] convertedData = "migrated jar
content".getBytes(StandardCharsets.UTF_8);
+
+ // Store the conversion result
+ CacheEntry entry1 = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ try (OutputStream os = entry1.beginStore()) {
+ os.write(convertedData);
+ }
+ entry1.commitStore();
+
+ // Verify it was stored by trying to retrieve it
+ CacheEntry entry2 = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ assertTrue("Should be cached", entry2.exists());
+
+ ByteArrayOutputStream destOutput = new ByteArrayOutputStream();
+ entry2.copyToDestination(destOutput);
+ assertArrayEquals("Retrieved content should match stored content",
+ convertedData, destOutput.toByteArray());
+ }
+
+ @Test
+ public void testCacheDifferentContent() throws Exception {
+ MigrationCache cache = new MigrationCache(tempCacheDir, 30);
+
+ byte[] sourceData1 = "content 1".getBytes(StandardCharsets.UTF_8);
+ byte[] convertedData1 = "converted 1".getBytes(StandardCharsets.UTF_8);
+ byte[] sourceData2 = "content 2".getBytes(StandardCharsets.UTF_8);
+
+ // Store first conversion
+ CacheEntry entry1 = cache.getCacheEntry(sourceData1,
EESpecProfiles.TOMCAT);
+ try (OutputStream os = entry1.beginStore()) {
+ os.write(convertedData1);
+ }
+ entry1.commitStore();
+
+ // Check with different source content
+ CacheEntry entry2 = cache.getCacheEntry(sourceData2,
EESpecProfiles.TOMCAT);
+ assertFalse("Should be cache miss for different content",
entry2.exists());
+ }
+
+ @Test
+ public void testCacheClear() throws Exception {
+ MigrationCache cache = new MigrationCache(tempCacheDir, 30);
+
+ byte[] sourceData = "test content".getBytes(StandardCharsets.UTF_8);
+ byte[] convertedData = "converted
content".getBytes(StandardCharsets.UTF_8);
+
+ // Store in cache
+ CacheEntry entry1 = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ try (OutputStream os = entry1.beginStore()) {
+ os.write(convertedData);
+ }
+ entry1.commitStore();
+
+ // Verify it's cached
+ CacheEntry entry2 = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ assertTrue("Should be cache hit before clear", entry2.exists());
+
+ // Clear the cache
+ cache.clear();
+
+ // Verify it's no longer cached
+ CacheEntry entry3 = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ assertFalse("Should be cache miss after clear", entry3.exists());
+ }
+
+ @Test
+ public void testCacheStats() throws Exception {
+ MigrationCache cache = new MigrationCache(tempCacheDir, 30);
+
+ String stats = cache.getStats();
+ assertNotNull("Stats should not be null", stats);
+ assertTrue("Stats should contain entry count", stats.contains("0"));
+ }
+
+ @Test
+ public void testCacheStatsDisabled() throws Exception {
+ MigrationCache cache = new MigrationCache(null, 30);
+
+ String stats = cache.getStats();
+ assertNotNull("Stats should not be null", stats);
+ assertTrue("Stats should indicate cache is disabled",
stats.toLowerCase().contains("disabled"));
+ }
+
+ @Test
+ public void testCacheWithLargeContent() throws Exception {
+ MigrationCache cache = new MigrationCache(tempCacheDir, 30);
+
+ // Create large content (1MB)
+ byte[] sourceData = new byte[1024 * 1024];
+ for (int i = 0; i < sourceData.length; i++) {
+ sourceData[i] = (byte) (i % 256);
+ }
+ byte[] convertedData = new byte[1024 * 1024];
+ for (int i = 0; i < convertedData.length; i++) {
+ convertedData[i] = (byte) ((i + 100) % 256);
+ }
+
+ // Store and retrieve
+ CacheEntry entry1 = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ try (OutputStream os = entry1.beginStore()) {
+ os.write(convertedData);
+ }
+ entry1.commitStore();
+
+ CacheEntry entry2 = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ assertTrue("Should be cache hit for large content", entry2.exists());
+
+ ByteArrayOutputStream destOutput = new ByteArrayOutputStream();
+ entry2.copyToDestination(destOutput);
+ assertArrayEquals("Large content should be retrieved correctly",
+ convertedData, destOutput.toByteArray());
+ }
+
+ @Test
+ public void testCacheWithMultipleEntries() throws Exception {
+ MigrationCache cache = new MigrationCache(tempCacheDir, 30);
+
+ // Store multiple different entries
+ for (int i = 0; i < 5; i++) {
+ byte[] sourceData = ("source " +
i).getBytes(StandardCharsets.UTF_8);
+ byte[] convertedData = ("converted " +
i).getBytes(StandardCharsets.UTF_8);
+
+ CacheEntry entry = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ try (OutputStream os = entry.beginStore()) {
+ os.write(convertedData);
+ }
+ entry.commitStore();
+ }
+
+ // Verify all can be retrieved
+ for (int i = 0; i < 5; i++) {
+ byte[] sourceData = ("source " +
i).getBytes(StandardCharsets.UTF_8);
+ byte[] expectedConverted = ("converted " +
i).getBytes(StandardCharsets.UTF_8);
+
+ CacheEntry entry = cache.getCacheEntry(sourceData,
EESpecProfiles.TOMCAT);
+ assertTrue("Should be cache hit for entry " + i, entry.exists());
+
+ ByteArrayOutputStream destOutput = new ByteArrayOutputStream();
+ entry.copyToDestination(destOutput);
+ assertArrayEquals("Content should match for entry " + i,
+ expectedConverted, destOutput.toByteArray());
+ }
+ }
+
+ @Test
+ public void testCacheDisabledNoOperations() throws Exception {
+ MigrationCache cache = new MigrationCache(null, 30);
+
+ byte[] sourceData = "test content".getBytes(StandardCharsets.UTF_8);
+
+ // getCacheEntry should throw exception when cache is disabled
+ try {
+ cache.getCacheEntry(sourceData, EESpecProfiles.TOMCAT);
+ fail("Should throw exception when cache is disabled");
+ } catch (IllegalStateException e) {
+ // Expected
+ assertTrue("Error message should mention cache not enabled",
+ e.getMessage().contains("not enabled"));
+ }
+ }
+}
diff --git a/src/test/java/org/apache/tomcat/jakartaee/MigrationTest.java
b/src/test/java/org/apache/tomcat/jakartaee/MigrationTest.java
index 5332978..ac5f319 100644
--- a/src/test/java/org/apache/tomcat/jakartaee/MigrationTest.java
+++ b/src/test/java/org/apache/tomcat/jakartaee/MigrationTest.java
@@ -289,4 +289,155 @@ public class MigrationTest {
assertTrue("Unable to delete " + jarFileTmp.getAbsolutePath(),
jarFileTmp.delete());
}
}
+
+ @Test
+ public void testMigrateJarWithCache() throws Exception {
+ File jarFile = new File("target/test-classes/hellocgi.jar");
+ File jarFileTarget = new
File("target/test-classes/hellocgi-cached.jar");
+ File cacheDir = new File("target/test-classes/cache-test");
+
+ try {
+ // Clean up cache directory
+ if (cacheDir.exists()) {
+ FileUtils.deleteDirectory(cacheDir);
+ }
+
+ // First migration - cache miss
+ Migration migration1 = new Migration();
+ migration1.setSource(jarFile);
+ migration1.setDestination(jarFileTarget);
+ migration1.setCache(new MigrationCache(cacheDir, 30));
+ long startTime1 = System.currentTimeMillis();
+ migration1.execute();
+ long duration1 = System.currentTimeMillis() - startTime1;
+
+ assertTrue("Target JAR should exist after first migration",
jarFileTarget.exists());
+ assertTrue("Cache directory should be created", cacheDir.exists());
+
+ // Verify the migrated JAR works
+ File cgiapiFile = new File("target/test-classes/cgi-api.jar");
+ URLClassLoader classloader1 = new URLClassLoader(
+ new URL[]{jarFileTarget.toURI().toURL(),
cgiapiFile.toURI().toURL()},
+ ClassLoader.getSystemClassLoader().getParent());
+ Class<?> cls1 =
Class.forName("org.apache.tomcat.jakartaee.HelloCGI", true, classloader1);
+ assertEquals("jakarta.servlet.CommonGatewayInterface",
cls1.getSuperclass().getName());
+
+ // Delete target and migrate again - cache hit
+ jarFileTarget.delete();
+ assertFalse("Target should be deleted", jarFileTarget.exists());
+
+ Migration migration2 = new Migration();
+ migration2.setSource(jarFile);
+ migration2.setDestination(jarFileTarget);
+ migration2.setCache(new MigrationCache(cacheDir, 30));
+ long startTime2 = System.currentTimeMillis();
+ migration2.execute();
+ long duration2 = System.currentTimeMillis() - startTime2;
+
+ assertTrue("Target JAR should exist after second migration",
jarFileTarget.exists());
+
+ // Verify the cached JAR works
+ URLClassLoader classloader2 = new URLClassLoader(
+ new URL[]{jarFileTarget.toURI().toURL(),
cgiapiFile.toURI().toURL()},
+ ClassLoader.getSystemClassLoader().getParent());
+ Class<?> cls2 =
Class.forName("org.apache.tomcat.jakartaee.HelloCGI", true, classloader2);
+ assertEquals("jakarta.servlet.CommonGatewayInterface",
cls2.getSuperclass().getName());
+
+ // Note: We don't assert that duration2 < duration1 because the
times are too short
+ // and can vary. The important thing is both migrations work
correctly.
+ } finally {
+ // Clean up
+ if (cacheDir.exists()) {
+ FileUtils.deleteDirectory(cacheDir);
+ }
+ }
+ }
+
+ @Test
+ public void testMigrateJarWithCacheDisabled() throws Exception {
+ File jarFile = new File("target/test-classes/hellocgi.jar");
+ File jarFileTarget = new
File("target/test-classes/hellocgi-nocache.jar");
+
+ Migration migration = new Migration();
+ migration.setSource(jarFile);
+ migration.setDestination(jarFileTarget);
+ // Don't set cache - should work without caching
+ migration.execute();
+
+ assertTrue("Target JAR should exist", jarFileTarget.exists());
+
+ File cgiapiFile = new File("target/test-classes/cgi-api.jar");
+ URLClassLoader classloader = new URLClassLoader(
+ new URL[]{jarFileTarget.toURI().toURL(),
cgiapiFile.toURI().toURL()},
+ ClassLoader.getSystemClassLoader().getParent());
+ Class<?> cls = Class.forName("org.apache.tomcat.jakartaee.HelloCGI",
true, classloader);
+ assertEquals("jakarta.servlet.CommonGatewayInterface",
cls.getSuperclass().getName());
+ }
+
+ @Test
+ public void testMigrateCLIWithCacheOption() throws Exception {
+ File sourceFile = new File("target/test-classes/hellocgi.jar");
+ File targetFile = new
File("target/test-classes/hellocgi-cli-cached.jar");
+ File cacheDir = new File("target/test-classes/cache-cli-test");
+
+ try {
+ // Clean up
+ if (cacheDir.exists()) {
+ FileUtils.deleteDirectory(cacheDir);
+ }
+ if (targetFile.exists()) {
+ targetFile.delete();
+ }
+
+ // Run with custom cache
+ MigrationCLI.main(new String[] {
+ "-cache",
+ "-cacheLocation=" + cacheDir.getAbsolutePath(),
+ sourceFile.getAbsolutePath(),
+ targetFile.getAbsolutePath()
+ });
+
+ assertTrue("Target file should exist", targetFile.exists());
+ assertTrue("Cache directory should be created", cacheDir.exists());
+
+ // Verify the migrated JAR works
+ File cgiapiFile = new File("target/test-classes/cgi-api.jar");
+ URLClassLoader classloader = new URLClassLoader(
+ new URL[]{targetFile.toURI().toURL(),
cgiapiFile.toURI().toURL()},
+ ClassLoader.getSystemClassLoader().getParent());
+ Class<?> cls =
Class.forName("org.apache.tomcat.jakartaee.HelloCGI", true, classloader);
+ assertEquals("jakarta.servlet.CommonGatewayInterface",
cls.getSuperclass().getName());
+ } finally {
+ // Clean up
+ if (cacheDir.exists()) {
+ FileUtils.deleteDirectory(cacheDir);
+ }
+ }
+ }
+
+ @Test
+ public void testMigrateCLIWithNoCacheOption() throws Exception {
+ File sourceFile = new File("target/test-classes/hellocgi.jar");
+ File targetFile = new
File("target/test-classes/hellocgi-cli-nocache.jar");
+
+ if (targetFile.exists()) {
+ targetFile.delete();
+ }
+
+ // Run without cache (no -cache option)
+ MigrationCLI.main(new String[] {
+ sourceFile.getAbsolutePath(),
+ targetFile.getAbsolutePath()
+ });
+
+ assertTrue("Target file should exist", targetFile.exists());
+
+ // Verify the migrated JAR works
+ File cgiapiFile = new File("target/test-classes/cgi-api.jar");
+ URLClassLoader classloader = new URLClassLoader(
+ new URL[]{targetFile.toURI().toURL(),
cgiapiFile.toURI().toURL()},
+ ClassLoader.getSystemClassLoader().getParent());
+ Class<?> cls = Class.forName("org.apache.tomcat.jakartaee.HelloCGI",
true, classloader);
+ assertEquals("jakarta.servlet.CommonGatewayInterface",
cls.getSuperclass().getName());
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]