This is an automated email from the ASF dual-hosted git repository.
tilman pushed a commit to branch branch_3x
in repository https://gitbox.apache.org/repos/asf/tika.git
The following commit(s) were added to refs/heads/branch_3x by this push:
new 00064600d TIKA-4440: replace deprecated, use for loops
00064600d is described below
commit 00064600d969196521327e8f85cf747df579cfc7
Author: Tilman Hausherr <[email protected]>
AuthorDate: Wed Jul 2 12:45:04 2025 +0200
TIKA-4440: replace deprecated, use for loops
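    For context, the change swaps the deprecated Iterator-based JsonNode.fields()
    loops for enhanced for loops over JsonNode.properties(). Below is a minimal
    standalone sketch of that pattern, not code from the commit: it assumes a
    Jackson version on the classpath that provides JsonNode.properties() as the
    replacement for fields(); the class name and JSON content are illustrative only.

        // Minimal sketch of the fields() -> properties() migration pattern.
        // Assumes a Jackson version where JsonNode.properties() exists
        // (it replaces the deprecated fields() iterator).
        import java.util.Map;

        import com.fasterxml.jackson.databind.JsonNode;
        import com.fasterxml.jackson.databind.ObjectMapper;

        public class PropertiesLoopSketch {

            public static void main(String[] args) throws Exception {
                JsonNode root = new ObjectMapper()
                        .readTree("{\"analyzers\":{\"a\":1,\"b\":2}}");

                // Before: explicit Iterator over the deprecated fields()
                // for (Iterator<Map.Entry<String, JsonNode>> it = root.fields(); it.hasNext(); ) {
                //     Map.Entry<String, JsonNode> e = it.next();
                //     ...
                // }

                // After: enhanced for loop over properties()
                for (Map.Entry<String, JsonNode> e : root.get("analyzers").properties()) {
                    System.out.println(e.getKey() + " -> " + e.getValue());
                }
            }
        }

    The same idea applies to the array loops below, where the elements() iterator
    gives way to iterating the JsonNode directly (JsonNode is Iterable<JsonNode>).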
---
.../tika/eval/core/tokens/AnalyzerDeserializer.java | 21 +++++----------------
1 file changed, 5 insertions(+), 16 deletions(-)
diff --git a/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerDeserializer.java b/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerDeserializer.java
index a20eafbbb..e0a8487f6 100644
--- a/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerDeserializer.java
+++ b/tika-eval/tika-eval-core/src/main/java/org/apache/tika/eval/core/tokens/AnalyzerDeserializer.java
@@ -21,15 +21,11 @@ import java.io.IOException;
 import java.io.Reader;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
 import java.util.Map;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.TokenFilterFactory;
 import org.apache.lucene.analysis.custom.CustomAnalyzer;
 import org.apache.lucene.util.ClasspathResourceLoader;
 
@@ -53,9 +49,7 @@ class AnalyzerDeserializer {
             throw new IllegalArgumentException(
                     "root object must be object with an 'analyzers' element");
         }
-        for (Iterator<Map.Entry<String, JsonNode>> it = root.get(ANALYZERS).fields();
-             it.hasNext(); ) {
-            Map.Entry<String, JsonNode> e = it.next();
+        for (Map.Entry<String, JsonNode> e : root.get(ANALYZERS).properties()) {
             String analyzerName = e.getKey();
             Analyzer analyzer = buildAnalyzer(analyzerName, e.getValue(), maxTokens);
             analyzers.put(analyzerName, analyzer);
@@ -72,8 +66,7 @@ class AnalyzerDeserializer {
 
         CustomAnalyzer.Builder builder =
                 CustomAnalyzer.builder(new ClasspathResourceLoader(AnalyzerDeserializer.class));
-        for (Iterator<Map.Entry<String, JsonNode>> it = node.fields(); it.hasNext(); ) {
-            Map.Entry<String, JsonNode> e = it.next();
+        for (Map.Entry<String, JsonNode> e : node.properties()) {
             String k = e.getKey();
             if (k.equals(CHAR_FILTERS)) {
                 buildCharFilters(e.getValue(), analyzerName, builder);
@@ -123,8 +116,7 @@ class AnalyzerDeserializer {
                             analyzerName);
         }
 
-        for (Iterator<JsonNode> it = el.elements(); it.hasNext(); ) {
-            JsonNode filterMap = it.next();
+        for (JsonNode filterMap : el) {
             if (!filterMap.isObject()) {
                 throw new IllegalArgumentException(
                         "Expecting a map with \"factory\" string and \"params\" map in char filter factory;" +
@@ -157,9 +149,7 @@ class AnalyzerDeserializer {
                             analyzerName);
         }
 
-        List<TokenFilterFactory> ret = new LinkedList<>();
-        for (Iterator<JsonNode> it = el.elements(); it.hasNext(); ) {
-            JsonNode filterMap = it.next();
+        for (JsonNode filterMap : el) {
             if (!filterMap.isObject()) {
                 throw new IllegalArgumentException(
                         "Expecting a map with \"factory\" string and \"params\" map in token filter factory;" +
@@ -194,8 +184,7 @@ class AnalyzerDeserializer {
             throw new IllegalArgumentException("Expecting map, not: " + paramsEl.toString());
         }
         Map<String, String> params = new HashMap<>();
-        for (Iterator<Map.Entry<String, JsonNode>> it = paramsEl.fields(); it.hasNext(); ) {
-            Map.Entry<String, JsonNode> e = it.next();
+        for (Map.Entry<String, JsonNode> e : paramsEl.properties()) {
             JsonNode value = e.getValue();
             if (value.isObject() || value.isArray() || value.isNull()) {
                 throw new IllegalArgumentException(