anchela commented on a change in pull request #99:
URL: https://github.com/apache/sling-org-apache-sling-feature-cpconverter/pull/99#discussion_r699130698



##########
File path: src/main/java/org/apache/sling/feature/cpconverter/vltpkg/WorkspaceFilterBuilder.java
##########
@@ -0,0 +1,184 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sling.feature.cpconverter.vltpkg;
+
+import org.apache.jackrabbit.util.Text;
+import org.apache.jackrabbit.vault.fs.api.FilterSet;
+import org.apache.jackrabbit.vault.fs.api.PathFilter;
+import org.apache.jackrabbit.vault.fs.api.PathFilterSet;
+import org.apache.jackrabbit.vault.fs.api.WorkspaceFilter;
+import org.apache.jackrabbit.vault.fs.config.ConfigurationException;
+import org.apache.jackrabbit.vault.fs.config.DefaultWorkspaceFilter;
+import org.apache.jackrabbit.vault.fs.filter.DefaultPathFilter;
+import org.jetbrains.annotations.NotNull;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Utility class to build a new {@link WorkspaceFilter} from a given base filter and a new set of paths corresponding
+ * to the content of the new converted content packages, which no longer contain entries that were moved to repo-init.
+ */
+class WorkspaceFilterBuilder {
+    
+    private final WorkspaceFilter baseFilter;
+    private final Set<String> filteredPaths;
+    private final Set<String> cpPaths;
+    private final Set<String> extractedPaths;
+
+    /**
+     * Create a new {@link WorkspaceFilterBuilder}
+     * 
+     * @param baseFilter The base {@link WorkspaceFilter} as computed from the original content package.
+     * @param filteredRepositoryPaths A set of repository paths that got moved from the original content package to
+     *                                repo-init and which should no longer be referenced in the new {@link WorkspaceFilter}.
+     * @param convertedRepositoryPaths A set of repository paths representing entries in the converted content packages.
+     * @param extractedRepositoryPaths A set of repository paths extracted from those
+     *                                 {@link org.apache.jackrabbit.vault.util.Constants#DOT_CONTENT_XML .content.xml} files
+     *                                 of the original content package that contain paths listed in the original {@link WorkspaceFilter}.
+     */
+    WorkspaceFilterBuilder(@NotNull WorkspaceFilter baseFilter, 
+                           @NotNull Set<String> filteredRepositoryPaths,
+                           @NotNull Set<String> convertedRepositoryPaths,
+                           @NotNull Set<String> extractedRepositoryPaths) {
+        this.baseFilter = baseFilter;
+        this.filteredPaths = filteredRepositoryPaths;
+        this.cpPaths = convertedRepositoryPaths;
+        this.extractedPaths = extractedRepositoryPaths;
+        
+    }
+
+    /**
+     * Build a new {@link WorkspaceFilter}
+     * 
+     * @return a new {@link WorkspaceFilter}
+     * @throws IOException If an error occurs.
+     */
+    @NotNull WorkspaceFilter build() throws IOException {
+        try {
+            DefaultWorkspaceFilter dwf = new DefaultWorkspaceFilter();
+            Map<String, PathFilterSet> propFilters = extractPropertyFilters(baseFilter);
+            for (PathFilterSet pfs : baseFilter.getFilterSets()) {
+                // add the filter to the new workspace filter if it either covers content from the converted package
+                // or doesn't match any of the content that has been moved to repo-init. the latter condition
+                // makes sure filter sets without any corresponding content (i.e. removals) are not dropped.
+                if (coversConvertedPath(pfs) || !coversFilteredPath(pfs)) {
+                    processPathFilterSet(dwf, pfs, propFilters);
+                }
+            }
+            return dwf;
+        } catch (ConfigurationException e) {
+            throw new IOException(e);
+        }
+    }
+
+    /**
+     * Extract all property-filters and remember them for later, as DefaultWorkspaceFilter.addPropertyFilterSet
+     * is deprecated in favor of DefaultWorkspaceFilter.add(PathFilterSet nodeFilter, PathFilterSet propFilter).
+     * The resulting map allows processing property filters together with their node filters.
+     *
+     * @param base A {@link WorkspaceFilter} from which to extract the property filters.
+     * @return A map from path (the root) to the corresponding property {@link PathFilterSet}.
+     * @see WorkspaceFilter#getPropertyFilterSets()
+     */
+    static @NotNull Map<String, PathFilterSet> extractPropertyFilters(@NotNull WorkspaceFilter base) {
+        Map<String, PathFilterSet> propFilters = new LinkedHashMap<>();
+        base.getPropertyFilterSets().forEach(pathFilterSet -> propFilters.put(pathFilterSet.getRoot(), pathFilterSet));
+        return propFilters;
+    }
+    
+    private boolean coversFilteredPath(@NotNull PathFilterSet pfs) {
+        return filteredPaths.stream().anyMatch(pfs::covers);
+    }
+
+    /**
+     * Test if the given {@link PathFilterSet} is still required in the converted content package.
+     * The following conditions are taken into account:
+     * - the given filter-set covers any of the paths included in the converted content package
+     * - the given filter-set covers any of the extracted paths hidden in a .content.xml
+     * - its root path is a sibling of a manually extracted path hidden in a .content.xml that gets
+     *   installed despite not being covered by the filter (issue in older versions of Jackrabbit FileVault).
+     *
+     * @param pfs A {@link PathFilterSet} of the original base filter.
+     * @return {@code true} if the given {@code PathFilterSet} is still relevant for the new {@code WorkspaceFilter} given
+     * the new content of the converted package as reflected by the recorded paths.
+     */
+    private boolean coversConvertedPath(@NotNull PathFilterSet pfs) {
+        return cpPaths.stream().anyMatch(pfs::covers) ||
+                // test if an extracted path matches and work around the sibling issue in older versions of Jackrabbit FileVault

Review comment:
       hi @kwin, I asked you for the link in SLING-10760. I searched the FileVault JIRA and didn't find the issue you were referring to that fixed the sibling problem in newer versions of FileVault.
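
   For reference, a minimal sketch of how the builder is wired up, as I read it from this hunk. The paths below are made up purely for illustration; in the converter, the three sets are collected while the original package is being converted.

```java
package org.apache.sling.feature.cpconverter.vltpkg;

import org.apache.jackrabbit.vault.fs.api.WorkspaceFilter;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Lives in the same package as the builder, since WorkspaceFilterBuilder is package-private.
class WorkspaceFilterBuilderSketch {

    static WorkspaceFilter rebuild(WorkspaceFilter baseFilter) throws IOException {
        // Paths whose content was converted to repo-init statements and must no longer be referenced.
        Set<String> filteredRepositoryPaths = new HashSet<>(Arrays.asList("/home/users/system/demo-user"));
        // Paths that actually ended up as entries in the converted content package.
        Set<String> convertedRepositoryPaths = new HashSet<>(Arrays.asList("/apps/demo", "/conf/demo"));
        // Paths extracted from .content.xml files that are listed in the original filter.
        Set<String> extractedRepositoryPaths = new HashSet<>(Arrays.asList("/conf/demo/settings"));

        WorkspaceFilterBuilder builder = new WorkspaceFilterBuilder(
                baseFilter, filteredRepositoryPaths, convertedRepositoryPaths, extractedRepositoryPaths);
        // Filter sets that only covered content moved to repo-init are dropped; the rest,
        // together with matching property filters, are carried over into the new filter.
        return builder.build();
    }
}
```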

##########
File path: src/test/java/org/apache/sling/feature/cpconverter/vltpkg/WorkspaceFilterBuilderTest.java
##########
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sling.feature.cpconverter.vltpkg;
+
+import org.apache.jackrabbit.vault.fs.api.ImportMode;
+import org.apache.jackrabbit.vault.fs.api.PathFilterSet;
+import org.apache.jackrabbit.vault.fs.api.WorkspaceFilter;
+import org.apache.jackrabbit.vault.fs.filter.DefaultPathFilter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static junit.framework.TestCase.assertNotSame;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
+
+public class WorkspaceFilterBuilderTest {
+    
+    private static final String TEST_ROOT_PATH = "/some/test/path";
+    private static final String PATH_NOT_COVERED = "/path/not/covered/by/filter";
+    
+    private final WorkspaceFilter base = mock(WorkspaceFilter.class);
+
+    private final PathFilterSet pfs = new PathFilterSet(TEST_ROOT_PATH);
+    private final List<PathFilterSet> filterSets = new ArrayList<>();
+    
+    @Before
+    public void before() throws Exception {
+        pfs.setImportMode(ImportMode.UPDATE);
+        pfs.addExclude(new DefaultPathFilter("test/exclude"));
+        pfs.addInclude(new DefaultPathFilter("test/include"));

Review comment:
       this is a unit test for the builder, not an integration test. It's used to verify that the include/exclude entries are copied over, not that they are sensible.
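
   To make that concrete, this is roughly the kind of check the test performs, sketched here with illustrative names and assuming FileVault's FilterSet.Entry API (getFilter()/isInclude()): it compares the filter set found in the rebuilt filter against the original one, entry by entry.

```java
import org.apache.jackrabbit.vault.fs.api.FilterSet;
import org.apache.jackrabbit.vault.fs.api.PathFilter;
import org.apache.jackrabbit.vault.fs.api.PathFilterSet;
import org.apache.jackrabbit.vault.fs.filter.DefaultPathFilter;

import static org.junit.Assert.assertEquals;

// Sketch only: verify that root, import mode and include/exclude entries were copied over verbatim.
final class FilterSetCopyAssertions {

    static void assertEntriesCopied(PathFilterSet original, PathFilterSet copied) {
        assertEquals(original.getRoot(), copied.getRoot());
        assertEquals(original.getImportMode(), copied.getImportMode());
        assertEquals(original.getEntries().size(), copied.getEntries().size());
        for (int i = 0; i < original.getEntries().size(); i++) {
            FilterSet.Entry<PathFilter> expected = original.getEntries().get(i);
            FilterSet.Entry<PathFilter> actual = copied.getEntries().get(i);
            // Include vs. exclude flag and the underlying pattern must match entry by entry.
            assertEquals(expected.isInclude(), actual.isInclude());
            assertEquals(((DefaultPathFilter) expected.getFilter()).getPattern(),
                    ((DefaultPathFilter) actual.getFilter()).getPattern());
        }
    }
}
```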

##########
File path: src/main/java/org/apache/sling/feature/cpconverter/shared/AbstractJcrNodeParser.java
##########
@@ -27,15 +27,14 @@
 import javax.xml.parsers.SAXParser;
 import javax.xml.parsers.SAXParserFactory;
 
-import org.apache.sling.feature.cpconverter.ConverterException;
 import org.jetbrains.annotations.NotNull;
 import org.xml.sax.Attributes;
 import org.xml.sax.SAXException;
 import org.xml.sax.helpers.DefaultHandler;
 
 public abstract class AbstractJcrNodeParser<O> extends DefaultHandler {
 
-    private static final String JCR_ROOT = "jcr:root";
+    public static final String JCR_ROOT = "jcr:root";

Review comment:
       hi @kwin, which constant are you referring to? I checked org.apache.jackrabbit.vault.util.Constants and it only contains `String ROOT_DIR = "jcr_root"`. I would have used a "jcr:root" constant if I had found one :)

##########
File path: src/main/java/org/apache/sling/feature/cpconverter/vltpkg/WorkspaceFilterBuilder.java
##########

Review comment:
       hi @kwin, in SLING-10760 you stated:
   ```
   With which version of FileVault have you tested importing that package (AEM uses a pretty outdated one)? Everything not covered by the filter.xml should not be installed! The only edge case is nodes represented by empty elements (http://jackrabbit.apache.org/filevault/docview.html#Empty_Elements) in enhanced docview which will not be removed, but this is not really related to the filter.xml question.
   
   There is currently a bug related to authorization nodes (JCRVLT-522) which are applied independent if they are contained in the filter rules. Not sure we can ever fix this, as a lot of packages probably now rely on this bug!
   ```
   My reading was that importing non-covered nodes, as I see it in the test, is therefore an issue that has been fixed in Jackrabbit FileVault. I searched JIRA but was not able to find the corresponding ticket. Therefore I asked you for the corresponding link in https://issues.apache.org/jira/browse/SLING-10760?focusedCommentId=17405809&page=com.atlassian.jira.plugin.system.issuetabpanels%3Acomment-tabpanel#comment-17405809.

##########
File path: src/main/java/org/apache/sling/feature/cpconverter/vltpkg/WorkspaceFilterBuilder.java
##########

Review comment:
       @kwin, feel free to use the packages created for this ticket for tests in Jackrabbit FileVault. I will adjust the comment accordingly.

##########
File path: src/main/java/org/apache/sling/feature/cpconverter/handlers/DefaultEntryParser.java
##########
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sling.feature.cpconverter.handlers;
+
+import org.apache.jackrabbit.vault.util.PathUtil;
+import org.apache.sling.feature.cpconverter.shared.AbstractJcrNodeParser;
+import org.jetbrains.annotations.NotNull;
+import org.xml.sax.Attributes;
+
+import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.Set;
+
+/**
+ * Implementation of {@link AbstractJcrNodeParser} that builds and records paths of all elements (nodes) using the
+ * specified initial repository path.
+ */
+public class DefaultEntryParser extends AbstractJcrNodeParser<Set<String>> {
+    
+    private final LinkedList<String> currentPath = new LinkedList<>();
+    private final Set<String> coveredNodePaths = new LinkedHashSet<>();
+
+    /**
+     * Create a new {@link DefaultEntryParser}.
+     * 
+     * @param repositoryPath The base repository path used to build absolute paths from the parsed elements.
+     */
+    public DefaultEntryParser(@NotNull String repositoryPath) {
+        currentPath.push(repositoryPath);
+    }
+
+    @Override
+    public void startElement(String uri, String localName, String qName, Attributes attributes) {
+        if (JCR_ROOT.equals(qName)) {

Review comment:
       the Sling feature-cpconverter ignores namespace remapping all over the place. Please file a separate ticket to have it fixed everywhere.
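
   For reference, a namespace-aware variant would key off the resolved namespace URI and local name instead of the literal qName, roughly like this (sketch only; it assumes the underlying SAXParserFactory is configured with setNamespaceAware(true), otherwise uri/localName stay empty):

```java
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;

// Sketch only: recognize the docview root element via namespace URI + local name,
// so a remapped prefix (e.g. xmlns:foo="http://www.jcp.org/jcr/1.0") is still handled.
class NamespaceAwareRootCheck extends DefaultHandler {

    private static final String JCR_NAMESPACE_URI = "http://www.jcp.org/jcr/1.0";

    @Override
    public void startElement(String uri, String localName, String qName, Attributes attributes) {
        if (JCR_NAMESPACE_URI.equals(uri) && "root".equals(localName)) {
            // the jcr:root element of the docview file, regardless of prefix mapping
        }
    }
}
```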

##########
File path: src/main/java/org/apache/sling/feature/cpconverter/handlers/DefaultEntryParser.java
##########

Review comment:
       @karlpauls, I created SLING-10768 to keep track of this separate issue.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

