http://git-wip-us.apache.org/repos/asf/ignite/blob/b7489457/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactorySelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactorySelfTest.java b/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactorySelfTest.java
new file mode 100644
index 0000000..ea7fa99
--- /dev/null
+++ b/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/fs/KerberosHadoopFileSystemFactorySelfTest.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.hadoop.fs;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.ObjectInput;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutput;
+import java.io.ObjectOutputStream;
+import java.util.concurrent.Callable;
+
+import org.apache.ignite.testframework.GridTestUtils;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.junit.Assert;
+
+/**
+ * Tests KerberosHadoopFileSystemFactory.
+ */
+public class KerberosHadoopFileSystemFactorySelfTest extends GridCommonAbstractTest {
+    /**
+     * Test parameters validation.
+     *
+     * @throws Exception If failed.
+     */
+    public void testParameters() throws Exception {
+        checkParameters(null, null, -1);
+
+        checkParameters(null, null, 100);
+        checkParameters(null, "b", -1);
+        checkParameters("a", null, -1);
+
+        checkParameters(null, "b", 100);
+        checkParameters("a", null, 100);
+        checkParameters("a", "b", -1);
+    }
+
+    /**
+     * Check parameters.
+     *
+     * @param keyTab Key tab.
+     * @param keyTabPrincipal Key tab principal.
+     * @param reloginInterval Re-login interval.
+     */
+    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
+    private void checkParameters(String keyTab, String keyTabPrincipal, long reloginInterval) {
+        final KerberosHadoopFileSystemFactory fac = new KerberosHadoopFileSystemFactory();
+
+        fac.setKeyTab(keyTab);
+        fac.setKeyTabPrincipal(keyTabPrincipal);
+        fac.setReloginInterval(reloginInterval);
+
+        GridTestUtils.assertThrows(null, new Callable<Object>() {
+            @Override public Object call() throws Exception {
+                fac.start();
+
+                return null;
+            }
+        }, IllegalArgumentException.class, null);
+    }
+
+    /**
+     * Checks serialization and deserialization of the secure factory.
+     *
+     * @throws Exception If failed.
+     */
+    public void testSerialization() throws Exception {
+        KerberosHadoopFileSystemFactory fac = new KerberosHadoopFileSystemFactory();
+
+        checkSerialization(fac);
+
+        fac = new KerberosHadoopFileSystemFactory();
+
+        fac.setUri("igfs://igfs@localhost:10500/");
+        fac.setConfigPaths("/a/core-site.xml", "/b/mapred-site.xml");
+        fac.setKeyTabPrincipal("foo");
+        fac.setKeyTab("/etc/krb5.keytab");
+        fac.setReloginInterval(30 * 60 * 1000L);
+
+        checkSerialization(fac);
+    }
+
+    /**
+     * Serializes and deserializes the factory, then checks that the restored state matches the original.
+     *
+     * @param fac The factory to check.
+     * @throws Exception If failed.
+     */
+    private void checkSerialization(KerberosHadoopFileSystemFactory fac) throws Exception {
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+        ObjectOutput oo = new ObjectOutputStream(baos);
+
+        oo.writeObject(fac);
+
+        ObjectInput in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()));
+
+        KerberosHadoopFileSystemFactory fac2 = (KerberosHadoopFileSystemFactory)in.readObject();
+
+        assertEquals(fac.getUri(), fac2.getUri());
+        Assert.assertArrayEquals(fac.getConfigPaths(), fac2.getConfigPaths());
+        assertEquals(fac.getKeyTab(), fac2.getKeyTab());
+        assertEquals(fac.getKeyTabPrincipal(), fac2.getKeyTabPrincipal());
+        assertEquals(fac.getReloginInterval(), fac2.getReloginInterval());
+    }
+}
\ No newline at end of file

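For context, a minimal configuration sketch (not part of this commit) showing how a KerberosHadoopFileSystemFactory like the one exercised above might be attached to an IgniteHadoopIgfsSecondaryFileSystem. The HDFS URI, config path, keytab location and principal below are placeholder values.

    import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem;
    import org.apache.ignite.hadoop.fs.KerberosHadoopFileSystemFactory;

    public class KerberosFactoryExample {
        /** Builds a Kerberos-secured secondary file system; all values below are placeholders. */
        public static IgniteHadoopIgfsSecondaryFileSystem secondaryFileSystem() {
            KerberosHadoopFileSystemFactory fac = new KerberosHadoopFileSystemFactory();

            fac.setUri("hdfs://namenode.example.com:9000/");
            fac.setConfigPaths("/etc/hadoop/core-site.xml");

            // Keytab and principal must both be set and the re-login interval must not be
            // negative, otherwise start() fails with IllegalArgumentException (see testParameters above).
            fac.setKeyTab("/etc/krb5.keytab");
            fac.setKeyTabPrincipal("ignite@EXAMPLE.COM");
            fac.setReloginInterval(10 * 60 * 1000L); // re-login every 10 minutes

            IgniteHadoopIgfsSecondaryFileSystem secondary = new IgniteHadoopIgfsSecondaryFileSystem();

            secondary.setFileSystemFactory(fac);

            return secondary;
        }
    }
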
http://git-wip-us.apache.org/repos/asf/ignite/blob/b7489457/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/BasicUserNameMapperSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/BasicUserNameMapperSelfTest.java b/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/BasicUserNameMapperSelfTest.java
new file mode 100644
index 0000000..fd8fdef
--- /dev/null
+++ b/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/BasicUserNameMapperSelfTest.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.hadoop.util;
+
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.jetbrains.annotations.Nullable;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Test for basic user name mapper.
+ */
+public class BasicUserNameMapperSelfTest extends GridCommonAbstractTest {
+    /**
+     * Test null mappings.
+     *
+     * @throws Exception If failed.
+     */
+    public void testNullMappings() throws Exception {
+        checkNullOrEmptyMappings(null);
+    }
+
+    /**
+     * Test empty mappings.
+     *
+     * @throws Exception If failed.
+     */
+    public void testEmptyMappings() throws Exception {
+        checkNullOrEmptyMappings(new HashMap<String, String>());
+    }
+
+    /**
+     * Check null or empty mappings.
+     *
+     * @param map Mappings.
+     * @throws Exception If failed.
+     */
+    private void checkNullOrEmptyMappings(@Nullable Map<String, String> map) throws Exception {
+        BasicUserNameMapper mapper = create(map, false, null);
+
+        assertNull(mapper.map(null));
+        assertEquals("1", mapper.map("1"));
+        assertEquals("2", mapper.map("2"));
+
+        mapper = create(map, true, null);
+
+        assertNull(mapper.map(null));
+        assertNull(mapper.map("1"));
+        assertNull(mapper.map("2"));
+
+        mapper = create(map, false, "A");
+
+        assertNull(mapper.map(null));
+        assertEquals("1", mapper.map("1"));
+        assertEquals("2", mapper.map("2"));
+
+        mapper = create(map, true, "A");
+
+        assertEquals("A", mapper.map(null));
+        assertEquals("A", mapper.map("1"));
+        assertEquals("A", mapper.map("2"));
+    }
+
+    /**
+     * Test regular mappings.
+     *
+     * @throws Exception If failed.
+     */
+    public void testMappings() throws Exception {
+        Map<String, String> map = new HashMap<>();
+
+        map.put("1", "101");
+
+        BasicUserNameMapper mapper = create(map, false, null);
+
+        assertNull(mapper.map(null));
+        assertEquals("101", mapper.map("1"));
+        assertEquals("2", mapper.map("2"));
+
+        mapper = create(map, true, null);
+
+        assertNull(mapper.map(null));
+        assertEquals("101", mapper.map("1"));
+        assertNull(mapper.map("2"));
+
+        mapper = create(map, false, "A");
+
+        assertNull(mapper.map(null));
+        assertEquals("101", mapper.map("1"));
+        assertEquals("2", mapper.map("2"));
+
+        mapper = create(map, true, "A");
+
+        assertEquals("A", mapper.map(null));
+        assertEquals("101", mapper.map("1"));
+        assertEquals("A", mapper.map("2"));
+    }
+
+    /**
+     * Create mapper.
+     *
+     * @param dictionary Dictionary.
+     * @param useDfltUsrName Whether to use default user name.
+     * @param dfltUsrName Default user name.
+     * @return Mapper.
+     */
+    private BasicUserNameMapper create(@Nullable Map<String, String> dictionary, boolean useDfltUsrName,
+        @Nullable String dfltUsrName) {
+        BasicUserNameMapper mapper = new BasicUserNameMapper();
+
+        mapper.setMappings(dictionary);
+        mapper.setUseDefaultUserName(useDfltUsrName);
+        mapper.setDefaultUserName(dfltUsrName);
+
+        return mapper;
+    }
+}

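As a standalone illustration of the mapping rules asserted above, a small sketch; the user names and mappings are made up, not taken from the commit.

    import java.util.Collections;

    import org.apache.ignite.hadoop.util.BasicUserNameMapper;

    public class BasicMapperExample {
        public static void main(String[] args) {
            BasicUserNameMapper mapper = new BasicUserNameMapper();

            // Explicitly map the Hadoop user "alice" to the IGFS user "alice_igfs".
            mapper.setMappings(Collections.singletonMap("alice", "alice_igfs"));

            // Users without an explicit mapping fall back to a default name
            // instead of passing through unchanged.
            mapper.setUseDefaultUserName(true);
            mapper.setDefaultUserName("anonymous");

            System.out.println(mapper.map("alice")); // alice_igfs
            System.out.println(mapper.map("bob"));   // anonymous
        }
    }
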
http://git-wip-us.apache.org/repos/asf/ignite/blob/b7489457/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/ChainedUserNameMapperSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/ChainedUserNameMapperSelfTest.java b/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/ChainedUserNameMapperSelfTest.java
new file mode 100644
index 0000000..bfac49c
--- /dev/null
+++ b/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/ChainedUserNameMapperSelfTest.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.hadoop.util;
+
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.internal.processors.igfs.IgfsUtils;
+import org.apache.ignite.testframework.GridTestUtils;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+
+import java.util.Collections;
+import java.util.concurrent.Callable;
+
+/**
+ * Tests for chained user name mapper.
+ */
+public class ChainedUserNameMapperSelfTest extends GridCommonAbstractTest {
+    /** Test instance. */
+    private static final String INSTANCE = "test_instance";
+
+    /** Test realm. */
+    private static final String REALM = "test_realm";
+
+    /**
+     * Test case when mappers are null.
+     *
+     * @throws Exception If failed.
+     */
+    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
+    public void testNullMappers() throws Exception {
+        GridTestUtils.assertThrows(null, new Callable<Void>() {
+            @Override public Void call() throws Exception {
+                create((UserNameMapper[])null);
+
+                return null;
+            }
+        }, IgniteException.class, null);
+    }
+
+    /**
+     * Test case when one of mappers is null.
+     *
+     * @throws Exception If failed.
+     */
+    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
+    public void testNullMapperElement() throws Exception {
+        GridTestUtils.assertThrows(null, new Callable<Void>() {
+            @Override public Void call() throws Exception {
+                create(new BasicUserNameMapper(), null);
+
+                return null;
+            }
+        }, IgniteException.class, null);
+    }
+
+    /**
+     * Test actual chaining logic.
+     *
+     * @throws Exception If failed.
+     */
+    public void testChaining() throws Exception {
+        BasicUserNameMapper mapper1 = new BasicUserNameMapper();
+
+        mapper1.setMappings(Collections.singletonMap("1", "101"));
+
+        KerberosUserNameMapper mapper2 = new KerberosUserNameMapper();
+
+        mapper2.setInstance(INSTANCE);
+        mapper2.setRealm(REALM);
+
+        ChainedUserNameMapper mapper = create(mapper1, mapper2);
+
+        assertEquals("101" + "/" + INSTANCE + "@" + REALM, mapper.map("1"));
+        assertEquals("2" + "/" + INSTANCE + "@" + REALM, mapper.map("2"));
+        assertEquals(IgfsUtils.fixUserName(null) + "/" + INSTANCE + "@" + REALM, mapper.map(null));
+    }
+
+    /**
+     * Create chained mapper.
+     *
+     * @param mappers Child mappers.
+     * @return Chained mapper.
+     */
+    private ChainedUserNameMapper create(UserNameMapper... mappers) {
+        ChainedUserNameMapper mapper = new ChainedUserNameMapper();
+
+        mapper.setMappers(mappers);
+
+        mapper.start();
+
+        return mapper;
+    }
+}

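A hedged end-to-end sketch of the chaining behavior checked in testChaining() above; the instance and realm values are placeholders.

    import java.util.Collections;

    import org.apache.ignite.hadoop.util.BasicUserNameMapper;
    import org.apache.ignite.hadoop.util.ChainedUserNameMapper;
    import org.apache.ignite.hadoop.util.KerberosUserNameMapper;

    public class ChainedMapperExample {
        public static void main(String[] args) {
            // Step 1: rewrite the short user name.
            BasicUserNameMapper basic = new BasicUserNameMapper();
            basic.setMappings(Collections.singletonMap("alice", "alice_igfs"));

            // Step 2: decorate the result with Kerberos instance and realm.
            KerberosUserNameMapper kerberos = new KerberosUserNameMapper();
            kerberos.setInstance("datanode01");
            kerberos.setRealm("EXAMPLE.COM");

            ChainedUserNameMapper chained = new ChainedUserNameMapper();
            chained.setMappers(basic, kerberos);
            chained.start();

            System.out.println(chained.map("alice")); // alice_igfs/datanode01@EXAMPLE.COM
            System.out.println(chained.map("bob"));   // bob/datanode01@EXAMPLE.COM
        }
    }
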
http://git-wip-us.apache.org/repos/asf/ignite/blob/b7489457/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/KerberosUserNameMapperSelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/KerberosUserNameMapperSelfTest.java b/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/KerberosUserNameMapperSelfTest.java
new file mode 100644
index 0000000..cc685bb
--- /dev/null
+++ b/modules/hadoop-impl/src/test/java/org/apache/ignite/hadoop/util/KerberosUserNameMapperSelfTest.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.hadoop.util;
+
+import org.apache.ignite.internal.processors.igfs.IgfsUtils;
+import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Tests for Kerberos name mapper.
+ */
+public class KerberosUserNameMapperSelfTest extends GridCommonAbstractTest {
+    /** Test instance. */
+    private static final String INSTANCE = "test_instance";
+
+    /** Test realm. */
+    private static final String REALM = "test_realm";
+
+    /**
+     * Test mapper without instance and realm components.
+     *
+     * @throws Exception If failed.
+     */
+    public void testMapper() throws Exception {
+        KerberosUserNameMapper mapper = create(null, null);
+
+        assertEquals(IgfsUtils.fixUserName(null), mapper.map(null));
+        assertEquals("test", mapper.map("test"));
+    }
+
+    /**
+     * Test mapper with instance component.
+     *
+     * @throws Exception If failed.
+     */
+    public void testMapperInstance() throws Exception {
+        KerberosUserNameMapper mapper = create(INSTANCE, null);
+
+        assertEquals(IgfsUtils.fixUserName(null) + "/" + INSTANCE, mapper.map(null));
+        assertEquals("test" + "/" + INSTANCE, mapper.map("test"));
+    }
+
+    /**
+     * Test mapper with realm.
+     *
+     * @throws Exception If failed.
+     */
+    public void testMapperRealm() throws Exception {
+        KerberosUserNameMapper mapper = create(null, REALM);
+
+        assertEquals(IgfsUtils.fixUserName(null) + "@" + REALM, mapper.map(null));
+        assertEquals("test" + "@" + REALM, mapper.map("test"));
+    }
+
+    /**
+     * Test mapper with instance and realm components.
+     *
+     * @throws Exception If failed.
+     */
+    public void testMapperInstanceAndRealm() throws Exception {
+        KerberosUserNameMapper mapper = create(INSTANCE, REALM);
+
+        assertEquals(IgfsUtils.fixUserName(null) + "/" + INSTANCE + "@" + REALM, mapper.map(null));
+        assertEquals("test" + "/" + INSTANCE + "@" + REALM, mapper.map("test"));
+    }
+
+    /**
+     * Create mapper.
+     *
+     * @param instance Instance.
+     * @param realm Realm.
+     * @return Mapper.
+     */
+    private KerberosUserNameMapper create(@Nullable String instance, @Nullable String realm) {
+        KerberosUserNameMapper mapper = new KerberosUserNameMapper();
+
+        mapper.setInstance(instance);
+        mapper.setRealm(realm);
+
+        mapper.start();
+
+        return mapper;
+    }
+}

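For reference, a sketch of attaching a KerberosUserNameMapper to a CachingHadoopFileSystemFactory so that plain user names are expanded to full Kerberos principals before reaching the secondary file system; the URI, instance and realm are placeholder assumptions.

    import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory;
    import org.apache.ignite.hadoop.util.KerberosUserNameMapper;

    public class KerberosMapperWiringExample {
        public static CachingHadoopFileSystemFactory secondaryFactory() {
            KerberosUserNameMapper mapper = new KerberosUserNameMapper();

            // Either component may be left unset; an unset component is simply omitted
            // from the resulting principal, as the tests above show.
            mapper.setInstance("datanode01");
            mapper.setRealm("EXAMPLE.COM");

            CachingHadoopFileSystemFactory factory = new CachingHadoopFileSystemFactory();

            factory.setUri("hdfs://namenode.example.com:9000/");
            factory.setUserNameMapper(mapper);

            return factory;
        }
    }
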
http://git-wip-us.apache.org/repos/asf/ignite/blob/b7489457/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1DualAbstractTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1DualAbstractTest.java b/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1DualAbstractTest.java
new file mode 100644
index 0000000..2c25a06
--- /dev/null
+++ b/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1DualAbstractTest.java
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.igfs;
+
+import java.io.IOException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory;
+import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem;
+import org.apache.ignite.hadoop.util.ChainedUserNameMapper;
+import org.apache.ignite.hadoop.util.KerberosUserNameMapper;
+import org.apache.ignite.hadoop.util.UserNameMapper;
+import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem;
+import org.apache.ignite.internal.processors.igfs.IgfsDualAbstractSelfTest;
+import org.apache.ignite.lifecycle.LifecycleAware;
+import org.jetbrains.annotations.Nullable;
+
+import static org.apache.ignite.IgniteFileSystem.IGFS_SCHEME;
+import static org.apache.ignite.igfs.HadoopSecondaryFileSystemConfigurationTest.SECONDARY_CFG_PATH;
+import static org.apache.ignite.igfs.HadoopSecondaryFileSystemConfigurationTest.configuration;
+import static org.apache.ignite.igfs.HadoopSecondaryFileSystemConfigurationTest.mkUri;
+import static org.apache.ignite.igfs.HadoopSecondaryFileSystemConfigurationTest.writeConfiguration;
+import static org.apache.ignite.igfs.IgfsMode.PRIMARY;
+
+/**
+ * Abstract test for Hadoop 1.0 file system stack.
+ */
+public abstract class Hadoop1DualAbstractTest extends IgfsDualAbstractSelfTest {
+    /** Secondary grid name */
+    private static final String GRID_NAME = "grid_secondary";
+
+    /** Secondary file system name */
+    private static final String IGFS_NAME = "igfs_secondary";
+
+    /** Secondary file system REST endpoint port */
+    private static final int PORT = 11500;
+
+    /** Secondary file system REST endpoint configuration map. */
+    private static final IgfsIpcEndpointConfiguration SECONDARY_REST_CFG = new IgfsIpcEndpointConfiguration() {{
+        setType(IgfsIpcEndpointType.TCP);
+        setPort(PORT);
+    }};
+
+    /** Secondary file system authority. */
+    private static final String SECONDARY_AUTHORITY = IGFS_NAME + ":" + GRID_NAME + "@127.0.0.1:" + PORT;
+
+    /** Secondary Fs configuration full path. */
+    protected String secondaryConfFullPath;
+
+    /** Secondary Fs URI. */
+    protected String secondaryUri;
+
+    /** Constructor. */
+    public Hadoop1DualAbstractTest(IgfsMode mode) {
+        super(mode);
+    }
+
+    /**
+     * Creates the secondary file system stack.
+     * @return Secondary file system.
+     * @throws Exception On failure.
+     */
+    @Override protected IgfsSecondaryFileSystem createSecondaryFileSystemStack() throws Exception {
+        startUnderlying();
+
+        prepareConfiguration();
+
+        KerberosUserNameMapper mapper1 = new KerberosUserNameMapper();
+
+        mapper1.setRealm("TEST.COM");
+
+        TestUserNameMapper mapper2 = new TestUserNameMapper();
+
+        ChainedUserNameMapper mapper = new ChainedUserNameMapper();
+
+        mapper.setMappers(mapper1, mapper2);
+
+        CachingHadoopFileSystemFactory factory = new CachingHadoopFileSystemFactory();
+
+        factory.setUri(secondaryUri);
+        factory.setConfigPaths(secondaryConfFullPath);
+        factory.setUserNameMapper(mapper);
+
+        IgniteHadoopIgfsSecondaryFileSystem second = new IgniteHadoopIgfsSecondaryFileSystem();
+
+        second.setFileSystemFactory(factory);
+
+        igfsSecondary = new HadoopIgfsSecondaryFileSystemTestAdapter(factory);
+
+        return second;
+    }
+
+    /**
+     * Starts the underlying Ignite node that hosts the secondary IGFS.
+     * @throws Exception On failure.
+     */
+    protected void startUnderlying() throws Exception {
+        startGridWithIgfs(GRID_NAME, IGFS_NAME, PRIMARY, null, SECONDARY_REST_CFG, secondaryIpFinder);
+    }
+
+    /**
+     * Prepares Fs configuration.
+     * @throws IOException On failure.
+     */
+    protected void prepareConfiguration() throws IOException {
+        Configuration secondaryConf = configuration(IGFS_SCHEME, SECONDARY_AUTHORITY, true, true);
+
+        secondaryConf.setInt("fs.igfs.block.size", 1024);
+
+        secondaryConfFullPath = writeConfiguration(secondaryConf, SECONDARY_CFG_PATH);
+
+        secondaryUri = mkUri(IGFS_SCHEME, SECONDARY_AUTHORITY);
+    }
+
+    /**
+     * Test user name mapper.
+     */
+    private static class TestUserNameMapper implements UserNameMapper, LifecycleAware {
+        /** */
+        private static final long serialVersionUID = 0L;
+
+        /** Started flag. */
+        private boolean started;
+
+        /** {@inheritDoc} */
+        @Nullable @Override public String map(String name) {
+            assert started;
+            assert name != null && name.contains("@");
+
+            return name.substring(0, name.indexOf("@"));
+        }
+
+        /** {@inheritDoc} */
+        @Override public void start() throws IgniteException {
+            started = true;
+        }
+
+        /** {@inheritDoc} */
+        @Override public void stop() throws IgniteException {
+            // No-op.
+        }
+    }
+}
\ No newline at end of file

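Along the same lines as TestUserNameMapper above, a hypothetical custom mapper (not part of the commit) implementing both UserNameMapper and LifecycleAware; the realm-stripping rule is made up for illustration.

    import org.apache.ignite.IgniteException;
    import org.apache.ignite.hadoop.util.UserNameMapper;
    import org.apache.ignite.lifecycle.LifecycleAware;
    import org.jetbrains.annotations.Nullable;

    /** Strips a trailing "@REALM" suffix from incoming user names (illustrative only). */
    public class RealmStrippingUserNameMapper implements UserNameMapper, LifecycleAware {
        /** */
        private static final long serialVersionUID = 0L;

        /** {@inheritDoc} */
        @Nullable @Override public String map(String name) {
            if (name == null)
                return null;

            int idx = name.indexOf('@');

            return idx >= 0 ? name.substring(0, idx) : name;
        }

        /** {@inheritDoc} */
        @Override public void start() throws IgniteException {
            // Validate configuration here; called once before the mapper is first used.
        }

        /** {@inheritDoc} */
        @Override public void stop() throws IgniteException {
            // No resources to release.
        }
    }
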
http://git-wip-us.apache.org/repos/asf/ignite/blob/b7489457/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualAsyncTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualAsyncTest.java b/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualAsyncTest.java
new file mode 100644
index 0000000..bbf1223
--- /dev/null
+++ b/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualAsyncTest.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.igfs;
+
+/**
+ * DUAL_ASYNC mode test.
+ */
+public class Hadoop1OverIgfsDualAsyncTest extends Hadoop1DualAbstractTest {
+    /**
+     * Constructor.
+     */
+    public Hadoop1OverIgfsDualAsyncTest() {
+        super(IgfsMode.DUAL_ASYNC);
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/b7489457/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualSyncTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualSyncTest.java b/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualSyncTest.java
new file mode 100644
index 0000000..c57415c
--- /dev/null
+++ b/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/Hadoop1OverIgfsDualSyncTest.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.igfs;
+
+/**
+ * DUAL_SYNC mode test.
+ */
+public class Hadoop1OverIgfsDualSyncTest extends Hadoop1DualAbstractTest {
+    /**
+     * Constructor.
+     */
+    public Hadoop1OverIgfsDualSyncTest() {
+        super(IgfsMode.DUAL_SYNC);
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/b7489457/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/HadoopFIleSystemFactorySelfTest.java
----------------------------------------------------------------------
diff --git a/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/HadoopFIleSystemFactorySelfTest.java b/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/HadoopFIleSystemFactorySelfTest.java
new file mode 100644
index 0000000..5be3a64
--- /dev/null
+++ b/modules/hadoop-impl/src/test/java/org/apache/ignite/igfs/HadoopFIleSystemFactorySelfTest.java
@@ -0,0 +1,317 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.igfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.ignite.IgniteException;
+import org.apache.ignite.cache.CacheWriteSynchronizationMode;
+import org.apache.ignite.configuration.CacheConfiguration;
+import org.apache.ignite.configuration.FileSystemConfiguration;
+import org.apache.ignite.configuration.IgniteConfiguration;
+import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory;
+import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem;
+import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem;
+import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem;
+import org.apache.ignite.internal.processors.igfs.IgfsCommonAbstractTest;
+import org.apache.ignite.internal.processors.igfs.IgfsEx;
+import org.apache.ignite.internal.util.typedef.G;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
+import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
+import org.jetbrains.annotations.Nullable;
+
+import java.io.Externalizable;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.net.URI;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
+import static org.apache.ignite.cache.CacheMode.PARTITIONED;
+import static org.apache.ignite.cache.CacheMode.REPLICATED;
+
+/**
+ * Tests for Hadoop file system factory.
+ */
+public class HadoopFIleSystemFactorySelfTest extends IgfsCommonAbstractTest {
+    /** Amount of "start" invocations */
+    private static final AtomicInteger START_CNT = new AtomicInteger();
+
+    /** Amount of "stop" invocations */
+    private static final AtomicInteger STOP_CNT = new AtomicInteger();
+
+    /** Path to secondary file system configuration. */
+    private static final String SECONDARY_CFG_PATH = "/work/core-site-HadoopFIleSystemFactorySelfTest.xml";
+
+    /** IGFS path for DUAL mode. */
+    private static final Path PATH_DUAL = new Path("/ignite/sync/test_dir");
+
+    /** IGFS path for PROXY mode. */
+    private static final Path PATH_PROXY = new Path("/ignite/proxy/test_dir");
+
+    /** IGFS path for DUAL mode. */
+    private static final IgfsPath IGFS_PATH_DUAL = new IgfsPath("/ignite/sync/test_dir");
+
+    /** IGFS path for PROXY mode. */
+    private static final IgfsPath IGFS_PATH_PROXY = new IgfsPath("/ignite/proxy/test_dir");
+
+    /** Secondary IGFS. */
+    private IgfsEx secondary;
+
+    /** Primary IGFS. */
+    private IgfsEx primary;
+
+    /** {@inheritDoc} */
+    @Override protected void beforeTest() throws Exception {
+        super.beforeTest();
+
+        START_CNT.set(0);
+        STOP_CNT.set(0);
+
+        secondary = startSecondary();
+        primary = startPrimary();
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void afterTest() throws Exception {
+        super.afterTest();
+
+        secondary = null;
+        primary = null;
+
+        stopAllGrids();
+    }
+
+    /**
+     * Test custom factory.
+     *
+     * @throws Exception If failed.
+     */
+    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
+    public void testCustomFactory() throws Exception {
+        assert START_CNT.get() == 1;
+        assert STOP_CNT.get() == 0;
+
+        // Use IGFS directly.
+        primary.mkdirs(IGFS_PATH_DUAL);
+
+        assert primary.exists(IGFS_PATH_DUAL);
+        assert secondary.exists(IGFS_PATH_DUAL);
+
+        // Create remote instance.
+        FileSystem fs = FileSystem.get(URI.create("igfs://primary:primary@127.0.0.1:10500/"), baseConfiguration());
+
+        // Ensure lifecycle callback was invoked.
+        assert START_CNT.get() == 2;
+        assert STOP_CNT.get() == 0;
+
+        // Check file system operations.
+        assert fs.exists(PATH_DUAL);
+
+        assert fs.delete(PATH_DUAL, true);
+        assert !primary.exists(IGFS_PATH_DUAL);
+        assert !secondary.exists(IGFS_PATH_DUAL);
+        assert !fs.exists(PATH_DUAL);
+
+        assert fs.mkdirs(PATH_DUAL);
+        assert primary.exists(IGFS_PATH_DUAL);
+        assert secondary.exists(IGFS_PATH_DUAL);
+        assert fs.exists(PATH_DUAL);
+
+        assert fs.mkdirs(PATH_PROXY);
+        assert secondary.exists(IGFS_PATH_PROXY);
+        assert fs.exists(PATH_PROXY);
+
+        // Close file system and ensure that associated factory was notified.
+        fs.close();
+
+        assert START_CNT.get() == 2;
+        assert STOP_CNT.get() == 1;
+
+        // Stop primary node and ensure that base factory was notified.
+        G.stop(primary.context().kernalContext().grid().name(), true);
+
+        assert START_CNT.get() == 2;
+        assert STOP_CNT.get() == 2;
+    }
+
+    /**
+     * Start secondary IGFS.
+     *
+     * @return IGFS.
+     * @throws Exception If failed.
+     */
+    private static IgfsEx startSecondary() throws Exception {
+        return start("secondary", 11500, IgfsMode.PRIMARY, null);
+    }
+
+    /**
+     * Start primary IGFS.
+     *
+     * @return IGFS.
+     * @throws Exception If failed.
+     */
+    private static IgfsEx startPrimary() throws Exception {
+        // Prepare configuration.
+        Configuration conf = baseConfiguration();
+
+        conf.set("fs.defaultFS", "igfs://secondary:secondary@127.0.0.1:11500/");
+
+        writeConfigurationToFile(conf);
+
+        // Configure factory.
+        TestFactory factory = new TestFactory();
+
+        factory.setUri("igfs://secondary:secondary@127.0.0.1:11500/");
+        factory.setConfigPaths(SECONDARY_CFG_PATH);
+
+        // Configure file system.
+        IgniteHadoopIgfsSecondaryFileSystem fs = new IgniteHadoopIgfsSecondaryFileSystem();
+
+        fs.setFileSystemFactory(factory);
+
+        // Start.
+        return start("primary", 10500, IgfsMode.DUAL_ASYNC, fs);
+    }
+
+    /**
+     * Start Ignite node with IGFS instance.
+     *
+     * @param name Node and IGFS name.
+     * @param endpointPort Endpoint port.
+     * @param dfltMode Default path mode.
+     * @param secondaryFs Secondary file system.
+     * @return Igfs instance.
+     */
+    private static IgfsEx start(String name, int endpointPort, IgfsMode dfltMode,
+        @Nullable IgfsSecondaryFileSystem secondaryFs) {
+        IgfsIpcEndpointConfiguration endpointCfg = new IgfsIpcEndpointConfiguration();
+
+        endpointCfg.setType(IgfsIpcEndpointType.TCP);
+        endpointCfg.setHost("127.0.0.1");
+        endpointCfg.setPort(endpointPort);
+
+        FileSystemConfiguration igfsCfg = new FileSystemConfiguration();
+
+        igfsCfg.setDataCacheName("dataCache");
+        igfsCfg.setMetaCacheName("metaCache");
+        igfsCfg.setName(name);
+        igfsCfg.setDefaultMode(dfltMode);
+        igfsCfg.setIpcEndpointConfiguration(endpointCfg);
+        igfsCfg.setSecondaryFileSystem(secondaryFs);
+        igfsCfg.setInitializeDefaultPathModes(true);
+
+        CacheConfiguration dataCacheCfg = defaultCacheConfiguration();
+
+        dataCacheCfg.setName("dataCache");
+        dataCacheCfg.setCacheMode(PARTITIONED);
+        dataCacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
+        dataCacheCfg.setAffinityMapper(new IgfsGroupDataBlocksKeyMapper(2));
+        dataCacheCfg.setBackups(0);
+        dataCacheCfg.setAtomicityMode(TRANSACTIONAL);
+        dataCacheCfg.setOffHeapMaxMemory(0);
+
+        CacheConfiguration metaCacheCfg = defaultCacheConfiguration();
+
+        metaCacheCfg.setName("metaCache");
+        metaCacheCfg.setCacheMode(REPLICATED);
+        metaCacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
+        metaCacheCfg.setAtomicityMode(TRANSACTIONAL);
+
+        IgniteConfiguration cfg = new IgniteConfiguration();
+
+        cfg.setGridName(name);
+
+        TcpDiscoverySpi discoSpi = new TcpDiscoverySpi();
+
+        discoSpi.setIpFinder(new TcpDiscoveryVmIpFinder(true));
+
+        cfg.setDiscoverySpi(discoSpi);
+        cfg.setCacheConfiguration(dataCacheCfg, metaCacheCfg);
+        cfg.setFileSystemConfiguration(igfsCfg);
+
+        cfg.setLocalHost("127.0.0.1");
+        cfg.setConnectorConfiguration(null);
+
+        return (IgfsEx)G.start(cfg).fileSystem(name);
+    }
+
+    /**
+     * Create base FileSystem configuration.
+     *
+     * @return Configuration.
+     */
+    private static Configuration baseConfiguration() {
+        Configuration conf = new Configuration();
+
+        conf.set("fs.igfs.impl", IgniteHadoopFileSystem.class.getName());
+
+        return conf;
+    }
+
+    /**
+     * Write configuration to file.
+     *
+     * @param conf Configuration.
+     * @throws Exception If failed.
+     */
+    @SuppressWarnings("ResultOfMethodCallIgnored")
+    private static void writeConfigurationToFile(Configuration conf) throws Exception {
+        final String path = U.getIgniteHome() + SECONDARY_CFG_PATH;
+
+        File file = new File(path);
+
+        file.delete();
+
+        assertFalse(file.exists());
+
+        try (FileOutputStream fos = new FileOutputStream(file)) {
+            conf.writeXml(fos);
+        }
+
+        assertTrue(file.exists());
+    }
+
+    /**
+     * Test factory.
+     */
+    private static class TestFactory extends CachingHadoopFileSystemFactory {
+        /**
+         * {@link Externalizable} support.
+         */
+        public TestFactory() {
+            // No-op.
+        }
+
+        /** {@inheritDoc} */
+        @Override public void start() throws IgniteException {
+            START_CNT.incrementAndGet();
+
+            super.start();
+        }
+
+        /** {@inheritDoc} */
+        @Override public void stop() throws IgniteException {
+            STOP_CNT.incrementAndGet();
+
+            super.stop();
+        }
+    }
+}

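To round out the factory test above, a minimal client-side sketch of accessing IGFS through the standard Hadoop FileSystem API; the host, port and authority are placeholders following the <igfs_name>:<grid_name>@<host>:<port> pattern used in these tests.

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem;

    public class IgfsClientExample {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();

            // Register the IGFS implementation for the "igfs" scheme,
            // mirroring baseConfiguration() in the test above.
            conf.set("fs.igfs.impl", IgniteHadoopFileSystem.class.getName());

            try (FileSystem fs = FileSystem.get(URI.create("igfs://igfs:grid@127.0.0.1:10500/"), conf)) {
                Path dir = new Path("/example/dir");

                fs.mkdirs(dir);

                System.out.println("Exists: " + fs.exists(dir));
            }
        }
    }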