http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java new file mode 100644 index 0000000..a0f2ca1 --- /dev/null +++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java @@ -0,0 +1,353 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.registry; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.registry.client.api.RegistryConstants; +import org.apache.hadoop.registry.client.binding.RegistryUtils; +import org.apache.hadoop.registry.client.binding.RegistryTypeUtils; +import org.apache.hadoop.registry.client.types.AddressTypes; +import org.apache.hadoop.registry.client.types.Endpoint; +import org.apache.hadoop.registry.client.types.ProtocolTypes; +import org.apache.hadoop.registry.client.types.ServiceRecord; +import org.apache.hadoop.registry.client.types.yarn.YarnRegistryAttributes; +import org.apache.hadoop.registry.secure.AbstractSecureRegistryTest; +import org.apache.zookeeper.common.PathUtils; +import org.junit.Assert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.security.auth.Subject; +import javax.security.auth.login.LoginContext; +import javax.security.auth.login.LoginException; +import java.io.File; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; + +import static org.apache.hadoop.registry.client.binding.RegistryTypeUtils.*; + +/** + * This is a set of static methods to aid testing the registry operations. + * The methods can be imported statically âor the class used as a base + * class for tests. 
+ */ +public class RegistryTestHelper extends Assert { + public static final String SC_HADOOP = "org-apache-hadoop"; + public static final String USER = "devteam/"; + public static final String NAME = "hdfs"; + public static final String API_WEBHDFS = "classpath:org.apache.hadoop.namenode.webhdfs"; + public static final String API_HDFS = "classpath:org.apache.hadoop.namenode.dfs"; + public static final String USERPATH = RegistryConstants.PATH_USERS + USER; + public static final String PARENT_PATH = USERPATH + SC_HADOOP + "/"; + public static final String ENTRY_PATH = PARENT_PATH + NAME; + public static final String NNIPC = "uuid:423C2B93-C927-4050-AEC6-6540E6646437"; + public static final String IPC2 = "uuid:0663501D-5AD3-4F7E-9419-52F5D6636FCF"; + private static final Logger LOG = + LoggerFactory.getLogger(RegistryTestHelper.class); + private static final RegistryUtils.ServiceRecordMarshal recordMarshal = + new RegistryUtils.ServiceRecordMarshal(); + public static final String HTTP_API = "http://"; + + /** + * Assert the path is valid by ZK rules + * @param path path to check + */ + public static void assertValidZKPath(String path) { + try { + PathUtils.validatePath(path); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Invalid Path " + path + ": " + e, e); + } + } + + /** + * Assert that a string is not empty (null or "") + * @param message message to raise if the string is empty + * @param check string to check + */ + public static void assertNotEmpty(String message, String check) { + if (StringUtils.isEmpty(check)) { + fail(message); + } + } + + /** + * Assert that a string is empty (null or "") + * @param check string to check + */ + public static void assertNotEmpty(String check) { + if (StringUtils.isEmpty(check)) { + fail("Empty string"); + } + } + + /** + * Log the details of a login context + * @param name name to assert that the user is logged in as + * @param loginContext the login context + */ + public static void 
logLoginDetails(String name, + LoginContext loginContext) { + assertNotNull("Null login context", loginContext); + Subject subject = loginContext.getSubject(); + LOG.info("Logged in as {}:\n {}", name, subject); + } + + /** + * Set the JVM property to enable Kerberos debugging + */ + public static void enableKerberosDebugging() { + System.setProperty(AbstractSecureRegistryTest.SUN_SECURITY_KRB5_DEBUG, + "true"); + } + /** + * Set the JVM property to enable Kerberos debugging + */ + public static void disableKerberosDebugging() { + System.setProperty(AbstractSecureRegistryTest.SUN_SECURITY_KRB5_DEBUG, + "false"); + } + + /** + * General code to validate bits of a component/service entry built iwth + * {@link #addSampleEndpoints(ServiceRecord, String)} + * @param record instance to check + */ + public static void validateEntry(ServiceRecord record) { + assertNotNull("null service record", record); + List<Endpoint> endpoints = record.external; + assertEquals(2, endpoints.size()); + + Endpoint webhdfs = findEndpoint(record, API_WEBHDFS, true, 1, 1); + assertEquals(API_WEBHDFS, webhdfs.api); + assertEquals(AddressTypes.ADDRESS_URI, webhdfs.addressType); + assertEquals(ProtocolTypes.PROTOCOL_REST, webhdfs.protocolType); + List<Map<String, String>> addressList = webhdfs.addresses; + Map<String, String> url = addressList.get(0); + String addr = url.get("uri"); + assertTrue(addr.contains("http")); + assertTrue(addr.contains(":8020")); + + Endpoint nnipc = findEndpoint(record, NNIPC, false, 1,2); + assertEquals("wrong protocol in " + nnipc, ProtocolTypes.PROTOCOL_THRIFT, + nnipc.protocolType); + + Endpoint ipc2 = findEndpoint(record, IPC2, false, 1,2); + assertNotNull(ipc2); + + Endpoint web = findEndpoint(record, HTTP_API, true, 1, 1); + assertEquals(1, web.addresses.size()); + assertEquals(1, web.addresses.get(0).size()); + } + + /** + * Assert that an endpoint matches the criteria + * @param endpoint endpoint to examine + * @param addressType expected address type + * 
@param protocolType expected protocol type + * @param api API + */ + public static void assertMatches(Endpoint endpoint, + String addressType, + String protocolType, + String api) { + assertNotNull(endpoint); + assertEquals(addressType, endpoint.addressType); + assertEquals(protocolType, endpoint.protocolType); + assertEquals(api, endpoint.api); + } + + /** + * Assert the records match. + * @param source record that was written + * @param resolved the one that resolved. + */ + public static void assertMatches(ServiceRecord source, ServiceRecord resolved) { + assertNotNull("Null source record ", source); + assertNotNull("Null resolved record ", resolved); + assertEquals(source.description, resolved.description); + + Map<String, String> srcAttrs = source.attributes(); + Map<String, String> resolvedAttrs = resolved.attributes(); + String sourceAsString = source.toString(); + String resolvedAsString = resolved.toString(); + assertEquals("Wrong count of attrs in \n" + sourceAsString + + "\nfrom\n" + resolvedAsString, + srcAttrs.size(), + resolvedAttrs.size()); + for (Map.Entry<String, String> entry : srcAttrs.entrySet()) { + String attr = entry.getKey(); + assertEquals("attribute "+ attr, entry.getValue(), resolved.get(attr)); + } + assertEquals("wrong external endpoint count", + source.external.size(), resolved.external.size()); + assertEquals("wrong external endpoint count", + source.internal.size(), resolved.internal.size()); + } + + /** + * Find an endpoint in a record or fail, + * @param record record + * @param api API + * @param external external? + * @param addressElements expected # of address elements? + * @param addressTupleSize expected size of a type + * @return the endpoint. + */ + public static Endpoint findEndpoint(ServiceRecord record, + String api, boolean external, int addressElements, int addressTupleSize) { + Endpoint epr = external ? 
record.getExternalEndpoint(api) + : record.getInternalEndpoint(api); + if (epr != null) { + assertEquals("wrong # of addresses", + addressElements, epr.addresses.size()); + assertEquals("wrong # of elements in an address tuple", + addressTupleSize, epr.addresses.get(0).size()); + return epr; + } + List<Endpoint> endpoints = external ? record.external : record.internal; + StringBuilder builder = new StringBuilder(); + for (Endpoint endpoint : endpoints) { + builder.append("\"").append(endpoint).append("\" "); + } + fail("Did not find " + api + " in endpoints " + builder); + // never reached; here to keep the compiler happy + return null; + } + + /** + * Log a record + * @param name record name + * @param record details + * @throws IOException only if something bizarre goes wrong marshalling + * a record. + */ + public static void logRecord(String name, ServiceRecord record) throws + IOException { + LOG.info(" {} = \n{}\n", name, recordMarshal.toJson(record)); + } + + /** + * Create a service entry with the sample endpoints + * @param persistence persistence policy + * @return the record + * @throws IOException on a failure + */ + public static ServiceRecord buildExampleServiceEntry(String persistence) throws + IOException, + URISyntaxException { + ServiceRecord record = new ServiceRecord(); + record.set(YarnRegistryAttributes.YARN_ID, "example-0001"); + record.set(YarnRegistryAttributes.YARN_PERSISTENCE, persistence); + addSampleEndpoints(record, "namenode"); + return record; + } + + /** + * Add some endpoints + * @param entry entry + */ + public static void addSampleEndpoints(ServiceRecord entry, String hostname) + throws URISyntaxException { + assertNotNull(hostname); + entry.addExternalEndpoint(webEndpoint(HTTP_API, + new URI("http", hostname + ":80", "/"))); + entry.addExternalEndpoint( + restEndpoint(API_WEBHDFS, + new URI("http", hostname + ":8020", "/"))); + + Endpoint endpoint = ipcEndpoint(API_HDFS, null); + 
endpoint.addresses.add(RegistryTypeUtils.hostnamePortPair(hostname, 8030)); + entry.addInternalEndpoint(endpoint); + InetSocketAddress localhost = new InetSocketAddress("localhost", 8050); + entry.addInternalEndpoint( + inetAddrEndpoint(NNIPC, ProtocolTypes.PROTOCOL_THRIFT, "localhost", + 8050)); + entry.addInternalEndpoint( + RegistryTypeUtils.ipcEndpoint( + IPC2, localhost)); + } + + /** + * Describe the stage in the process with a box around it -so as + * to highlight it in test logs + * @param log log to use + * @param text text + * @param args logger args + */ + public static void describe(Logger log, String text, Object...args) { + log.info("\n======================================="); + log.info(text, args); + log.info("=======================================\n"); + } + + /** + * log out from a context if non-null ... exceptions are caught and logged + * @param login login context + * @return null, always + */ + public static LoginContext logout(LoginContext login) { + try { + if (login != null) { + LOG.debug("Logging out login context {}", login.toString()); + login.logout(); + } + } catch (LoginException e) { + LOG.warn("Exception logging out: {}", e, e); + } + return null; + } + + /** + * Login via a UGI. 
Requres UGI to have been set up + * @param user username + * @param keytab keytab to list + * @return the UGI + * @throws IOException + */ + public static UserGroupInformation loginUGI(String user, File keytab) throws + IOException { + LOG.info("Logging in as {} from {}", user, keytab); + return UserGroupInformation.loginUserFromKeytabAndReturnUGI(user, + keytab.getAbsolutePath()); + } + + public static ServiceRecord createRecord(String persistence) { + return createRecord("01", persistence, "description"); + } + + public static ServiceRecord createRecord(String id, String persistence, + String description) { + ServiceRecord serviceRecord = new ServiceRecord(); + serviceRecord.set(YarnRegistryAttributes.YARN_ID, id); + serviceRecord.description = description; + serviceRecord.set(YarnRegistryAttributes.YARN_PERSISTENCE, persistence); + return serviceRecord; + } + + public static ServiceRecord createRecord(String id, String persistence, + String description, String data) { + return createRecord(id, persistence, description); + } +}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/cli/TestRegistryCli.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/cli/TestRegistryCli.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/cli/TestRegistryCli.java new file mode 100644 index 0000000..bd8a38d --- /dev/null +++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/cli/TestRegistryCli.java @@ -0,0 +1,197 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.registry.cli; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; + +import org.apache.hadoop.registry.AbstractRegistryTest; +import org.apache.hadoop.registry.operations.TestRegistryOperations; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TestRegistryCli extends AbstractRegistryTest { + protected static final Logger LOG = + LoggerFactory.getLogger(TestRegistryOperations.class); + + private ByteArrayOutputStream sysOutStream; + private PrintStream sysOut; + private ByteArrayOutputStream sysErrStream; + private PrintStream sysErr; + private RegistryCli cli; + + @Before + public void setUp() throws Exception { + sysOutStream = new ByteArrayOutputStream(); + sysOut = new PrintStream(sysOutStream); + sysErrStream = new ByteArrayOutputStream(); + sysErr = new PrintStream(sysErrStream); + System.setOut(sysOut); + cli = new RegistryCli(operations, createRegistryConfiguration(), sysOut, sysErr); + } + + @After + public void tearDown() throws Exception { + cli.close(); + } + + private void assertResult(RegistryCli cli, int code, String...args) throws Exception { + int result = cli.run(args); + assertEquals(code, result); + } + + @Test + public void testBadCommands() throws Exception { + assertResult(cli, -1, new String[] { }); + assertResult(cli, -1, "foo"); + } + + @Test + public void testInvalidNumArgs() throws Exception { + assertResult(cli, -1, "ls"); + assertResult(cli, -1, "ls", "/path", "/extraPath"); + assertResult(cli, -1, "resolve"); + assertResult(cli, -1, "resolve", "/path", "/extraPath"); + assertResult(cli, -1, "mknode"); + assertResult(cli, -1, "mknode", "/path", "/extraPath"); + assertResult(cli, -1, "rm"); + assertResult(cli, -1, "rm", "/path", "/extraPath"); + assertResult(cli, -1, "bind"); + assertResult(cli, -1, "bind", "foo"); + assertResult(cli, -1, "bind", "-inet", "foo"); + assertResult(cli, -1, 
"bind", "-inet", "-api", "-p", "378", "-h", "host", "/foo"); + assertResult(cli, -1, "bind", "-inet", "-api", "Api", "-p", "-h", "host", "/foo"); + assertResult(cli, -1, "bind", "-inet", "-api", "Api", "-p", "378", "-h", "/foo"); + assertResult(cli, -1, "bind", "-inet", "-api", "Api", "-p", "378", "-h", "host"); + assertResult(cli, -1, "bind", "-api", "Api", "-p", "378", "-h", "host", "/foo"); + assertResult(cli, -1, "bind", "-webui", "foo"); + assertResult(cli, -1, "bind", "-webui", "-api", "Api", "/foo"); + assertResult(cli, -1, "bind", "-webui", "uriString", "-api", "/foo"); + assertResult(cli, -1, "bind", "-webui", "uriString", "-api", "Api"); + assertResult(cli, -1, "bind", "-rest", "foo"); + assertResult(cli, -1, "bind", "-rest", "uriString", "-api", "Api"); + assertResult(cli, -1, "bind", "-rest", "-api", "Api", "/foo"); + assertResult(cli, -1, "bind", "-rest", "uriString", "-api", "/foo"); + assertResult(cli, -1, "bind", "uriString", "-api", "Api", "/foo"); + } + + @Test + public void testBadArgType() throws Exception { + assertResult(cli, -1, "bind", "-inet", "-api", "Api", "-p", "fooPort", "-h", + "host", "/dir"); + } + + @Test + public void testBadPath() throws Exception { + assertResult(cli, -1, "ls", "NonSlashPath"); + assertResult(cli, -1, "ls", "//"); + assertResult(cli, -1, "resolve", "NonSlashPath"); + assertResult(cli, -1, "resolve", "//"); + assertResult(cli, -1, "mknode", "NonSlashPath"); + assertResult(cli, -1, "mknode", "//"); + assertResult(cli, -1, "rm", "NonSlashPath"); + assertResult(cli, -1, "rm", "//"); + assertResult(cli, -1, "bind", "-inet", "-api", "Api", "-p", "378", "-h", "host", "NonSlashPath"); + assertResult(cli, -1, "bind", "-inet", "-api", "Api", "-p", "378", "-h", "host", "//"); + assertResult(cli, -1, "bind", "-webui", "uriString", "-api", "Api", "NonSlashPath"); + assertResult(cli, -1, "bind", "-webui", "uriString", "-api", "Api", "//"); + assertResult(cli, -1, "bind", "-rest", "uriString", "-api", "Api", "NonSlashPath"); + 
assertResult(cli, -1, "bind", "-rest", "uriString", "-api", "Api", "//"); + } + + @Test + public void testNotExistingPaths() throws Exception { + assertResult(cli, -1, "ls", "/nonexisting_path"); + assertResult(cli, -1, "ls", "/NonExistingDir/nonexisting_path"); + assertResult(cli, -1, "resolve", "/nonexisting_path"); + assertResult(cli, -1, "resolve", "/NonExistingDir/nonexisting_path"); + assertResult(cli, -1, "bind", "-inet", "-api", "Api", "-p", "378", "-h", "host", "/NonExistingDir/nonexisting_path"); + assertResult(cli, -1, "bind", "-webui", "uriString", "-api", "Api", "/NonExistingDir/nonexisting_path"); + assertResult(cli, -1, "bind", "-rest", "uriString", "-api", "Api", "/NonExistingDir/nonexisting_path"); + } + + @Test + public void testValidCommands() throws Exception { + assertResult(cli, 0, "bind", "-inet", "-api", "Api", "-p", "378", "-h", "host", "/foo"); + assertResult(cli, 0, "resolve", "/foo"); + assertResult(cli, 0, "rm", "/foo"); + assertResult(cli, -1, "resolve", "/foo"); + + assertResult(cli, 0, "bind", "-webui", "uriString", "-api", "Api", "/foo"); + assertResult(cli, 0, "resolve", "/foo"); + assertResult(cli, 0, "rm", "/foo"); + assertResult(cli, -1, "resolve", "/foo"); + + assertResult(cli, 0, "bind", "-rest", "uriString", "-api", "Api", "/foo"); + assertResult(cli, 0, "resolve", "/foo"); + assertResult(cli, 0, "rm", "/foo"); + assertResult(cli, -1, "resolve", "/foo"); + + //Test Sub Directories Binds + assertResult(cli, 0, "mknode", "/subdir"); + assertResult(cli, -1, "resolve", "/subdir"); + + assertResult(cli, 0, "bind", "-inet", "-api", "Api", "-p", "378", "-h", "host", "/subdir/foo"); + assertResult(cli, 0, "resolve", "/subdir/foo"); + assertResult(cli, 0, "rm", "/subdir/foo"); + assertResult(cli, -1, "resolve", "/subdir/foo"); + + assertResult(cli, 0, "bind", "-webui", "uriString", "-api", "Api", "/subdir/foo"); + assertResult(cli, 0, "resolve", "/subdir/foo"); + assertResult(cli, 0, "rm", "/subdir/foo"); + assertResult(cli, -1, 
"resolve", "/subdir/foo"); + + assertResult(cli, 0, "bind", "-rest", "uriString", "-api", "Api", "/subdir/foo"); + assertResult(cli, 0, "resolve", "/subdir/foo"); + assertResult(cli, 0, "rm", "/subdir/foo"); + assertResult(cli, -1, "resolve", "/subdir/foo"); + + assertResult(cli, 0, "rm", "/subdir"); + assertResult(cli, -1, "resolve", "/subdir"); + + //Test Bind that the dir itself + assertResult(cli, 0, "mknode", "/dir"); + assertResult(cli, -1, "resolve", "/dir"); + + assertResult(cli, 0, "bind", "-inet", "-api", "Api", "-p", "378", "-h", "host", "/dir"); + assertResult(cli, 0, "resolve", "/dir"); + assertResult(cli, 0, "rm", "/dir"); + assertResult(cli, -1, "resolve", "/dir"); + + assertResult(cli, 0, "mknode", "/dir"); + assertResult(cli, -1, "resolve", "/dir"); + + assertResult(cli, 0, "bind", "-webui", "uriString", "-api", "Api", "/dir"); + assertResult(cli, 0, "resolve", "/dir"); + assertResult(cli, 0, "rm", "/dir"); + assertResult(cli, -1, "resolve", "/dir"); + + assertResult(cli, 0, "mknode", "/dir"); + assertResult(cli, -1, "resolve", "/dir"); + + assertResult(cli, 0, "bind", "-rest", "uriString", "-api", "Api", "/dir"); + assertResult(cli, 0, "resolve", "/dir"); + assertResult(cli, 0, "rm", "/dir"); + assertResult(cli, -1, "resolve", "/dir"); + + assertResult(cli, 0, "rm", "/Nonexitent"); + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestMarshalling.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestMarshalling.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestMarshalling.java new file mode 100644 index 0000000..f1814d3 --- /dev/null +++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestMarshalling.java 
@@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.registry.client.binding; + +import org.apache.hadoop.registry.RegistryTestHelper; +import org.apache.hadoop.registry.client.exceptions.InvalidRecordException; +import org.apache.hadoop.registry.client.exceptions.NoRecordException; +import org.apache.hadoop.registry.client.types.ServiceRecord; +import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestName; +import org.junit.rules.Timeout; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Test record marshalling + */ +public class TestMarshalling extends RegistryTestHelper { + private static final Logger + LOG = LoggerFactory.getLogger(TestMarshalling.class); + + @Rule + public final Timeout testTimeout = new Timeout(10000); + @Rule + public TestName methodName = new TestName(); + + private static RegistryUtils.ServiceRecordMarshal marshal; + + @BeforeClass + public static void setupClass() { + marshal = new RegistryUtils.ServiceRecordMarshal(); + } + + @Test + public void testRoundTrip() throws Throwable { + String persistence = 
PersistencePolicies.PERMANENT; + ServiceRecord record = createRecord(persistence); + record.set("customkey", "customvalue"); + record.set("customkey2", "customvalue2"); + RegistryTypeUtils.validateServiceRecord("", record); + LOG.info(marshal.toJson(record)); + byte[] bytes = marshal.toBytes(record); + ServiceRecord r2 = marshal.fromBytes("", bytes); + assertMatches(record, r2); + RegistryTypeUtils.validateServiceRecord("", r2); + } + + + @Test(expected = NoRecordException.class) + public void testUnmarshallNoData() throws Throwable { + marshal.fromBytes("src", new byte[]{}); + } + + @Test(expected = NoRecordException.class) + public void testUnmarshallNotEnoughData() throws Throwable { + // this is nominally JSON -but without the service record header + marshal.fromBytes("src", new byte[]{'{','}'}, ServiceRecord.RECORD_TYPE); + } + + @Test(expected = InvalidRecordException.class) + public void testUnmarshallNoBody() throws Throwable { + byte[] bytes = "this is not valid JSON at all and should fail".getBytes(); + marshal.fromBytes("src", bytes); + } + + @Test(expected = InvalidRecordException.class) + public void testUnmarshallWrongType() throws Throwable { + byte[] bytes = "{'type':''}".getBytes(); + ServiceRecord serviceRecord = marshal.fromBytes("marshalling", bytes); + RegistryTypeUtils.validateServiceRecord("validating", serviceRecord); + } + + @Test(expected = NoRecordException.class) + public void testUnmarshallWrongLongType() throws Throwable { + ServiceRecord record = new ServiceRecord(); + record.type = "ThisRecordHasALongButNonMatchingType"; + byte[] bytes = marshal.toBytes(record); + ServiceRecord serviceRecord = marshal.fromBytes("marshalling", + bytes, ServiceRecord.RECORD_TYPE); + } + + @Test(expected = NoRecordException.class) + public void testUnmarshallNoType() throws Throwable { + ServiceRecord record = new ServiceRecord(); + record.type = "NoRecord"; + byte[] bytes = marshal.toBytes(record); + ServiceRecord serviceRecord = 
marshal.fromBytes("marshalling", + bytes, ServiceRecord.RECORD_TYPE); + } + + @Test(expected = InvalidRecordException.class) + public void testRecordValidationWrongType() throws Throwable { + ServiceRecord record = new ServiceRecord(); + record.type = "NotAServiceRecordType"; + RegistryTypeUtils.validateServiceRecord("validating", record); + } + + @Test + public void testUnknownFieldsRoundTrip() throws Throwable { + ServiceRecord record = + createRecord(PersistencePolicies.APPLICATION_ATTEMPT); + record.set("key", "value"); + record.set("intval", "2"); + assertEquals("value", record.get("key")); + assertEquals("2", record.get("intval")); + assertNull(record.get("null")); + assertEquals("defval", record.get("null", "defval")); + byte[] bytes = marshal.toBytes(record); + ServiceRecord r2 = marshal.fromBytes("", bytes); + assertEquals("value", r2.get("key")); + assertEquals("2", r2.get("intval")); + } + + @Test + public void testFieldPropagationInCopy() throws Throwable { + ServiceRecord record = + createRecord(PersistencePolicies.APPLICATION_ATTEMPT); + record.set("key", "value"); + record.set("intval", "2"); + ServiceRecord that = new ServiceRecord(record); + assertMatches(record, that); + } + +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestRegistryOperationUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestRegistryOperationUtils.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestRegistryOperationUtils.java new file mode 100644 index 0000000..b07d2ce --- /dev/null +++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestRegistryOperationUtils.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation 
(ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.registry.client.binding; + +import org.apache.hadoop.security.UserGroupInformation; +import org.junit.Assert; +import org.junit.Test; + +/** + * Tests for the {@link RegistryUtils} class + */ +public class TestRegistryOperationUtils extends Assert { + + @Test + public void testUsernameExtractionEnvVarOverrride() throws Throwable { + String whoami = RegistryUtils.getCurrentUsernameUnencoded("drwho"); + assertEquals("drwho", whoami); + + } + + @Test + public void testUsernameExtractionCurrentuser() throws Throwable { + String whoami = RegistryUtils.getCurrentUsernameUnencoded(""); + String ugiUser = UserGroupInformation.getCurrentUser().getShortUserName(); + assertEquals(ugiUser, whoami); + } + + @Test + public void testShortenUsername() throws Throwable { + assertEquals("hbase", + RegistryUtils.convertUsername("[email protected]")); + assertEquals("hbase", + RegistryUtils.convertUsername("hbase/[email protected]")); + assertEquals("hbase", + RegistryUtils.convertUsername("hbase")); + assertEquals("hbase user", + RegistryUtils.convertUsername("hbase user")); + } + +} 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestRegistryPathUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestRegistryPathUtils.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestRegistryPathUtils.java new file mode 100644 index 0000000..4346c9a --- /dev/null +++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestRegistryPathUtils.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

package org.apache.hadoop.registry.client.binding;

import static org.apache.hadoop.registry.client.binding.RegistryPathUtils.*;
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.fs.PathNotFoundException;
import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException;
import org.junit.Assert;
import org.junit.Test;

/**
 * Tests of {@link RegistryPathUtils}: hostname/IDN encoding, path joining,
 * splitting, parent/last-entry extraction, and path validation.
 */
public class TestRegistryPathUtils extends Assert {

  // Euro currency symbol: a non-ASCII char that must be punycode-encoded
  public static final String EURO = "\u20AC";

  @Test
  public void testFormatAscii() throws Throwable {
    // plain ASCII hostnames pass through unchanged
    String in = "hostname01101101-1";
    assertConverted(in, in);
  }

  /*
   * Euro symbol
   */
  @Test
  public void testFormatEuroSymbol() throws Throwable {
    // "xn--lzg" is the punycode/IDN encoding of the euro sign
    assertConverted("xn--lzg", EURO);
  }

  @Test
  public void testFormatIdempotent() throws Throwable {
    // encoding an already-encoded string must not change it
    assertConverted("xn--lzg", RegistryPathUtils.encodeForRegistry(EURO));
  }

  @Test
  public void testFormatCyrillicSpaced() throws Throwable {
    assertConverted("xn--pa 3-k4di", "\u0413PA\u0414 3");
  }

  /**
   * Assert that encoding {@code in} for the registry yields {@code expected}.
   */
  protected void assertConverted(String expected, String in) {
    String out = RegistryPathUtils.encodeForRegistry(in);
    assertEquals("Conversion of " + in, expected, out);
  }

  @Test
  public void testPaths() throws Throwable {
    // joining empty/root components always yields a normalized "/"
    assertCreatedPathEquals("/", "/", "");
    assertCreatedPathEquals("/", "", "");
    assertCreatedPathEquals("/", "", "/");
    assertCreatedPathEquals("/", "/", "/");

    // duplicate or trailing slashes are collapsed
    assertCreatedPathEquals("/a", "/a", "");
    assertCreatedPathEquals("/a", "/", "a");
    assertCreatedPathEquals("/a/b", "/a", "b");
    assertCreatedPathEquals("/a/b", "/a/", "b");
    assertCreatedPathEquals("/a/b", "/a", "/b");
    assertCreatedPathEquals("/a/b", "/a", "/b/");
    assertCreatedPathEquals("/a", "/a", "/");
    assertCreatedPathEquals("/alice", "/", "/alice");
    assertCreatedPathEquals("/alice", "/alice", "/");
  }

  @Test
  public void testGetUserFromPath() throws Exception {
    // the username is the path element following /registry/users/
    assertEquals("bob", RegistryPathUtils
        .getUsername("/registry/users/bob/services/yarn-service/test1/"));
    assertEquals("bob-dev", RegistryPathUtils
        .getUsername("/registry/users/bob-dev/services/yarn-service/test1"));
    assertEquals("bob.dev", RegistryPathUtils
        .getUsername("/registry/users/bob.dev/services/yarn-service/test1"));
  }


  @Test
  public void testComplexPaths() throws Throwable {
    assertCreatedPathEquals("/", "", "");
    assertCreatedPathEquals("/yarn/registry/users/hadoop/org-apache-hadoop",
        "/yarn/registry",
        "users/hadoop/org-apache-hadoop/");
  }


  /**
   * Assert that {@code createFullPath(base, path)} yields {@code expected}.
   */
  private static void assertCreatedPathEquals(String expected, String base,
      String path) throws IOException {
    String fullPath = createFullPath(base, path);
    assertEquals("\"" + base + "\" + \"" + path + "\" =\"" + fullPath + "\"",
        expected, fullPath);
  }

  @Test
  public void testSplittingEmpty() throws Throwable {
    // empty paths and bare slashes split into zero elements
    assertEquals(0, split("").size());
    assertEquals(0, split("/").size());
    assertEquals(0, split("///").size());
  }


  @Test
  public void testSplitting() throws Throwable {
    assertEquals(1, split("/a").size());
    assertEquals(0, split("/").size());
    assertEquals(3, split("/a/b/c").size());
    assertEquals(3, split("/a/b/c/").size());
    assertEquals(3, split("a/b/c").size());
    // repeated separators are ignored
    assertEquals(3, split("/a/b//c").size());
    assertEquals(3, split("//a/b/c/").size());
    List<String> split = split("//a/b/c/");
    assertEquals("a", split.get(0));
    assertEquals("b", split.get(1));
    assertEquals("c", split.get(2));
  }

  @Test
  public void testParentOf() throws Throwable {
    assertEquals("/", parentOf("/a"));
    assertEquals("/", parentOf("/a/"));
    assertEquals("/a", parentOf("/a/b"));
    assertEquals("/a/b", parentOf("/a/b/c"));
  }

  @Test
  public void testLastPathEntry() throws Throwable {
    // root and empty paths have an empty last entry
    assertEquals("", lastPathEntry("/"));
    assertEquals("", lastPathEntry("//"));
    assertEquals("c", lastPathEntry("/a/b/c"));
    assertEquals("c", lastPathEntry("/a/b/c/"));
  }

  @Test(expected = PathNotFoundException.class)
  public void testParentOfRoot() throws Throwable {
    // the root path has no parent
    parentOf("/");
  }

  @Test
  public void testValidPaths() throws Throwable {
    assertValidPath("/");
    assertValidPath("/a/b/c");
    assertValidPath("/users/drwho/org-apache-hadoop/registry/appid-55-55");
    assertValidPath("/a50");
  }

  @Test
  public void testInvalidPaths() throws Throwable {
    // DNS element rules: no underscores, upper case, or spaces
    assertInvalidPath("/a_b");
    assertInvalidPath("/UpperAndLowerCase");
    assertInvalidPath("/space in string");
// Is this valid? assertInvalidPath("/50");
  }


  /**
   * Assert the path passes ZK-level validation.
   * NOTE(review): this checks {@code validateZKPath} while
   * {@link #assertInvalidPath(String)} checks {@code validateElementsAsDNS}
   * — the two are intentionally different levels of strictness; confirm.
   */
  private void assertValidPath(String path) throws InvalidPathnameException {
    validateZKPath(path);
  }


  /**
   * Assert the path fails DNS-element validation.
   */
  private void assertInvalidPath(String path) throws InvalidPathnameException {
    try {
      validateElementsAsDNS(path);
      fail("path considered valid: " + path);
    } catch (InvalidPathnameException expected) {
      // expected
    }
  }


}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.registry.client.impl; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.api.BackgroundCallback; +import org.apache.curator.framework.api.CuratorEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * This is a little event catcher for curator asynchronous + * operations. + */ +public class CuratorEventCatcher implements BackgroundCallback { + + private static final Logger LOG = + LoggerFactory.getLogger(CuratorEventCatcher.class); + + public final BlockingQueue<CuratorEvent> + events = new LinkedBlockingQueue<CuratorEvent>(1); + + private final AtomicInteger eventCounter = new AtomicInteger(0); + + + @Override + public void processResult(CuratorFramework client, + CuratorEvent event) throws + Exception { + LOG.info("received {}", event); + eventCounter.incrementAndGet(); + events.put(event); + } + + + public int getCount() { + return eventCounter.get(); + } + + /** + * Blocking operation to take the first event off the queue + * @return the first event on the queue, when it arrives + * @throws InterruptedException if interrupted + */ + public CuratorEvent take() throws InterruptedException { + return events.take(); + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/impl/TestCuratorService.java 
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.registry.client.impl;

import org.apache.curator.framework.api.CuratorEvent;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.PathNotFoundException;
import org.apache.hadoop.service.ServiceOperations;
import org.apache.hadoop.registry.AbstractZKRegistryTest;
import org.apache.hadoop.registry.client.impl.zk.CuratorService;
import org.apache.hadoop.registry.client.impl.zk.RegistrySecurity;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.data.ACL;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.List;

/**
 * Test the curator service: list/exists/mkdir/create/update/delete
 * operations against the test ZK cluster, including background deletes.
 */
public class TestCuratorService extends AbstractZKRegistryTest {
  private static final Logger LOG =
      LoggerFactory.getLogger(TestCuratorService.class);

  /** Service under test; created fresh for every test case. */
  protected CuratorService curatorService;

  /** A path which is never created. */
  public static final String MISSING = "/missing";
  private List<ACL> rootACL;

  @Before
  public void startCurator() throws IOException {
    createCuratorService();
  }

  @After
  public void stopCurator() {
    ServiceOperations.stop(curatorService);
  }

  /**
   * Create and start an instance, then ensure the root path exists
   * with world read/write ACLs.
   */
  protected void createCuratorService() throws IOException {
    curatorService = new CuratorService("curatorService");
    curatorService.init(createRegistryConfiguration());
    curatorService.start();
    rootACL = RegistrySecurity.WorldReadWriteACL;
    curatorService.maybeCreate("", CreateMode.PERSISTENT, rootACL, true);
  }

  @Test
  public void testLs() throws Throwable {
    curatorService.zkList("/");
  }

  @Test(expected = PathNotFoundException.class)
  public void testLsNotFound() throws Throwable {
    // listing a nonexistent path must fail
    curatorService.zkList(MISSING);
  }

  @Test
  public void testExists() throws Throwable {
    assertTrue(curatorService.zkPathExists("/"));
  }

  @Test
  public void testExistsMissing() throws Throwable {
    assertFalse(curatorService.zkPathExists(MISSING));
  }

  @Test
  public void testVerifyExists() throws Throwable {
    pathMustExist("/");
  }

  @Test(expected = PathNotFoundException.class)
  public void testVerifyExistsMissing() throws Throwable {
    pathMustExist("/file-not-found");
  }

  @Test
  public void testMkdirs() throws Throwable {
    mkPath("/p1", CreateMode.PERSISTENT);
    pathMustExist("/p1");
    mkPath("/p1/p2", CreateMode.EPHEMERAL);
    pathMustExist("/p1/p2");
  }

  /** Make a single path entry (no parents) with world read/write ACLs. */
  private void mkPath(String path, CreateMode mode) throws IOException {
    curatorService.zkMkPath(path, mode, false,
        RegistrySecurity.WorldReadWriteACL);
  }

  /** Assert a path exists, raising PathNotFoundException if not. */
  public void pathMustExist(String path) throws IOException {
    curatorService.zkPathMustExist(path);
  }

  @Test(expected = PathNotFoundException.class)
  public void testMkdirChild() throws Throwable {
    // parent does not exist and parent-creation is off: must fail
    mkPath("/testMkdirChild/child", CreateMode.PERSISTENT);
  }

  @Test
  public void testMaybeCreate() throws Throwable {
    // first call creates the node, second is a no-op returning false
    assertTrue(curatorService.maybeCreate("/p3", CreateMode.PERSISTENT,
        RegistrySecurity.WorldReadWriteACL, false));
    assertFalse(curatorService.maybeCreate("/p3", CreateMode.PERSISTENT,
        RegistrySecurity.WorldReadWriteACL, false));
  }

  @Test
  public void testRM() throws Throwable {
    mkPath("/rm", CreateMode.PERSISTENT);
    curatorService.zkDelete("/rm", false, null);
    verifyNotExists("/rm");
    // deleting an absent path must be a no-op
    curatorService.zkDelete("/rm", false, null);
  }

  @Test
  public void testRMNonRf() throws Throwable {
    mkPath("/rm", CreateMode.PERSISTENT);
    mkPath("/rm/child", CreateMode.PERSISTENT);
    try {
      curatorService.zkDelete("/rm", false, null);
      fail("expected a failure");
    } catch (PathIsNotEmptyDirectoryException expected) {
      // expected: non-recursive delete of a non-empty node
    }
  }

  @Test
  public void testRMRf() throws Throwable {
    mkPath("/rm", CreateMode.PERSISTENT);
    mkPath("/rm/child", CreateMode.PERSISTENT);
    curatorService.zkDelete("/rm", true, null);
    verifyNotExists("/rm");
    curatorService.zkDelete("/rm", true, null);
  }

  @Test
  public void testBackgroundDelete() throws Throwable {
    mkPath("/rm", CreateMode.PERSISTENT);
    mkPath("/rm/child", CreateMode.PERSISTENT);
    // the catcher blocks until the async delete callback fires
    CuratorEventCatcher events = new CuratorEventCatcher();
    curatorService.zkDelete("/rm", true, events);
    CuratorEvent taken = events.take();
    LOG.info("took {}", taken);
    assertEquals(1, events.getCount());
  }

  @Test
  public void testCreate() throws Throwable {
    curatorService.zkCreate("/testcreate",
        CreateMode.PERSISTENT, getTestBuffer(),
        rootACL
    );
    pathMustExist("/testcreate");
  }

  @Test
  public void testCreateTwice() throws Throwable {
    byte[] buffer = getTestBuffer();
    curatorService.zkCreate("/testcreatetwice",
        CreateMode.PERSISTENT, buffer,
        rootACL);
    try {
      curatorService.zkCreate("/testcreatetwice",
          CreateMode.PERSISTENT, buffer,
          rootACL);
      fail("expected FileAlreadyExistsException on second create");
    } catch (FileAlreadyExistsException expected) {
      // expected
    }
  }

  @Test
  public void testCreateUpdate() throws Throwable {
    byte[] buffer = getTestBuffer();
    curatorService.zkCreate("/testcreateupdate",
        CreateMode.PERSISTENT, buffer,
        rootACL
    );
    curatorService.zkUpdate("/testcreateupdate", buffer);
  }

  @Test(expected = PathNotFoundException.class)
  public void testUpdateMissing() throws Throwable {
    curatorService.zkUpdate("/testupdatemissing", getTestBuffer());
  }

  @Test
  public void testUpdateDirectory() throws Throwable {
    mkPath("/testupdatedirectory", CreateMode.PERSISTENT);
    curatorService.zkUpdate("/testupdatedirectory", getTestBuffer());
  }

  @Test
  public void testUpdateDirectorywithChild() throws Throwable {
    mkPath("/testupdatedirectorywithchild", CreateMode.PERSISTENT);
    mkPath("/testupdatedirectorywithchild/child", CreateMode.PERSISTENT);
    curatorService.zkUpdate("/testupdatedirectorywithchild", getTestBuffer());
  }

  @Test
  public void testUseZKServiceForBinding() throws Throwable {
    // a second curator service bound directly to the ZK service
    CuratorService cs2 = new CuratorService("curator", zookeeper);
    try {
      cs2.init(new Configuration());
      cs2.start();
    } finally {
      // stop the extra service so it does not leak across tests
      ServiceOperations.stop(cs2);
    }
  }

  /** @return a one-byte data buffer for create/update operations */
  protected byte[] getTestBuffer() {
    byte[] buffer = new byte[1];
    buffer[0] = '0';
    return buffer;
  }

  /** Assert a path does not exist. */
  public void verifyNotExists(String path) throws IOException {
    if (curatorService.zkPathExists(path)) {
      fail("Path should not exist: " + path);
    }
  }
}
+ */ + +package org.apache.hadoop.registry.client.impl; + +import org.junit.Test; + +import java.io.IOException; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileAlreadyExistsException; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; +import org.apache.hadoop.fs.PathNotFoundException; +import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException; +import org.apache.hadoop.registry.client.types.ServiceRecord; +import org.apache.hadoop.registry.client.types.yarn.YarnRegistryAttributes; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; + +/** + * FSRegistryOperationsService test, using the local filesystem. + */ +public class TestFSRegistryOperationsService { + private static FSRegistryOperationsService registry = + new FSRegistryOperationsService(); + private static FileSystem fs; + + @BeforeClass + public static void initRegistry() throws IOException { + Assert.assertNotNull(registry); + registry.init(new Configuration()); + fs = registry.getFs(); + fs.delete(new Path("test"), true); + } + + @Before + public void createTestDir() throws IOException { + fs.mkdirs(new Path("test")); + } + + @After + public void cleanTestDir() throws IOException { + fs.delete(new Path("test"), true); + } + + @Test + public void testMkNodeNonRecursive() + throws InvalidPathnameException, PathNotFoundException, IOException { + boolean result = false; + System.out.println("Make node with parent already made, nonrecursive"); + result = registry.mknode("test/registryTestNode", false); + Assert.assertTrue(result); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode"))); + + // Expected to fail + try { + System.out.println("Try to make node with no parent, nonrecursive"); + registry.mknode("test/parent/registryTestNode", false); + Assert.fail("Should not have created 
node"); + } catch (IOException e) { + } + Assert.assertFalse(fs.exists(new Path("test/parent/registryTestNode"))); + } + + @Test + public void testMkNodeRecursive() throws IOException { + boolean result = false; + System.out.println("Make node with parent already made, recursive"); + result = registry.mknode("test/registryTestNode", true); + Assert.assertTrue(result); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode"))); + + result = false; + System.out.println("Try to make node with no parent, recursive"); + result = registry.mknode("test/parent/registryTestNode", true); + Assert.assertTrue(result); + Assert.assertTrue(fs.exists(new Path("test/parent/registryTestNode"))); + + } + + @Test + public void testMkNodeAlreadyExists() throws IOException { + System.out.println("pre-create test path"); + fs.mkdirs(new Path("test/registryTestNode")); + + System.out.println( + "Try to mknode existing path -- should be noop and return false"); + Assert.assertFalse(registry.mknode("test/registryTestNode", true)); + Assert.assertFalse(registry.mknode("test/registryTestNode", false)); + } + + @Test + public void testBindParentPath() throws InvalidPathnameException, + PathNotFoundException, FileAlreadyExistsException, IOException { + ServiceRecord record = createRecord("0"); + + System.out.println("pre-create test path"); + fs.mkdirs(new Path("test/parent1/registryTestNode")); + + registry.bind("test/parent1/registryTestNode", record, 1); + Assert.assertTrue( + fs.exists(new Path("test/parent1/registryTestNode/_record"))); + + // Test without pre-creating path + registry.bind("test/parent2/registryTestNode", record, 1); + Assert.assertTrue(fs.exists(new Path("test/parent2/registryTestNode"))); + + } + + @Test + public void testBindAlreadyExists() throws IOException { + ServiceRecord record1 = createRecord("1"); + ServiceRecord record2 = createRecord("2"); + + System.out.println("Bind record1"); + registry.bind("test/registryTestNode", record1, 1); + 
Assert.assertTrue(fs.exists(new Path("test/registryTestNode/_record"))); + + System.out.println("Bind record2, overwrite = 1"); + registry.bind("test/registryTestNode", record2, 1); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode/_record"))); + + // The record should have been overwritten + ServiceRecord readRecord = registry.resolve("test/registryTestNode"); + Assert.assertTrue(readRecord.equals(record2)); + + System.out.println("Bind record3, overwrite = 0"); + try { + registry.bind("test/registryTestNode", record1, 0); + Assert.fail("Should not overwrite record"); + } catch (IOException e) { + } + + // The record should not be overwritten + readRecord = registry.resolve("test/registryTestNode"); + Assert.assertTrue(readRecord.equals(record2)); + } + + @Test + public void testResolve() throws IOException { + ServiceRecord record = createRecord("0"); + registry.bind("test/registryTestNode", record, 1); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode/_record"))); + + System.out.println("Read record that exists"); + ServiceRecord readRecord = registry.resolve("test/registryTestNode"); + Assert.assertNotNull(readRecord); + Assert.assertTrue(record.equals(readRecord)); + + System.out.println("Try to read record that does not exist"); + try { + readRecord = registry.resolve("test/nonExistentNode"); + Assert.fail("Should throw an error, record does not exist"); + } catch (IOException e) { + } + } + + @Test + public void testExists() throws IOException { + System.out.println("pre-create test path"); + fs.mkdirs(new Path("test/registryTestNode")); + + System.out.println("Check for existing node"); + boolean exists = registry.exists("test/registryTestNode"); + Assert.assertTrue(exists); + + System.out.println("Check for non-existing node"); + exists = registry.exists("test/nonExistentNode"); + Assert.assertFalse(exists); + } + + @Test + public void testDeleteDirsOnly() throws IOException { + System.out.println("pre-create test path with 
children"); + fs.mkdirs(new Path("test/registryTestNode")); + fs.mkdirs(new Path("test/registryTestNode/child1")); + fs.mkdirs(new Path("test/registryTestNode/child2")); + + try { + registry.delete("test/registryTestNode", false); + Assert.fail("Deleted dir wich children, nonrecursive flag set"); + } catch (IOException e) { + } + // Make sure nothing was deleted + Assert.assertTrue(fs.exists(new Path("test/registryTestNode"))); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode/child1"))); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode/child2"))); + + System.out.println("Delete leaf path 'test/registryTestNode/child2'"); + registry.delete("test/registryTestNode/child2", false); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode"))); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode/child1"))); + Assert.assertFalse(fs.exists(new Path("test/registryTestNode/child2"))); + + System.out + .println("Recursively delete non-leaf path 'test/registryTestNode'"); + registry.delete("test/registryTestNode", true); + Assert.assertFalse(fs.exists(new Path("test/registryTestNode"))); + } + + @Test + public void testDeleteWithRecords() throws IOException { + System.out.println("pre-create test path with children and mocked records"); + + fs.mkdirs(new Path("test/registryTestNode")); + fs.mkdirs(new Path("test/registryTestNode/child1")); + fs.mkdirs(new Path("test/registryTestNode/child2")); + + // Create and close stream immediately so they aren't blocking + fs.create(new Path("test/registryTestNode/_record")).close(); + fs.create(new Path("test/registryTestNode/child1/_record")).close(); + + System.out.println("Delete dir with child nodes and record file"); + try { + registry.delete("test/registryTestNode", false); + Assert.fail("Nonrecursive delete of non-empty dir"); + } catch (PathIsNotEmptyDirectoryException e) { + } + + Assert.assertTrue(fs.exists(new Path("test/registryTestNode/_record"))); + Assert.assertTrue( + fs.exists(new 
Path("test/registryTestNode/child1/_record"))); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode/child2"))); + + System.out.println("Delete dir with record file and no child dirs"); + registry.delete("test/registryTestNode/child1", false); + Assert.assertFalse(fs.exists(new Path("test/registryTestNode/child1"))); + Assert.assertTrue(fs.exists(new Path("test/registryTestNode/child2"))); + + System.out.println("Delete dir with child dir and no record file"); + try { + registry.delete("test/registryTestNode", false); + Assert.fail("Nonrecursive delete of non-empty dir"); + } catch (PathIsNotEmptyDirectoryException e) { + } + Assert.assertTrue(fs.exists(new Path("test/registryTestNode/child2"))); + } + + @Test + public void testList() throws IOException { + System.out.println("pre-create test path with children and mocked records"); + + fs.mkdirs(new Path("test/registryTestNode")); + fs.mkdirs(new Path("test/registryTestNode/child1")); + fs.mkdirs(new Path("test/registryTestNode/child2")); + + // Create and close stream immediately so they aren't blocking + fs.create(new Path("test/registryTestNode/_record")).close(); + fs.create(new Path("test/registryTestNode/child1/_record")).close(); + + List<String> ls = null; + + ls = registry.list("test/registryTestNode"); + Assert.assertNotNull(ls); + Assert.assertEquals(2, ls.size()); + System.out.println(ls); + Assert.assertTrue(ls.contains("child1")); + Assert.assertTrue(ls.contains("child2")); + + ls = null; + ls = registry.list("test/registryTestNode/child1"); + Assert.assertNotNull(ls); + Assert.assertTrue(ls.isEmpty()); + ls = null; + ls = registry.list("test/registryTestNode/child2"); + Assert.assertNotNull(ls); + Assert.assertTrue(ls.isEmpty()); + } + + private ServiceRecord createRecord(String id) { + System.out.println("Creating mock service record"); + + ServiceRecord record = new ServiceRecord(); + record.set(YarnRegistryAttributes.YARN_ID, id); + record.description = "testRecord"; + return record; + } 
+} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/impl/TestMicroZookeeperService.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/impl/TestMicroZookeeperService.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/impl/TestMicroZookeeperService.java new file mode 100644 index 0000000..7cf39cd --- /dev/null +++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/client/impl/TestMicroZookeeperService.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.registry.client.impl; + +import org.apache.hadoop.service.ServiceOperations; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.registry.conf.RegistryConfiguration; +import org.apache.hadoop.registry.server.services.MicroZookeeperService; +import org.junit.After; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestName; +import org.junit.rules.Timeout; + +import java.io.IOException; + +/** + * Simple tests to look at the micro ZK service itself + */ +public class TestMicroZookeeperService extends Assert { + + private MicroZookeeperService zookeeper; + + @Rule + public final Timeout testTimeout = new Timeout(10000); + @Rule + public TestName methodName = new TestName(); + + @After + public void destroyZKServer() throws IOException { + + ServiceOperations.stop(zookeeper); + } + + @Test + public void testTempDirSupport() throws Throwable { + Configuration conf = new RegistryConfiguration(); + zookeeper = new MicroZookeeperService("t1"); + zookeeper.init(conf); + zookeeper.start(); + zookeeper.stop(); + } + +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/integration/TestYarnPolicySelector.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/integration/TestYarnPolicySelector.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/integration/TestYarnPolicySelector.java new file mode 100644 index 0000000..441b3d7 --- /dev/null +++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/integration/TestYarnPolicySelector.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.registry.integration; + +import org.apache.hadoop.registry.RegistryTestHelper; +import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies; +import org.apache.hadoop.registry.client.types.RegistryPathStatus; +import org.apache.hadoop.registry.client.types.ServiceRecord; +import org.apache.hadoop.registry.server.integration.SelectByYarnPersistence; +import org.apache.hadoop.registry.server.services.RegistryAdminService; +import org.junit.Test; + +public class TestYarnPolicySelector extends RegistryTestHelper { + + + private ServiceRecord record = createRecord("1", + PersistencePolicies.APPLICATION, "one", + null); + private RegistryPathStatus status = new RegistryPathStatus("/", 0, 0, 1); + + public void assertSelected(boolean outcome, + RegistryAdminService.NodeSelector selector) { + boolean select = selector.shouldSelect("/", status, record); + assertEquals(selector.toString(), outcome, select); + } + + @Test + public void testByContainer() throws Throwable { + assertSelected(false, + new SelectByYarnPersistence("1", + PersistencePolicies.CONTAINER)); + } + + @Test + public void testByApp() throws Throwable { + assertSelected(true, + new SelectByYarnPersistence("1", + PersistencePolicies.APPLICATION)); + } + + + @Test + public void 
testByAppName() throws Throwable { + assertSelected(false, + new SelectByYarnPersistence("2", + PersistencePolicies.APPLICATION)); + } + +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/operations/TestRegistryOperations.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/operations/TestRegistryOperations.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/operations/TestRegistryOperations.java new file mode 100644 index 0000000..853d7f1 --- /dev/null +++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/operations/TestRegistryOperations.java @@ -0,0 +1,331 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.registry.operations;

import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.PathNotFoundException;
import org.apache.hadoop.registry.AbstractRegistryTest;
import org.apache.hadoop.registry.client.api.BindFlags;
import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
import org.apache.hadoop.registry.client.binding.RegistryUtils;
import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
import org.apache.hadoop.registry.client.exceptions.NoRecordException;
import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
import org.apache.hadoop.registry.client.types.RegistryPathStatus;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.hadoop.registry.client.types.yarn.YarnRegistryAttributes;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Functional tests of the core registry operations API against a live
 * (in-process) registry: create/bind/resolve/stat/list/delete semantics,
 * overwrite flags, and path/username edge cases.
 */
public class TestRegistryOperations extends AbstractRegistryTest {
  protected static final Logger LOG =
      LoggerFactory.getLogger(TestRegistryOperations.class);

  @Test
  public void testPutGetServiceEntry() throws Throwable {
    // a record written with APPLICATION persistence must resolve back
    // equal to what was written
    ServiceRecord written = putExampleServiceEntry(ENTRY_PATH, 0,
        PersistencePolicies.APPLICATION);
    ServiceRecord resolved = operations.resolve(ENTRY_PATH);
    validateEntry(resolved);
    assertMatches(written, resolved);
  }

  @Test
  public void testDeleteServiceEntry() throws Throwable {
    putExampleServiceEntry(ENTRY_PATH, 0);
    operations.delete(ENTRY_PATH, false);
  }

  @Test
  public void testDeleteNonexistentEntry() throws Throwable {
    // deleting a missing path must be a no-op, recursive or not
    operations.delete(ENTRY_PATH, false);
    operations.delete(ENTRY_PATH, true);
  }

  @Test
  public void testStat() throws Throwable {
    putExampleServiceEntry(ENTRY_PATH, 0);
    RegistryPathStatus stat = operations.stat(ENTRY_PATH);
    assertTrue(stat.size > 0);
    assertTrue(stat.time > 0);
    // stat.path is the short name of the node, not the full path
    assertEquals(NAME, stat.path);
  }

  @Test
  public void testLsParent() throws Throwable {
    // listing the parent must surface exactly the one child, whose
    // status and extracted service record match the direct stat/resolve
    ServiceRecord written = putExampleServiceEntry(ENTRY_PATH, 0);
    RegistryPathStatus stat = operations.stat(ENTRY_PATH);

    List<String> children = operations.list(PARENT_PATH);
    assertEquals(1, children.size());
    assertEquals(NAME, children.get(0));
    Map<String, RegistryPathStatus> childStats =
        RegistryUtils.statChildren(operations, PARENT_PATH);
    assertEquals(1, childStats.size());
    assertEquals(stat, childStats.get(NAME));

    Map<String, ServiceRecord> records =
        RegistryUtils.extractServiceRecords(operations,
            PARENT_PATH,
            childStats.values());
    assertEquals(1, records.size());
    // extractServiceRecords keys its result by the full entry path
    ServiceRecord record = records.get(ENTRY_PATH);
    RegistryTypeUtils.validateServiceRecord(ENTRY_PATH, record);
    assertMatches(written, record);
  }

  @Test
  public void testDeleteNonEmpty() throws Throwable {
    // non-recursive delete of a non-empty directory must fail;
    // recursive delete must succeed
    putExampleServiceEntry(ENTRY_PATH, 0);
    try {
      operations.delete(PARENT_PATH, false);
      fail("Expected a failure");
    } catch (PathIsNotEmptyDirectoryException expected) {
      // expected; ignore
    }
    operations.delete(PARENT_PATH, true);
  }

  @Test(expected = PathNotFoundException.class)
  public void testStatEmptyPath() throws Throwable {
    operations.stat(ENTRY_PATH);
  }

  @Test(expected = PathNotFoundException.class)
  public void testLsEmptyPath() throws Throwable {
    operations.list(PARENT_PATH);
  }

  @Test(expected = PathNotFoundException.class)
  public void testResolveEmptyPath() throws Throwable {
    operations.resolve(ENTRY_PATH);
  }

  @Test
  public void testMkdirNoParent() throws Throwable {
    // non-recursive mknode under a missing parent must not create the node
    String path = ENTRY_PATH + "/missing";
    try {
      operations.mknode(path, false);
      RegistryPathStatus stat = operations.stat(path);
      fail("Got a status " + stat);
    } catch (PathNotFoundException expected) {
      // expected
    }
  }

  @Test
  public void testDoubleMkdir() throws Throwable {
    // first mknode of a path returns true; repeating it returns false
    operations.mknode(USERPATH, false);
    String path = USERPATH + "newentry";
    assertTrue(operations.mknode(path, false));
    operations.stat(path);
    assertFalse(operations.mknode(path, false));
  }

  @Test
  public void testPutNoParent() throws Throwable {
    // binding under a missing parent must fail and leave no node behind
    ServiceRecord record = new ServiceRecord();
    record.set(YarnRegistryAttributes.YARN_ID, "testPutNoParent");
    String path = "/path/without/parent";
    try {
      operations.bind(path, record, 0);
      // didn't get a failure
      // trouble
      RegistryPathStatus stat = operations.stat(path);
      fail("Got a status " + stat);
    } catch (PathNotFoundException expected) {
      // expected
    }
  }

  @Test
  public void testPutMinimalRecord() throws Throwable {
    // an empty ServiceRecord is a legal payload
    String path = "/path/with/minimal";
    operations.mknode(path, true);
    ServiceRecord record = new ServiceRecord();
    operations.bind(path, record, BindFlags.OVERWRITE);
    ServiceRecord resolve = operations.resolve(path);
    assertMatches(record, resolve);
  }

  @Test(expected = PathNotFoundException.class)
  public void testPutNoParent2() throws Throwable {
    // same as testPutNoParent, but expressed via the expected-exception rule
    ServiceRecord record = new ServiceRecord();
    record.set(YarnRegistryAttributes.YARN_ID, "testPutNoParent");
    String path = "/path/without/parent";
    operations.bind(path, record, 0);
  }

  @Test
  public void testStatDirectory() throws Throwable {
    String empty = "/empty";
    operations.mknode(empty, false);
    operations.stat(empty);
  }

  @Test
  public void testStatRootPath() throws Throwable {
    operations.mknode("/", false);
    operations.stat("/");
    operations.list("/");
    operations.list("/");
  }

  @Test
  public void testStatOneLevelDown() throws Throwable {
    operations.mknode("/subdir", true);
    operations.stat("/subdir");
  }

  @Test
  public void testLsRootPath() throws Throwable {
    String empty = "/";
    operations.mknode(empty, false);
    operations.stat(empty);
  }

  @Test
  public void testResolvePathThatHasNoEntry() throws Throwable {
    // a node that exists but holds no record must raise NoRecordException
    String empty = "/empty2";
    operations.mknode(empty, false);
    try {
      ServiceRecord record = operations.resolve(empty);
      fail("expected an exception, got " + record);
    } catch (NoRecordException expected) {
      // expected
    }
  }

  @Test
  public void testOverwrite() throws Throwable {
    // re-binding without OVERWRITE must fail and leave the original intact;
    // with OVERWRITE it must replace the record
    ServiceRecord written = putExampleServiceEntry(ENTRY_PATH, 0);
    ServiceRecord resolved1 = operations.resolve(ENTRY_PATH);
    resolved1.description = "resolved1";
    try {
      operations.bind(ENTRY_PATH, resolved1, 0);
      fail("overwrite succeeded when it should have failed");
    } catch (FileAlreadyExistsException expected) {
      // expected
    }

    // verify there's no change
    ServiceRecord resolved2 = operations.resolve(ENTRY_PATH);
    assertMatches(written, resolved2);
    operations.bind(ENTRY_PATH, resolved1, BindFlags.OVERWRITE);
    ServiceRecord resolved3 = operations.resolve(ENTRY_PATH);
    assertMatches(resolved1, resolved3);
  }

  @Test
  public void testPutGetContainerPersistenceServiceEntry() throws Throwable {
    String path = ENTRY_PATH;
    ServiceRecord written = buildExampleServiceEntry(
        PersistencePolicies.CONTAINER);

    operations.mknode(RegistryPathUtils.parentOf(path), true);
    operations.bind(path, written, BindFlags.CREATE);
    ServiceRecord resolved = operations.resolve(path);
    validateEntry(resolved);
    assertMatches(written, resolved);
  }

  @Test
  public void testAddingWriteAccessIsNoOpEntry() throws Throwable {
    // on an insecure registry, ACL mutation operations are no-ops
    assertFalse(operations.addWriteAccessor("id", "pass"));
    operations.clearWriteAccessors();
  }

  @Test
  public void testListListFully() throws Throwable {
    ServiceRecord r1 = new ServiceRecord();
    ServiceRecord r2 = createRecord("i",
        PersistencePolicies.PERMANENT, "r2");

    String path = USERPATH + SC_HADOOP + "/listing";
    operations.mknode(path, true);
    String r1path = path + "/r1";
    operations.bind(r1path, r1, 0);
    String r2path = path + "/r2";
    operations.bind(r2path, r2, 0);

    RegistryPathStatus r1stat = operations.stat(r1path);
    assertEquals("r1", r1stat.path);
    RegistryPathStatus r2stat = operations.stat(r2path);
    assertEquals("r2", r2stat.path);
    assertNotEquals(r1stat, r2stat);

    // listings now
    List<String> list = operations.list(path);
    assertEquals("Wrong no. of children", 2, list.size());
    // there's no order here, so collect into a set and build the
    // diagnostic string with a StringBuilder rather than loop concatenation
    Set<String> names = new HashSet<>();
    StringBuilder entries = new StringBuilder();
    for (String child : list) {
      names.add(child);
      entries.append(child).append(' ');
    }
    assertTrue("No 'r1' in " + entries,
        names.contains("r1"));
    assertTrue("No 'r2' in " + entries,
        names.contains("r2"));

    Map<String, RegistryPathStatus> stats =
        RegistryUtils.statChildren(operations, path);
    assertEquals("Wrong no. of children", 2, stats.size());
    assertEquals(r1stat, stats.get("r1"));
    assertEquals(r2stat, stats.get("r2"));
  }

  @Test
  public void testComplexUsernames() throws Throwable {
    // usernames with spaces, punctuation, non-ASCII characters and
    // Kerberos-style principals must all map to creatable home paths
    operations.mknode("/users/user with spaces", true);
    operations.mknode("/users/user-with_underscores", true);
    operations.mknode("/users/000000", true);
    operations.mknode("/users/-storm", true);
    operations.mknode("/users/windows\\ user", true);
    String home = RegistryUtils.homePathForUser("\u0413PA\u0414_3");
    operations.mknode(home, true);
    operations.mknode(
        RegistryUtils.servicePath(home, "service.class", "service 4_5"),
        true);

    // NOTE(review): the "[email protected]" literals below appear to be
    // mailing-list-archiver redactions of the original Kerberos principals
    // (e.g. user@REALM) — recover the real values from the upstream repo.
    operations.mknode(
        RegistryUtils.homePathForUser("[email protected]"),
        true);
    operations.mknode(
        RegistryUtils.homePathForUser("hbase/[email protected]"),
        true);
    // homePathForUser is expected to lower-case and strip the host part
    home = RegistryUtils.homePathForUser("ADMINISTRATOR/127.0.0.1");
    assertTrue("No 'administrator' in " + home, home.contains("administrator"));
    operations.mknode(
        home,
        true);
  }
}
