http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh index 76d1d6b..6b43416 100644 --- a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh +++ b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh @@ -151,6 +151,7 @@ ### # Registry DNS specific parameters +# This is deprecated and should be done in hadoop-env.sh ### # For privileged registry DNS, user to run as after dropping privileges # This will replace the hadoop.id.str Java property in secure mode.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/pom.xml index 42edbbb..227e036 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/pom.xml @@ -101,7 +101,7 @@ </dependency> <dependency> <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-yarn-registry</artifactId> + <artifactId>hadoop-registry</artifactId> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml index c23c2bd..9386ac2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml @@ -144,7 +144,7 @@ <dependency> <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-yarn-registry</artifactId> + <artifactId>hadoop-registry</artifactId> </dependency> <dependency> 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml index 6a7d202..b7c6302 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml @@ -26,245 +26,14 @@ <version>3.3.0-SNAPSHOT</version> <name>Apache Hadoop YARN Registry</name> - <properties> - <!-- Needed for generating FindBugs warnings using parent pom --> - <yarn.basedir>${project.parent.basedir}</yarn.basedir> - </properties> - <dependencies> - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-api</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-auth</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-annotations</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-yarn-api</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-yarn-common</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - </dependency> - - <!-- needed for TimedOutTestsListener --> + <!-- The registry moved to Hadoop commons, this is just a stub pom. 
--> <dependency> <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - <type>test-jar</type> - <scope>test</scope> - </dependency> - - <!-- Mini KDC is used for testing --> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-minikdc</artifactId> - <scope>test</scope> - </dependency> - - <dependency> - <groupId>junit</groupId> - <artifactId>junit</artifactId> - <scope>test</scope> - </dependency> - - <dependency> - <groupId>org.apache.zookeeper</groupId> - <artifactId>zookeeper</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.curator</groupId> - <artifactId>curator-client</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.curator</groupId> - <artifactId>curator-framework</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.curator</groupId> - <artifactId>curator-recipes</artifactId> - </dependency> - - <dependency> - <groupId>commons-cli</groupId> - <artifactId>commons-cli</artifactId> - </dependency> - - <dependency> - <groupId>commons-daemon</groupId> - <artifactId>commons-daemon</artifactId> - </dependency> - - <dependency> - <groupId>commons-io</groupId> - <artifactId>commons-io</artifactId> - </dependency> - - <dependency> - <groupId>commons-net</groupId> - <artifactId>commons-net</artifactId> - </dependency> - - <dependency> - <groupId>com.fasterxml.jackson.core</groupId> - <artifactId>jackson-annotations</artifactId> - </dependency> - - <dependency> - <groupId>com.fasterxml.jackson.core</groupId> - <artifactId>jackson-core</artifactId> - </dependency> - - <dependency> - <groupId>com.fasterxml.jackson.core</groupId> - <artifactId>jackson-databind</artifactId> - </dependency> - - <dependency> - <groupId>com.google.guava</groupId> - <artifactId>guava</artifactId> - </dependency> - - <dependency> - <groupId>dnsjava</groupId> - <artifactId>dnsjava</artifactId> + <artifactId>hadoop-registry</artifactId> </dependency> </dependencies> - <build> - <!-- - Include all 
files in src/main/resources. By default, do not apply property - substitution (filtering=false), but do apply property substitution to - yarn-version-info.properties (filtering=true). This will substitute the - version information correctly, but prevent Maven from altering other files - like yarn-default.xml. - --> - <resources> - <resource> - <directory>${basedir}/src/main/resources</directory> - <excludes> - <exclude>yarn-version-info.properties</exclude> - </excludes> - <filtering>false</filtering> - </resource> - <resource> - <directory>${basedir}/src/main/resources</directory> - <includes> - <include>yarn-version-info.properties</include> - </includes> - <filtering>true</filtering> - </resource> - </resources> - <plugins> - <plugin> - <groupId>org.apache.rat</groupId> - <artifactId>apache-rat-plugin</artifactId> - <configuration> - <excludes> - <exclude>src/main/resources/.keep</exclude> - </excludes> - </configuration> - </plugin> - <plugin> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-maven-plugins</artifactId> - <executions> - <execution> - <id>version-info</id> - <phase>generate-resources</phase> - <goals> - <goal>version-info</goal> - </goals> - <configuration> - <source> - <directory>${basedir}/src/main</directory> - <includes> - <include>java/**/*.java</include> - <!-- - <include>proto/**/*.proto</include> - --> - </includes> - </source> - </configuration> - </execution> - </executions> - </plugin> - <plugin> - <artifactId>maven-jar-plugin</artifactId> - <executions> - <execution> - <goals> - <goal>test-jar</goal> - </goals> - <phase>test-compile</phase> - </execution> - </executions> - </plugin> - - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-surefire-plugin</artifactId> - <configuration> - <reuseForks>false</reuseForks> - <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds> - <argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError</argLine> - <environmentVariables> - <!-- HADOOP_HOME required for 
tests on Windows to find winutils --> - <HADOOP_HOME>${hadoop.common.build.dir}</HADOOP_HOME> - <!-- configurable option to turn JAAS debugging on during test runs --> - <HADOOP_JAAS_DEBUG>true</HADOOP_JAAS_DEBUG> - <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib</LD_LIBRARY_PATH> - <MALLOC_ARENA_MAX>4</MALLOC_ARENA_MAX> - </environmentVariables> - <systemPropertyVariables> - - <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir> - <hadoop.tmp.dir>${project.build.directory}/tmp</hadoop.tmp.dir> - - <!-- TODO: all references in testcases should be updated to this default --> - <test.build.dir>${test.build.dir}</test.build.dir> - <test.build.data>${test.build.data}</test.build.data> - <test.build.webapps>${test.build.webapps}</test.build.webapps> - <test.cache.data>${test.cache.data}</test.cache.data> - <test.build.classes>${test.build.classes}</test.build.classes> - - <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack> - <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf> - <java.security.egd>${java.security.egd}</java.security.egd> - <require.test.libhadoop>${require.test.libhadoop}</require.test.libhadoop> - </systemPropertyVariables> - <includes> - <include>**/Test*.java</include> - </includes> - <excludes> - <exclude>**/${test.exclude}.java</exclude> - <exclude>${test.exclude.pattern}</exclude> - <exclude>**/Test*$*.java</exclude> - </excludes> - </configuration> - </plugin> - - - </plugins> - </build> </project> http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java deleted file mode 100644 index 480ce0e..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java +++ /dev/null @@ -1,497 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.registry.cli; - -import static org.apache.hadoop.registry.client.binding.RegistryTypeUtils.*; - -import java.io.Closeable; -import java.io.IOException; -import java.io.PrintStream; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.List; -import java.util.Map; - -import com.google.common.base.Preconditions; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.GnuParser; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionBuilder; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.fs.PathNotFoundException; -import org.apache.hadoop.security.AccessControlException; -import org.apache.hadoop.service.ServiceOperations; -import org.apache.hadoop.util.ExitUtil; -import org.apache.hadoop.util.Tool; -import org.apache.hadoop.util.ToolRunner; -import org.apache.hadoop.registry.client.api.BindFlags; -import org.apache.hadoop.registry.client.api.RegistryOperations; -import org.apache.hadoop.registry.client.api.RegistryOperationsFactory; -import org.apache.hadoop.registry.client.exceptions.AuthenticationFailedException; -import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException; -import org.apache.hadoop.registry.client.exceptions.InvalidRecordException; -import org.apache.hadoop.registry.client.exceptions.NoPathPermissionsException; -import org.apache.hadoop.registry.client.exceptions.NoRecordException; -import org.apache.hadoop.registry.client.types.Endpoint; -import org.apache.hadoop.registry.client.types.ProtocolTypes; -import org.apache.hadoop.registry.client.types.ServiceRecord; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Command line for registry operations. 
- */ -public class RegistryCli extends Configured implements Tool, Closeable { - private static final Logger LOG = - LoggerFactory.getLogger(RegistryCli.class); - protected final PrintStream sysout; - protected final PrintStream syserr; - - - private RegistryOperations registry; - - private static final String LS_USAGE = "ls pathName"; - private static final String RESOLVE_USAGE = "resolve pathName"; - private static final String BIND_USAGE = - "bind -inet -api apiName -p portNumber -h hostName pathName" + "\n" - + "bind -webui uriString -api apiName pathName" + "\n" - + "bind -rest uriString -api apiName pathName"; - private static final String MKNODE_USAGE = "mknode directoryName"; - private static final String RM_USAGE = "rm pathName"; - private static final String USAGE = - "\n" + LS_USAGE + "\n" + RESOLVE_USAGE + "\n" + BIND_USAGE + "\n" + - MKNODE_USAGE + "\n" + RM_USAGE; - - - public RegistryCli(PrintStream sysout, PrintStream syserr) { - Configuration conf = new Configuration(); - super.setConf(conf); - registry = RegistryOperationsFactory.createInstance(conf); - registry.start(); - this.sysout = sysout; - this.syserr = syserr; - } - - public RegistryCli(RegistryOperations reg, - Configuration conf, - PrintStream sysout, - PrintStream syserr) { - super(conf); - Preconditions.checkArgument(reg != null, "Null registry"); - registry = reg; - this.sysout = sysout; - this.syserr = syserr; - } - - @SuppressWarnings("UseOfSystemOutOrSystemErr") - public static void main(String[] args) throws Exception { - int res = -1; - try (RegistryCli cli = new RegistryCli(System.out, System.err)) { - res = ToolRunner.run(cli, args); - } catch (Exception e) { - ExitUtil.terminate(res, e); - } - ExitUtil.terminate(res); - } - - /** - * Close the object by stopping the registry. - * <p> - * <i>Important:</i> - * <p> - * After this call is made, no operations may be made of this - * object, <i>or of a YARN registry instance used when constructing - * this object. 
</i> - * @throws IOException - */ - @Override - public void close() throws IOException { - ServiceOperations.stopQuietly(registry); - registry = null; - } - - private int usageError(String err, String usage) { - syserr.println("Error: " + err); - syserr.println("Usage: " + usage); - return -1; - } - - private boolean validatePath(String path) { - if (!path.startsWith("/")) { - syserr.println("Path must start with /; given path was: " + path); - return false; - } - return true; - } - - @Override - public int run(String[] args) throws Exception { - Preconditions.checkArgument(getConf() != null, "null configuration"); - if (args.length > 0) { - switch (args[0]) { - case "ls": - return ls(args); - case "resolve": - return resolve(args); - case "bind": - return bind(args); - case "mknode": - return mknode(args); - case "rm": - return rm(args); - default: - return usageError("Invalid command: " + args[0], USAGE); - } - } - return usageError("No command arg passed.", USAGE); - } - - @SuppressWarnings("unchecked") - public int ls(String[] args) { - - Options lsOption = new Options(); - CommandLineParser parser = new GnuParser(); - try { - CommandLine line = parser.parse(lsOption, args); - - List<String> argsList = line.getArgList(); - if (argsList.size() != 2) { - return usageError("ls requires exactly one path argument", LS_USAGE); - } - if (!validatePath(argsList.get(1))) { - return -1; - } - - try { - List<String> children = registry.list(argsList.get(1)); - for (String child : children) { - sysout.println(child); - } - return 0; - - } catch (Exception e) { - syserr.println(analyzeException("ls", e, argsList)); - } - return -1; - } catch (ParseException exp) { - return usageError("Invalid syntax " + exp, LS_USAGE); - } - } - - @SuppressWarnings("unchecked") - public int resolve(String[] args) { - Options resolveOption = new Options(); - CommandLineParser parser = new GnuParser(); - try { - CommandLine line = parser.parse(resolveOption, args); - - List<String> argsList = 
line.getArgList(); - if (argsList.size() != 2) { - return usageError("resolve requires exactly one path argument", - RESOLVE_USAGE); - } - if (!validatePath(argsList.get(1))) { - return -1; - } - - try { - ServiceRecord record = registry.resolve(argsList.get(1)); - - for (Endpoint endpoint : record.external) { - sysout.println(" Endpoint(ProtocolType=" - + endpoint.protocolType + ", Api=" - + endpoint.api + ");" - + " Addresses(AddressType=" - + endpoint.addressType + ") are: "); - - for (Map<String, String> address : endpoint.addresses) { - sysout.println("[ "); - for (Map.Entry<String, String> entry : address.entrySet()) { - sysout.print("\t" + entry.getKey() - + ":" + entry.getValue()); - } - - sysout.println("\n]"); - } - sysout.println(); - } - return 0; - } catch (Exception e) { - syserr.println(analyzeException("resolve", e, argsList)); - } - return -1; - } catch (ParseException exp) { - return usageError("Invalid syntax " + exp, RESOLVE_USAGE); - } - - } - - public int bind(String[] args) { - Option rest = OptionBuilder.withArgName("rest") - .hasArg() - .withDescription("rest Option") - .create("rest"); - Option webui = OptionBuilder.withArgName("webui") - .hasArg() - .withDescription("webui Option") - .create("webui"); - Option inet = OptionBuilder.withArgName("inet") - .withDescription("inet Option") - .create("inet"); - Option port = OptionBuilder.withArgName("port") - .hasArg() - .withDescription("port to listen on [9999]") - .create("p"); - Option host = OptionBuilder.withArgName("host") - .hasArg() - .withDescription("host name") - .create("h"); - Option apiOpt = OptionBuilder.withArgName("api") - .hasArg() - .withDescription("api") - .create("api"); - Options inetOption = new Options(); - inetOption.addOption(inet); - inetOption.addOption(port); - inetOption.addOption(host); - inetOption.addOption(apiOpt); - - Options webuiOpt = new Options(); - webuiOpt.addOption(webui); - webuiOpt.addOption(apiOpt); - - Options restOpt = new Options(); - 
restOpt.addOption(rest); - restOpt.addOption(apiOpt); - - - CommandLineParser parser = new GnuParser(); - ServiceRecord sr = new ServiceRecord(); - CommandLine line; - if (args.length <= 1) { - return usageError("Invalid syntax ", BIND_USAGE); - } - if (args[1].equals("-inet")) { - int portNum; - String hostName; - String api; - - try { - line = parser.parse(inetOption, args); - } catch (ParseException exp) { - return usageError("Invalid syntax " + exp.getMessage(), BIND_USAGE); - } - if (line.hasOption("inet") && line.hasOption("p") && - line.hasOption("h") && line.hasOption("api")) { - try { - portNum = Integer.parseInt(line.getOptionValue("p")); - } catch (NumberFormatException exp) { - return usageError("Invalid Port - int required" + exp.getMessage(), - BIND_USAGE); - } - hostName = line.getOptionValue("h"); - api = line.getOptionValue("api"); - sr.addExternalEndpoint( - inetAddrEndpoint(api, ProtocolTypes.PROTOCOL_HADOOP_IPC, hostName, - portNum)); - - } else { - return usageError("Missing options: must have host, port and api", - BIND_USAGE); - } - - } else if (args[1].equals("-webui")) { - try { - line = parser.parse(webuiOpt, args); - } catch (ParseException exp) { - return usageError("Invalid syntax " + exp.getMessage(), BIND_USAGE); - } - if (line.hasOption("webui") && line.hasOption("api")) { - URI theUri; - try { - theUri = new URI(line.getOptionValue("webui")); - } catch (URISyntaxException e) { - return usageError("Invalid URI: " + e.getMessage(), BIND_USAGE); - } - sr.addExternalEndpoint(webEndpoint(line.getOptionValue("api"), theUri)); - - } else { - return usageError("Missing options: must have value for uri and api", - BIND_USAGE); - } - } else if (args[1].equals("-rest")) { - try { - line = parser.parse(restOpt, args); - } catch (ParseException exp) { - return usageError("Invalid syntax " + exp.getMessage(), BIND_USAGE); - } - if (line.hasOption("rest") && line.hasOption("api")) { - URI theUri = null; - try { - theUri = new 
URI(line.getOptionValue("rest")); - } catch (URISyntaxException e) { - return usageError("Invalid URI: " + e.getMessage(), BIND_USAGE); - } - sr.addExternalEndpoint( - restEndpoint(line.getOptionValue("api"), theUri)); - - } else { - return usageError("Missing options: must have value for uri and api", - BIND_USAGE); - } - - } else { - return usageError("Invalid syntax", BIND_USAGE); - } - @SuppressWarnings("unchecked") - List<String> argsList = line.getArgList(); - if (argsList.size() != 2) { - return usageError("bind requires exactly one path argument", BIND_USAGE); - } - if (!validatePath(argsList.get(1))) { - return -1; - } - - try { - registry.bind(argsList.get(1), sr, BindFlags.OVERWRITE); - return 0; - } catch (Exception e) { - syserr.println(analyzeException("bind", e, argsList)); - } - - return -1; - } - - @SuppressWarnings("unchecked") - public int mknode(String[] args) { - Options mknodeOption = new Options(); - CommandLineParser parser = new GnuParser(); - try { - CommandLine line = parser.parse(mknodeOption, args); - - List<String> argsList = line.getArgList(); - if (argsList.size() != 2) { - return usageError("mknode requires exactly one path argument", - MKNODE_USAGE); - } - if (!validatePath(argsList.get(1))) { - return -1; - } - - try { - registry.mknode(args[1], false); - return 0; - } catch (Exception e) { - syserr.println(analyzeException("mknode", e, argsList)); - } - return -1; - } catch (ParseException exp) { - return usageError("Invalid syntax " + exp.toString(), MKNODE_USAGE); - } - } - - - @SuppressWarnings("unchecked") - public int rm(String[] args) { - Option recursive = OptionBuilder.withArgName("recursive") - .withDescription("delete recursively") - .create("r"); - - Options rmOption = new Options(); - rmOption.addOption(recursive); - - boolean recursiveOpt = false; - - CommandLineParser parser = new GnuParser(); - try { - CommandLine line = parser.parse(rmOption, args); - - List<String> argsList = line.getArgList(); - if 
(argsList.size() != 2) { - return usageError("RM requires exactly one path argument", RM_USAGE); - } - if (!validatePath(argsList.get(1))) { - return -1; - } - - try { - if (line.hasOption("r")) { - recursiveOpt = true; - } - - registry.delete(argsList.get(1), recursiveOpt); - return 0; - } catch (Exception e) { - syserr.println(analyzeException("rm", e, argsList)); - } - return -1; - } catch (ParseException exp) { - return usageError("Invalid syntax " + exp.toString(), RM_USAGE); - } - } - - /** - * Given an exception and a possibly empty argument list, generate - * a diagnostics string for use in error messages - * @param operation the operation that failed - * @param e exception - * @param argsList arguments list - * @return a string intended for the user - */ - String analyzeException(String operation, - Exception e, - List<String> argsList) { - - String pathArg = !argsList.isEmpty() ? argsList.get(1) : "(none)"; - if (LOG.isDebugEnabled()) { - LOG.debug("Operation {} on path {} failed with exception {}", - operation, pathArg, e, e); - } - if (e instanceof InvalidPathnameException) { - return "InvalidPath :" + pathArg + ": " + e; - } - if (e instanceof PathNotFoundException) { - return "Path not found: " + pathArg; - } - if (e instanceof NoRecordException) { - return "No service record at path " + pathArg; - } - if (e instanceof AuthenticationFailedException) { - return "Failed to authenticate to registry : " + e; - } - if (e instanceof NoPathPermissionsException) { - return "No Permission to path: " + pathArg + ": " + e; - } - if (e instanceof AccessControlException) { - return "No Permission to path: " + pathArg + ": " + e; - } - if (e instanceof InvalidRecordException) { - return "Unable to read record at: " + pathArg + ": " + e; - } - if (e instanceof IOException) { - return "IO Exception when accessing path :" + pathArg + ": " + e; - } - // something else went very wrong here - return "Exception " + e; - - } -} 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/BindFlags.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/BindFlags.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/BindFlags.java deleted file mode 100644 index 5fd2aef..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/BindFlags.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.registry.client.api; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; - -/** - * Combinable Flags to use when creating a service entry. - */ [email protected] [email protected] -public interface BindFlags { - - /** - * Create the entry.. 
This is just "0" and can be "or"ed with anything - */ - int CREATE = 0; - - /** - * The entry should be created even if an existing entry is there. - */ - int OVERWRITE = 1; - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/DNSOperations.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/DNSOperations.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/DNSOperations.java deleted file mode 100644 index 3abfb6c..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/DNSOperations.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.registry.client.api; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.registry.client.types.ServiceRecord; -import org.apache.hadoop.service.Service; - -import java.io.IOException; - -/** - * DNS Operations. - */ [email protected] [email protected] -public interface DNSOperations extends Service { - - /** - * Register a service based on a service record. - * - * @param path the ZK path. - * @param record record providing DNS registration info. - * @throws IOException Any other IO Exception. - */ - void register(String path, ServiceRecord record) - throws IOException; - - - /** - * Delete a service's registered endpoints. - * - * If the operation returns without an error then the entry has been - * deleted. - * - * @param path the ZK path. - * @param record service record - * @throws IOException Any other IO Exception - * - */ - void delete(String path, ServiceRecord record) - throws IOException; - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/DNSOperationsFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/DNSOperationsFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/DNSOperationsFactory.java deleted file mode 100644 index 1a8bb3e..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/DNSOperationsFactory.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.registry.client.api; - -import com.google.common.base.Preconditions; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.registry.server.dns.RegistryDNS; - -/** - * A factory for DNS operation service instances. - */ -public final class DNSOperationsFactory implements RegistryConstants { - - /** - * DNS Implementation type. - */ - public enum DNSImplementation { - DNSJAVA - } - - private DNSOperationsFactory() { - } - - /** - * Create and initialize a DNS operations instance. - * - * @param conf configuration - * @return a DNS operations instance - */ - public static DNSOperations createInstance(Configuration conf) { - return createInstance("DNSOperations", DNSImplementation.DNSJAVA, conf); - } - - /** - * Create and initialize a registry operations instance. - * Access rights will be determined from the configuration. - * - * @param name name of the instance - * @param impl the DNS implementation. 
- * @param conf configuration - * @return a registry operations instance - */ - public static DNSOperations createInstance(String name, - DNSImplementation impl, - Configuration conf) { - Preconditions.checkArgument(conf != null, "Null configuration"); - DNSOperations operations = null; - switch (impl) { - case DNSJAVA: - operations = new RegistryDNS(name); - break; - - default: - throw new IllegalArgumentException( - String.format("%s is not available", impl.toString())); - } - - //operations.init(conf); - return operations; - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryConstants.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryConstants.java deleted file mode 100644 index db4f311..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryConstants.java +++ /dev/null @@ -1,390 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.registry.client.api; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; - -/** - * Constants for the registry, including configuration keys and default - * values. - */ [email protected] [email protected] -public interface RegistryConstants { - - /** - * prefix for registry configuration options: {@value}. - * Why <code>hadoop.</code> and not YARN? It can - * live outside YARN - */ - String REGISTRY_PREFIX = "hadoop.registry."; - - /** - * Prefix for zookeeper-specific options: {@value} - * <p> - * For clients using other protocols, these options are not supported. - */ - String ZK_PREFIX = REGISTRY_PREFIX + "zk."; - - /** - * Prefix for dns-specific options: {@value} - * <p> - * For clients using other protocols, these options are not supported. - */ - String DNS_PREFIX = REGISTRY_PREFIX + "dns."; - - /** - * flag to indicate whether or not the registry should - * be enabled in the RM: {@value}. - */ - String KEY_DNS_ENABLED = DNS_PREFIX + "enabled"; - - /** - * Defaut value for enabling the DNS in the Registry: {@value}. - */ - boolean DEFAULT_DNS_ENABLED = false; - - /** - * DNS domain name key. - */ - String KEY_DNS_DOMAIN = DNS_PREFIX + "domain-name"; - - /** - * Max length of a label (node delimited by a dot in the FQDN). - */ - int MAX_FQDN_LABEL_LENGTH = 63; - - /** - * DNS bind address. - */ - String KEY_DNS_BIND_ADDRESS = DNS_PREFIX + "bind-address"; - - /** - * DNS port number key. - */ - String KEY_DNS_PORT = DNS_PREFIX + "bind-port"; - - /** - * Default DNS port number. - */ - int DEFAULT_DNS_PORT = 5335; - - /** - * DNSSEC Enabled? - */ - String KEY_DNSSEC_ENABLED = DNS_PREFIX + "dnssec.enabled"; - - /** - * DNSSEC Enabled? - */ - String KEY_DNSSEC_PUBLIC_KEY = DNS_PREFIX + "public-key"; - - /** - * DNSSEC private key file. 
- */ - String KEY_DNSSEC_PRIVATE_KEY_FILE = DNS_PREFIX + "private-key-file"; - - /** - * Default DNSSEC private key file path. - */ - String DEFAULT_DNSSEC_PRIVATE_KEY_FILE = - "/etc/hadoop/conf/registryDNS.private"; - - /** - * Zone subnet. - */ - String KEY_DNS_ZONE_SUBNET = DNS_PREFIX + "zone-subnet"; - - /** - * Zone subnet mask. - */ - String KEY_DNS_ZONE_MASK = DNS_PREFIX + "zone-mask"; - - /** - * Zone subnet IP min. - */ - String KEY_DNS_ZONE_IP_MIN = DNS_PREFIX + "zone-ip-min"; - - /** - * Zone subnet IP max. - */ - String KEY_DNS_ZONE_IP_MAX = DNS_PREFIX + "zone-ip-max"; - - /** - * DNS Record TTL. - */ - String KEY_DNS_TTL = DNS_PREFIX + "dns-ttl"; - - /** - * DNS Record TTL. - */ - String KEY_DNS_ZONES_DIR = DNS_PREFIX + "zones-dir"; - - /** - * Split Reverse Zone. - * It may be necessary to spit large reverse zone subnets - * into multiple zones to handle existing hosts collocated - * with containers. - */ - String KEY_DNS_SPLIT_REVERSE_ZONE = DNS_PREFIX + "split-reverse-zone"; - - /** - * Default value for splitting the reverse zone. - */ - boolean DEFAULT_DNS_SPLIT_REVERSE_ZONE = false; - - /** - * Split Reverse Zone IP Range. - * How many IPs should be part of each reverse zone split - */ - String KEY_DNS_SPLIT_REVERSE_ZONE_RANGE = DNS_PREFIX + - "split-reverse-zone-range"; - - /** - * Key to set if the registry is secure: {@value}. - * Turning it on changes the permissions policy from "open access" - * to restrictions on kerberos with the option of - * a user adding one or more auth key pairs down their - * own tree. - */ - String KEY_REGISTRY_SECURE = REGISTRY_PREFIX + "secure"; - - /** - * Default registry security policy: {@value}. - */ - boolean DEFAULT_REGISTRY_SECURE = false; - - /** - * Root path in the ZK tree for the registry: {@value}. - */ - String KEY_REGISTRY_ZK_ROOT = ZK_PREFIX + "root"; - - /** - * Default root of the yarn registry: {@value}. 
- */ - String DEFAULT_ZK_REGISTRY_ROOT = "/registry"; - - /** - * Registry client authentication policy. - * <p> - * This is only used in secure clusters. - * <p> - * If the Factory methods of {@link RegistryOperationsFactory} - * are used, this key does not need to be set: it is set - * up based on the factory method used. - */ - String KEY_REGISTRY_CLIENT_AUTH = - REGISTRY_PREFIX + "client.auth"; - - /** - * Registry client uses Kerberos: authentication is automatic from - * logged in user. - */ - String REGISTRY_CLIENT_AUTH_KERBEROS = "kerberos"; - - /** - * Username/password is the authentication mechanism. - * If set then both {@link #KEY_REGISTRY_CLIENT_AUTHENTICATION_ID} - * and {@link #KEY_REGISTRY_CLIENT_AUTHENTICATION_PASSWORD} must be set. - */ - String REGISTRY_CLIENT_AUTH_DIGEST = "digest"; - - /** - * No authentication; client is anonymous. - */ - String REGISTRY_CLIENT_AUTH_ANONYMOUS = ""; - String REGISTRY_CLIENT_AUTH_SIMPLE = "simple"; - - /** - * Registry client authentication ID. - * <p> - * This is only used in secure clusters with - * {@link #KEY_REGISTRY_CLIENT_AUTH} set to - * {@link #REGISTRY_CLIENT_AUTH_DIGEST} - * - */ - String KEY_REGISTRY_CLIENT_AUTHENTICATION_ID = - KEY_REGISTRY_CLIENT_AUTH + ".id"; - - /** - * Registry client authentication password. - * <p> - * This is only used in secure clusters with the client set to - * use digest (not SASL or anonymouse) authentication. - * <p> - * Specifically, {@link #KEY_REGISTRY_CLIENT_AUTH} set to - * {@link #REGISTRY_CLIENT_AUTH_DIGEST} - * - */ - String KEY_REGISTRY_CLIENT_AUTHENTICATION_PASSWORD = - KEY_REGISTRY_CLIENT_AUTH + ".password"; - - /** - * List of hostname:port pairs defining the - * zookeeper quorum binding for the registry {@value}. - */ - String KEY_REGISTRY_ZK_QUORUM = ZK_PREFIX + "quorum"; - - /** - * The default zookeeper quorum binding for the registry: {@value}. 
- */ - String DEFAULT_REGISTRY_ZK_QUORUM = "localhost:2181"; - - /** - * Zookeeper session timeout in milliseconds: {@value}. - */ - String KEY_REGISTRY_ZK_SESSION_TIMEOUT = - ZK_PREFIX + "session.timeout.ms"; - - /** - * The default ZK session timeout: {@value}. - */ - int DEFAULT_ZK_SESSION_TIMEOUT = 60000; - - /** - * Zookeeper connection timeout in milliseconds: {@value}. - */ - String KEY_REGISTRY_ZK_CONNECTION_TIMEOUT = - ZK_PREFIX + "connection.timeout.ms"; - - /** - * The default ZK connection timeout: {@value}. - */ - int DEFAULT_ZK_CONNECTION_TIMEOUT = 15000; - - /** - * Zookeeper connection retry count before failing: {@value}. - */ - String KEY_REGISTRY_ZK_RETRY_TIMES = ZK_PREFIX + "retry.times"; - - /** - * The default # of times to retry a ZK connection: {@value}. - */ - int DEFAULT_ZK_RETRY_TIMES = 5; - - /** - * Zookeeper connect interval in milliseconds: {@value}. - */ - String KEY_REGISTRY_ZK_RETRY_INTERVAL = - ZK_PREFIX + "retry.interval.ms"; - - /** - * The default interval between connection retries: {@value}. - */ - int DEFAULT_ZK_RETRY_INTERVAL = 1000; - - /** - * Zookeeper retry limit in milliseconds, during - * exponential backoff: {@value}. - * - * This places a limit even - * if the retry times and interval limit, combined - * with the backoff policy, result in a long retry - * period - * - */ - String KEY_REGISTRY_ZK_RETRY_CEILING = - ZK_PREFIX + "retry.ceiling.ms"; - - /** - * Default limit on retries: {@value}. - */ - int DEFAULT_ZK_RETRY_CEILING = 60000; - - /** - * A comma separated list of Zookeeper ACL identifiers with - * system access to the registry in a secure cluster: {@value}. - * - * These are given full access to all entries. - * - * If there is an "@" at the end of an entry it - * instructs the registry client to append the kerberos realm as - * derived from the login and {@link #KEY_REGISTRY_KERBEROS_REALM}. 
- */ - String KEY_REGISTRY_SYSTEM_ACCOUNTS = REGISTRY_PREFIX + "system.accounts"; - - /** - * Default system accounts given global access to the registry: {@value}. - */ - String DEFAULT_REGISTRY_SYSTEM_ACCOUNTS = - "sasl:yarn@, sasl:mapred@, sasl:hdfs@, sasl:hadoop@"; - - /** - * A comma separated list of Zookeeper ACL identifiers with - * system access to the registry in a secure cluster: {@value}. - * - * These are given full access to all entries. - * - * If there is an "@" at the end of an entry it - * instructs the registry client to append the default kerberos domain. - */ - String KEY_REGISTRY_USER_ACCOUNTS = REGISTRY_PREFIX + "user.accounts"; - - /** - * Default system acls: {@value}. - */ - String DEFAULT_REGISTRY_USER_ACCOUNTS = ""; - - /** - * The kerberos realm: {@value}. - * - * This is used to set the realm of - * system principals which do not declare their realm, - * and any other accounts that need the value. - * - * If empty, the default realm of the running process - * is used. - * - * If neither are known and the realm is needed, then the registry - * service/client will fail. - */ - String KEY_REGISTRY_KERBEROS_REALM = REGISTRY_PREFIX + "kerberos.realm"; - - /** - * Key to define the JAAS context. Used in secure registries: {@value}. - */ - String KEY_REGISTRY_CLIENT_JAAS_CONTEXT = REGISTRY_PREFIX + "jaas.context"; - - /** - * default client-side registry JAAS context: {@value}. - */ - String DEFAULT_REGISTRY_CLIENT_JAAS_CONTEXT = "Client"; - - /** - * path to users off the root: {@value}. - */ - String PATH_USERS = "/users/"; - - /** - * path to system services off the root : {@value}. - */ - String PATH_SYSTEM_SERVICES = "/services/"; - - /** - * path to system services under a user's home path : {@value}. - */ - String PATH_USER_SERVICES = "/services/"; - - /** - * path under a service record to point to components of that service: - * {@value}. 
- */ - String SUBPATH_COMPONENTS = "/components/"; -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryOperations.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryOperations.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryOperations.java deleted file mode 100644 index c51bcf7..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryOperations.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.registry.client.api; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.fs.FileAlreadyExistsException; -import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; -import org.apache.hadoop.fs.PathNotFoundException; -import org.apache.hadoop.service.Service; -import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException; -import org.apache.hadoop.registry.client.exceptions.InvalidRecordException; -import org.apache.hadoop.registry.client.exceptions.NoRecordException; -import org.apache.hadoop.registry.client.types.RegistryPathStatus; -import org.apache.hadoop.registry.client.types.ServiceRecord; - -import java.io.IOException; -import java.util.List; - -/** - * Registry Operations - */ [email protected] [email protected] -public interface RegistryOperations extends Service { - - /** - * Create a path. - * - * It is not an error if the path exists already, be it empty or not. - * - * The createParents flag also requests creating the parents. - * As entries in the registry can hold data while still having - * child entries, it is not an error if any of the parent path - * elements have service records. - * - * @param path path to create - * @param createParents also create the parents. - * @throws PathNotFoundException parent path is not in the registry. - * @throws InvalidPathnameException path name is invalid. - * @throws IOException Any other IO Exception. - * @return true if the path was created, false if it existed. 
- */ - boolean mknode(String path, boolean createParents) - throws PathNotFoundException, - InvalidPathnameException, - IOException; - - /** - * Bind a path in the registry to a service record - * @param path path to service record - * @param record service record service record to create/update - * @param flags bind flags - * @throws PathNotFoundException the parent path does not exist - * @throws FileAlreadyExistsException path exists but create flags - * do not include "overwrite" - * @throws InvalidPathnameException path name is invalid. - * @throws IOException Any other IO Exception. - */ - void bind(String path, ServiceRecord record, int flags) - throws PathNotFoundException, - FileAlreadyExistsException, - InvalidPathnameException, - IOException; - - /** - * Resolve the record at a path - * @param path path to an entry containing a {@link ServiceRecord} - * @return the record - * @throws PathNotFoundException path is not in the registry. - * @throws NoRecordException if there is not a service record - * @throws InvalidRecordException if there was a service record but it could - * not be parsed. - * @throws IOException Any other IO Exception - */ - - ServiceRecord resolve(String path) - throws PathNotFoundException, - NoRecordException, - InvalidRecordException, - IOException; - - /** - * Get the status of a path - * @param path path to query - * @return the status of the path - * @throws PathNotFoundException path is not in the registry. - * @throws InvalidPathnameException the path is invalid. - * @throws IOException Any other IO Exception - */ - RegistryPathStatus stat(String path) - throws PathNotFoundException, - InvalidPathnameException, - IOException; - - /** - * Probe for a path existing. 
- * This is equivalent to {@link #stat(String)} with - * any failure downgraded to a - * @param path path to query - * @return true if the path was found - * @throws IOException - */ - boolean exists(String path) throws IOException; - - /** - * List all entries under a registry path, returning the relative names - * of the entries. - * @param path path to query - * @return a possibly empty list of the short path names of - * child entries. - * @throws PathNotFoundException - * @throws InvalidPathnameException - * @throws IOException - */ - List<String> list(String path) throws - PathNotFoundException, - InvalidPathnameException, - IOException; - - /** - * Delete a path. - * - * If the operation returns without an error then the entry has been - * deleted. - * @param path path delete recursively - * @param recursive recursive flag - * @throws PathNotFoundException path is not in the registry. - * @throws InvalidPathnameException the path is invalid. - * @throws PathIsNotEmptyDirectoryException path has child entries, but - * recursive is false. - * @throws IOException Any other IO Exception - * - */ - void delete(String path, boolean recursive) - throws PathNotFoundException, - PathIsNotEmptyDirectoryException, - InvalidPathnameException, - IOException; - - /** - * Add a new write access entry to be added to node permissions in all - * future write operations of a session connected to a secure registry. - * - * This does not grant the session any more rights: if it lacked any write - * access, it will still be unable to manipulate the registry. - * - * In an insecure cluster, this operation has no effect. - * @param id ID to use - * @param pass password - * @return true if the accessor was added: that is, the registry connection - * uses permissions to manage access - * @throws IOException on any failure to build the digest - */ - boolean addWriteAccessor(String id, String pass) throws IOException; - - /** - * Clear all write accessors. 
- * - * At this point all standard permissions/ACLs are retained, - * including any set on behalf of the user - * Only accessors added via {@link #addWriteAccessor(String, String)} - * are removed. - */ - public void clearWriteAccessors(); -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryOperationsFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryOperationsFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryOperationsFactory.java deleted file mode 100644 index 5f9c5f3..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/RegistryOperationsFactory.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.registry.client.api; - -import com.google.common.base.Preconditions; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.service.ServiceStateException; -import org.apache.hadoop.registry.client.impl.RegistryOperationsClient; - -import static org.apache.hadoop.registry.client.api.RegistryConstants.*; - -/** - * A factory for registry operation service instances. - * <p> - * <i>Each created instance will be returned initialized.</i> - * <p> - * That is, the service will have had <code>Service.init(conf)</code> applied - * to it —possibly after the configuration has been modified to - * support the specific binding/security mechanism used - */ -public final class RegistryOperationsFactory { - - private RegistryOperationsFactory() { - } - - /** - * Create and initialize a registry operations instance. - * Access writes will be determined from the configuration - * @param conf configuration - * @return a registry operations instance - * @throws ServiceStateException on any failure to initialize - */ - public static RegistryOperations createInstance(Configuration conf) { - return createInstance("RegistryOperations", conf); - } - - /** - * Create and initialize a registry operations instance. 
- * Access rights will be determined from the configuration - * @param name name of the instance - * @param conf configuration - * @return a registry operations instance - * @throws ServiceStateException on any failure to initialize - */ - public static RegistryOperations createInstance(String name, Configuration conf) { - Preconditions.checkArgument(conf != null, "Null configuration"); - RegistryOperationsClient operations = - new RegistryOperationsClient(name); - operations.init(conf); - return operations; - } - - public static RegistryOperationsClient createClient(String name, - Configuration conf) { - Preconditions.checkArgument(conf != null, "Null configuration"); - RegistryOperationsClient operations = new RegistryOperationsClient(name); - operations.init(conf); - return operations; - } - - /** - * Create and initialize an anonymous read/write registry operations instance. - * In a secure cluster, this instance will only have read access to the - * registry. - * @param conf configuration - * @return an anonymous registry operations instance - * - * @throws ServiceStateException on any failure to initialize - */ - public static RegistryOperations createAnonymousInstance(Configuration conf) { - Preconditions.checkArgument(conf != null, "Null configuration"); - conf.set(KEY_REGISTRY_CLIENT_AUTH, REGISTRY_CLIENT_AUTH_ANONYMOUS); - return createInstance("AnonymousRegistryOperations", conf); - } - - /** - * Create and initialize an secure, Kerberos-authenticated instance. - * - * The user identity will be inferred from the current user - * - * The authentication of this instance will expire when any kerberos - * tokens needed to authenticate with the registry infrastructure expire. - * @param conf configuration - * @param jaasContext the JAAS context of the account. 
- * @return a registry operations instance - * @throws ServiceStateException on any failure to initialize - */ - public static RegistryOperations createKerberosInstance(Configuration conf, - String jaasContext) { - Preconditions.checkArgument(conf != null, "Null configuration"); - conf.set(KEY_REGISTRY_CLIENT_AUTH, REGISTRY_CLIENT_AUTH_KERBEROS); - conf.set(KEY_REGISTRY_CLIENT_JAAS_CONTEXT, jaasContext); - return createInstance("KerberosRegistryOperations", conf); - } - - /** - * Create a kerberos registry service client - * @param conf configuration - * @param jaasClientEntry the name of the login config entry - * @param principal principal of the client. - * @param keytab location to the keytab file - * @return a registry service client instance - */ - public static RegistryOperations createKerberosInstance(Configuration conf, - String jaasClientEntry, String principal, String keytab) { - Preconditions.checkArgument(conf != null, "Null configuration"); - conf.set(KEY_REGISTRY_CLIENT_AUTH, REGISTRY_CLIENT_AUTH_KERBEROS); - conf.set(KEY_REGISTRY_CLIENT_JAAS_CONTEXT, jaasClientEntry); - RegistryOperationsClient operations = - new RegistryOperationsClient("KerberosRegistryOperations"); - operations.setKerberosPrincipalAndKeytab(principal, keytab); - operations.init(conf); - return operations; - } - - - /** - * Create and initialize an operations instance authenticated with write - * access via an <code>id:password</code> pair. - * - * The instance will have the read access - * across the registry, but write access only to that part of the registry - * to which it has been give the relevant permissions. 
- * @param conf configuration - * @param id user ID - * @param password password - * @return a registry operations instance - * @throws ServiceStateException on any failure to initialize - * @throws IllegalArgumentException if an argument is invalid - */ - public static RegistryOperations createAuthenticatedInstance(Configuration conf, - String id, - String password) { - Preconditions.checkArgument(!StringUtils.isEmpty(id), "empty Id"); - Preconditions.checkArgument(!StringUtils.isEmpty(password), "empty Password"); - Preconditions.checkArgument(conf != null, "Null configuration"); - conf.set(KEY_REGISTRY_CLIENT_AUTH, REGISTRY_CLIENT_AUTH_DIGEST); - conf.set(KEY_REGISTRY_CLIENT_AUTHENTICATION_ID, id); - conf.set(KEY_REGISTRY_CLIENT_AUTHENTICATION_PASSWORD, password); - return createInstance("DigestRegistryOperations", conf); - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/package-info.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/package-info.java deleted file mode 100644 index f5f844e..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/api/package-info.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * YARN Registry Client API. - * - * This package contains the core API for the YARN registry. - * - * <ol> - * <li> Data types can be found in - * {@link org.apache.hadoop.registry.client.types}</li> - * <li> Exceptions are listed in - * {@link org.apache.hadoop.registry.client.exceptions}</li> - * <li> Classes to assist use of the registry are in - * {@link org.apache.hadoop.registry.client.binding}</li> - * </ol> - * - * - */ -package org.apache.hadoop.registry.client.api; http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java deleted file mode 100644 index 04aabfc..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.registry.client.binding; - -import com.fasterxml.jackson.core.JsonProcessingException; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.registry.client.exceptions.InvalidRecordException; -import org.apache.hadoop.registry.client.exceptions.NoRecordException; -import org.apache.hadoop.util.JsonSerialization; - -import java.io.EOFException; -import java.io.IOException; - -/** - * Support for marshalling objects to and from JSON. - * <p> - * This extends {@link JsonSerialization} with the notion - * of a marker field in the JSON file, with - * <ol> - * <li>a fail-fast check for it before even trying to parse.</li> - * <li>Specific IOException subclasses for a failure.</li> - * </ol> - * The rationale for this is not only to support different things in the, - * registry, but the fact that all ZK nodes have a size > 0 when examined. - * - * @param <T> Type to marshal. 
- */ [email protected] [email protected] -public class JsonSerDeser<T> extends JsonSerialization<T> { - - private static final String UTF_8 = "UTF-8"; - public static final String E_NO_DATA = "No data at path"; - public static final String E_DATA_TOO_SHORT = "Data at path too short"; - public static final String E_MISSING_MARKER_STRING = - "Missing marker string: "; - - /** - * Create an instance bound to a specific type - * @param classType class to marshall - */ - public JsonSerDeser(Class<T> classType) { - super(classType, false, false); - } - - /** - * Deserialize from a byte array - * @param path path the data came from - * @param bytes byte array - * @throws IOException all problems - * @throws EOFException not enough data - * @throws InvalidRecordException if the parsing failed -the record is invalid - * @throws NoRecordException if the data is not considered a record: either - * it is too short or it did not contain the marker string. - */ - public T fromBytes(String path, byte[] bytes) throws IOException { - return fromBytes(path, bytes, ""); - } - - /** - * Deserialize from a byte array, optionally checking for a marker string. - * <p> - * If the marker parameter is supplied (and not empty), then its presence - * will be verified before the JSON parsing takes place; it is a fast-fail - * check. If not found, an {@link InvalidRecordException} exception will be - * raised - * @param path path the data came from - * @param bytes byte array - * @param marker an optional string which, if set, MUST be present in the - * UTF-8 parsed payload. - * @return The parsed record - * @throws IOException all problems - * @throws EOFException not enough data - * @throws InvalidRecordException if the JSON parsing failed. - * @throws NoRecordException if the data is not considered a record: either - * it is too short or it did not contain the marker string. 
- */ - public T fromBytes(String path, byte[] bytes, String marker) - throws IOException { - int len = bytes.length; - if (len == 0 ) { - throw new NoRecordException(path, E_NO_DATA); - } - if (StringUtils.isNotEmpty(marker) && len < marker.length()) { - throw new NoRecordException(path, E_DATA_TOO_SHORT); - } - String json = new String(bytes, 0, len, UTF_8); - if (StringUtils.isNotEmpty(marker) - && !json.contains(marker)) { - throw new NoRecordException(path, E_MISSING_MARKER_STRING + marker); - } - try { - return fromJson(json); - } catch (JsonProcessingException e) { - throw new InvalidRecordException(path, e.toString(), e); - } - } - -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryPathUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryPathUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryPathUtils.java deleted file mode 100644 index b8e9ba1..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryPathUtils.java +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.registry.client.binding; - -import com.google.common.base.Preconditions; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.fs.PathNotFoundException; -import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException; -import org.apache.hadoop.registry.client.impl.zk.RegistryInternalConstants; -import org.apache.hadoop.registry.server.dns.BaseServiceRecordProcessor; -import org.apache.zookeeper.common.PathUtils; - -import java.net.IDN; -import java.util.ArrayList; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Basic operations on paths: manipulating them and creating and validating - * path elements. - */ [email protected] [email protected] -public class RegistryPathUtils { - - /** - * Compiled down pattern to validate single entries in the path - */ - private static final Pattern PATH_ENTRY_VALIDATION_PATTERN = - Pattern.compile(RegistryInternalConstants.VALID_PATH_ENTRY_PATTERN); - - private static final Pattern USER_NAME = - Pattern.compile("/users/([a-z][a-z0-9-.]*)"); - - /** - * Validate ZK path with the path itself included in - * the exception text - * @param path path to validate - * @return the path parameter - * @throws InvalidPathnameException if the pathname is invalid. 
- */ - public static String validateZKPath(String path) throws - InvalidPathnameException { - try { - PathUtils.validatePath(path); - - } catch (IllegalArgumentException e) { - throw new InvalidPathnameException(path, - "Invalid Path \"" + path + "\" : " + e, e); - } - return path; - } - - /** - * Validate ZK path as valid for a DNS hostname. - * @param path path to validate - * @return the path parameter - * @throws InvalidPathnameException if the pathname is invalid. - */ - public static String validateElementsAsDNS(String path) throws - InvalidPathnameException { - List<String> splitpath = split(path); - for (String fragment : splitpath) { - if (!PATH_ENTRY_VALIDATION_PATTERN.matcher(fragment).matches()) { - throw new InvalidPathnameException(path, - "Invalid Path element \"" + fragment + "\""); - } - } - return path; - } - - /** - * Create a full path from the registry root and the supplied subdir - * @param path path of operation - * @return an absolute path - * @throws InvalidPathnameException if the path is invalid - */ - public static String createFullPath(String base, String path) throws - InvalidPathnameException { - Preconditions.checkArgument(path != null, "null path"); - Preconditions.checkArgument(base != null, "null path"); - return validateZKPath(join(base, path)); - } - - /** - * Join two paths, guaranteeing that there will not be exactly - * one separator between the two, and exactly one at the front - * of the path. There will be no trailing "/" except for the special - * case that this is the root path - * @param base base path - * @param path second path to add - * @return a combined path. 
- */ - public static String join(String base, String path) { - Preconditions.checkArgument(path != null, "null path"); - Preconditions.checkArgument(base != null, "null path"); - StringBuilder fullpath = new StringBuilder(); - - if (!base.startsWith("/")) { - fullpath.append('/'); - } - fullpath.append(base); - - // guarantee a trailing / - if (!fullpath.toString().endsWith("/")) { - fullpath.append("/"); - } - // strip off any at the beginning - if (path.startsWith("/")) { - // path starts with /, so append all other characters -if present - if (path.length() > 1) { - fullpath.append(path.substring(1)); - } - } else { - fullpath.append(path); - } - - //here there may be a trailing "/" - String finalpath = fullpath.toString(); - if (finalpath.endsWith("/") && !"/".equals(finalpath)) { - finalpath = finalpath.substring(0, finalpath.length() - 1); - - } - return finalpath; - } - - /** - * split a path into elements, stripping empty elements - * @param path the path - * @return the split path - */ - public static List<String> split(String path) { - // - String[] pathelements = path.split("/"); - List<String> dirs = new ArrayList<String>(pathelements.length); - for (String pathelement : pathelements) { - if (!pathelement.isEmpty()) { - dirs.add(pathelement); - } - } - return dirs; - } - - /** - * Get the last entry in a path; for an empty path - * returns "". The split logic is that of - * {@link #split(String)} - * @param path path of operation - * @return the last path entry or "" if none. - */ - public static String lastPathEntry(String path) { - List<String> splits = split(path); - if (splits.isEmpty()) { - // empty path. Return "" - return ""; - } else { - return splits.get(splits.size() - 1); - } - } - - /** - * Get the parent of a path - * @param path path to look at - * @return the parent path - * @throws PathNotFoundException if the path was at root. 
- */ - public static String parentOf(String path) throws PathNotFoundException { - List<String> elements = split(path); - - int size = elements.size(); - if (size == 0) { - throw new PathNotFoundException("No parent of " + path); - } - if (size == 1) { - return "/"; - } - elements.remove(size - 1); - StringBuilder parent = new StringBuilder(path.length()); - for (String element : elements) { - parent.append("/"); - parent.append(element); - } - return parent.toString(); - } - - /** - * Perform any formatting for the registry needed to convert - * non-simple-DNS elements - * @param element element to encode - * @return an encoded string - */ - public static String encodeForRegistry(String element) { - return IDN.toASCII(element); - } - - /** - * Perform whatever transforms are needed to get a YARN ID into - * a DNS-compatible name - * @param yarnId ID as string of YARN application, instance or container - * @return a string suitable for use in registry paths. - */ - public static String encodeYarnID(String yarnId) { - return yarnId.replace("container", "ctr").replace("_", "-"); - } - - /** - * Return the username found in the ZK path. - * - * @param recPath the ZK recPath. - * @return the user name. 
- */ - public static String getUsername(String recPath) { - String user = "anonymous"; - Matcher matcher = USER_NAME.matcher(recPath); - if (matcher.find()) { - user = matcher.group(1); - } - return user; - } -} http://git-wip-us.apache.org/repos/asf/hadoop/blob/e2a9fa84/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java deleted file mode 100644 index 05df325..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.registry.client.binding; - -import com.google.common.base.Preconditions; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.registry.client.exceptions.InvalidRecordException; -import org.apache.hadoop.registry.client.types.Endpoint; -import org.apache.hadoop.registry.client.types.ProtocolTypes; -import org.apache.hadoop.registry.client.types.ServiceRecord; - -import java.net.InetSocketAddress; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.hadoop.registry.client.types.AddressTypes.*; - -/** - * Static methods to work with registry types —primarily endpoints and the - * list representation of addresses. - */ [email protected] [email protected] -public class RegistryTypeUtils { - - /** - * Create a URL endpoint from a list of URIs - * @param api implemented API - * @param protocolType protocol type - * @param uris URIs - * @return a new endpoint - */ - public static Endpoint urlEndpoint(String api, - String protocolType, - URI... uris) { - return new Endpoint(api, protocolType, uris); - } - - /** - * Create a REST endpoint from a list of URIs - * @param api implemented API - * @param uris URIs - * @return a new endpoint - */ - public static Endpoint restEndpoint(String api, - URI... uris) { - return urlEndpoint(api, ProtocolTypes.PROTOCOL_REST, uris); - } - - /** - * Create a Web UI endpoint from a list of URIs - * @param api implemented API - * @param uris URIs - * @return a new endpoint - */ - public static Endpoint webEndpoint(String api, - URI...
uris) { - return urlEndpoint(api, ProtocolTypes.PROTOCOL_WEBUI, uris); - } - - /** - * Create an internet address endpoint from a list of URIs - * @param api implemented API - * @param protocolType protocol type - * @param hostname hostname/FQDN - * @param port port - * @return a new endpoint - */ - - public static Endpoint inetAddrEndpoint(String api, - String protocolType, - String hostname, - int port) { - Preconditions.checkArgument(api != null, "null API"); - Preconditions.checkArgument(protocolType != null, "null protocolType"); - Preconditions.checkArgument(hostname != null, "null hostname"); - return new Endpoint(api, - ADDRESS_HOSTNAME_AND_PORT, - protocolType, - hostnamePortPair(hostname, port)); - } - - /** - * Create an IPC endpoint - * @param api API - * @param address the address as a tuple of (hostname, port) - * @return the new endpoint - */ - public static Endpoint ipcEndpoint(String api, InetSocketAddress address) { - return new Endpoint(api, - ADDRESS_HOSTNAME_AND_PORT, - ProtocolTypes.PROTOCOL_HADOOP_IPC, - address== null ? null: hostnamePortPair(address)); - } - - /** - * Create a single entry map - * @param key map entry key - * @param val map entry value - * @return a 1 entry map. - */ - public static Map<String, String> map(String key, String val) { - Map<String, String> map = new HashMap<String, String>(1); - map.put(key, val); - return map; - } - - /** - * Create a URI - * @param uri value - * @return a 1 entry map. - */ - public static Map<String, String> uri(String uri) { - return map(ADDRESS_URI, uri); - } - - /** - * Create a (hostname, port) address pair - * @param hostname hostname - * @param port port - * @return a 1 entry map. 
- */ - public static Map<String, String> hostnamePortPair(String hostname, int port) { - Map<String, String> map = - map(ADDRESS_HOSTNAME_FIELD, hostname); - map.put(ADDRESS_PORT_FIELD, Integer.toString(port)); - return map; - } - - /** - * Create a (hostname, port) address pair - * @param address socket address whose hostname and port are used for the - * generated address. - * @return a 1 entry map. - */ - public static Map<String, String> hostnamePortPair(InetSocketAddress address) { - return hostnamePortPair(address.getHostName(), address.getPort()); - } - - /** - * Require a specific address type on an endpoint - * @param required required type - * @param epr endpoint - * @throws InvalidRecordException if the type is wrong - */ - public static void requireAddressType(String required, Endpoint epr) throws - InvalidRecordException { - if (!required.equals(epr.addressType)) { - throw new InvalidRecordException( - epr.toString(), - "Address type of " + epr.addressType - + " does not match required type of " - + required); - } - } - - /** - * Get a single URI endpoint - * @param epr endpoint - * @return the uri of the first entry in the address list. 
Null if the endpoint - * itself is null - * @throws InvalidRecordException if the type is wrong, there are no addresses - * or the payload ill-formatted - */ - public static List<String> retrieveAddressesUriType(Endpoint epr) - throws InvalidRecordException { - if (epr == null) { - return null; - } - requireAddressType(ADDRESS_URI, epr); - List<Map<String, String>> addresses = epr.addresses; - if (addresses.size() < 1) { - throw new InvalidRecordException(epr.toString(), - "No addresses in endpoint"); - } - List<String> results = new ArrayList<String>(addresses.size()); - for (Map<String, String> address : addresses) { - results.add(getAddressField(address, ADDRESS_URI)); - } - return results; - } - - /** - * Get a specific field from an address -raising an exception if - * the field is not present - * @param address address to query - * @param field field to resolve - * @return the resolved value. Guaranteed to be non-null. - * @throws InvalidRecordException if the field did not resolve - */ - public static String getAddressField(Map<String, String> address, - String field) throws InvalidRecordException { - String val = address.get(field); - if (val == null) { - throw new InvalidRecordException("", "Missing address field: " + field); - } - return val; - } - - /** - * Get the address URLs. Guranteed to return at least one address. 
- * @param epr endpoint - * @return the address as a URL - * @throws InvalidRecordException if the type is wrong, there are no addresses - * or the payload ill-formatted - * @throws MalformedURLException address can't be turned into a URL - */ - public static List<URL> retrieveAddressURLs(Endpoint epr) - throws InvalidRecordException, MalformedURLException { - if (epr == null) { - throw new InvalidRecordException("", "Null endpoint"); - } - List<String> addresses = retrieveAddressesUriType(epr); - List<URL> results = new ArrayList<URL>(addresses.size()); - for (String address : addresses) { - results.add(new URL(address)); - } - return results; - } - - /** - * Validate the record by checking for null fields and other invalid - * conditions - * @param path path for exceptions - * @param record record to validate. May be null - * @throws InvalidRecordException on invalid entries - */ - public static void validateServiceRecord(String path, ServiceRecord record) - throws InvalidRecordException { - if (record == null) { - throw new InvalidRecordException(path, "Null record"); - } - if (!ServiceRecord.RECORD_TYPE.equals(record.type)) { - throw new InvalidRecordException(path, - "invalid record type field: \"" + record.type + "\""); - } - - if (record.external != null) { - for (Endpoint endpoint : record.external) { - validateEndpoint(path, endpoint); - } - } - if (record.internal != null) { - for (Endpoint endpoint : record.internal) { - validateEndpoint(path, endpoint); - } - } - } - - /** - * Validate the endpoint by checking for null fields and other invalid - * conditions - * @param path path for exceptions - * @param endpoint endpoint to validate. 
May be null - * @throws InvalidRecordException on invalid entries - */ - public static void validateEndpoint(String path, Endpoint endpoint) - throws InvalidRecordException { - if (endpoint == null) { - throw new InvalidRecordException(path, "Null endpoint"); - } - try { - endpoint.validate(); - } catch (RuntimeException e) { - throw new InvalidRecordException(path, e.toString()); - } - } - -} --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
