http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java ---------------------------------------------------------------------- diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java index 0ebe645..71bb8a4 100644 --- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java @@ -17,36 +17,20 @@ package org.apache.ignite.internal.processors.hadoop; import org.apache.ignite.internal.GridKernalContext; -import org.apache.ignite.internal.util.typedef.F; import org.jetbrains.annotations.Nullable; -import org.jsr166.ConcurrentHashMap8; -import org.objectweb.asm.AnnotationVisitor; -import org.objectweb.asm.Attribute; import org.objectweb.asm.ClassReader; -import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; -import org.objectweb.asm.FieldVisitor; -import org.objectweb.asm.Handle; -import org.objectweb.asm.Label; -import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; -import org.objectweb.asm.Type; import org.objectweb.asm.commons.Remapper; import org.objectweb.asm.commons.RemappingClassAdapter; import java.io.IOException; import java.io.InputStream; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; /** * Utility methods for Hadoop classloader required to avoid direct 3rd-party dependencies in class loader. */ public class HadoopHelperImpl implements HadoopHelper { - /** Cache for resolved dependency info. */ - private static final Map<String, Boolean> dependenciesCache = new ConcurrentHashMap8<>(); - /** Kernal context. 
*/ private final GridKernalContext ctx; @@ -70,6 +54,11 @@ public class HadoopHelperImpl implements HadoopHelper { } /** {@inheritDoc} */ + @Override public boolean isNoOp() { + return false; + } + + /** {@inheritDoc} */ @Override public HadoopClassLoader commonClassLoader() { HadoopClassLoader res = ldr; @@ -125,582 +114,7 @@ public class HadoopHelperImpl implements HadoopHelper { } /** {@inheritDoc} */ - @Override public boolean isHadoop(String cls) { - return cls.startsWith("org.apache.hadoop."); - } - - /** {@inheritDoc} */ - @Override public boolean isHadoopIgfs(String cls) { - String ignitePkgPrefix = "org.apache.ignite"; - - int len = ignitePkgPrefix.length(); - - return cls.startsWith(ignitePkgPrefix) && ( - cls.indexOf("igfs.", len) != -1 || - cls.indexOf(".fs.", len) != -1 || - cls.indexOf("hadoop.", len) != -1); - } - - /** {@inheritDoc} */ @Override @Nullable public InputStream loadClassBytes(ClassLoader ldr, String clsName) { return ldr.getResourceAsStream(clsName.replace('.', '/') + ".class"); } - - /** {@inheritDoc} */ - @Override public boolean hasExternalDependencies(String clsName, ClassLoader parentClsLdr) { - Boolean hasDeps = dependenciesCache.get(clsName); - - if (hasDeps == null) { - CollectingContext ctx = new CollectingContext(parentClsLdr); - - ctx.annVisitor = new CollectingAnnotationVisitor(ctx); - ctx.mthdVisitor = new CollectingMethodVisitor(ctx, ctx.annVisitor); - ctx.fldVisitor = new CollectingFieldVisitor(ctx, ctx.annVisitor); - ctx.clsVisitor = new CollectingClassVisitor(ctx, ctx.annVisitor, ctx.mthdVisitor, ctx.fldVisitor); - - hasDeps = hasExternalDependencies(clsName, parentClsLdr, ctx); - - dependenciesCache.put(clsName, hasDeps); - } - - return hasDeps; - } - - /** - * Check whether class has external dependencies on Hadoop. - * - * @param clsName Class name. - * @param parentClsLdr Parent class loader. - * @param ctx Context. - * @return {@code true} If the class has external dependencies. - */ - private boolean hasExternalDependencies(String clsName, ClassLoader parentClsLdr, CollectingContext ctx) { - if (isHadoop(clsName)) // Hadoop must not be in classpath but Idea sucks, so filtering explicitly as external. - return true; - - // Try to get from parent to check if the type accessible. - InputStream in = loadClassBytes(parentClsLdr, clsName); - - if (in == null) // The class is external itself, it must be loaded from this class loader. - return true; - - if (!isHadoopIgfs(clsName)) // Other classes should not have external dependencies. - return false; - - final ClassReader rdr; - - try { - rdr = new ClassReader(in); - } - catch (IOException e) { - throw new RuntimeException("Failed to read class: " + clsName, e); - } - - ctx.visited.add(clsName); - - rdr.accept(ctx.clsVisitor, 0); - - if (ctx.found) // We already know that we have dependencies, no need to check parent. - return true; - - // Here we are known to not have any dependencies but possibly we have a parent which has them. - int idx = clsName.lastIndexOf('$'); - - if (idx == -1) // No parent class. - return false; - - String parentCls = clsName.substring(0, idx); - - if (ctx.visited.contains(parentCls)) - return false; - - Boolean res = dependenciesCache.get(parentCls); - - if (res == null) - res = hasExternalDependencies(parentCls, parentClsLdr, ctx); - - return res; - } - - /** - * @param name Class name. - * @return {@code true} If this is a valid class name. 
- */ - private static boolean validateClassName(String name) { - int len = name.length(); - - if (len <= 1) - return false; - - if (!Character.isJavaIdentifierStart(name.charAt(0))) - return false; - - boolean hasDot = false; - - for (int i = 1; i < len; i++) { - char c = name.charAt(i); - - if (c == '.') - hasDot = true; - else if (!Character.isJavaIdentifierPart(c)) - return false; - } - - return hasDot; - } - - /** - * Context for dependencies collection. - */ - private class CollectingContext { - /** Visited classes. */ - private final Set<String> visited = new HashSet<>(); - - /** Parent class loader. */ - private final ClassLoader parentClsLdr; - - /** Whether dependency found. */ - private boolean found; - - /** Annotation visitor. */ - private AnnotationVisitor annVisitor; - - /** Method visitor. */ - private MethodVisitor mthdVisitor; - - /** Field visitor. */ - private FieldVisitor fldVisitor; - - /** Class visitor. */ - private ClassVisitor clsVisitor; - - /** - * Constrcutor. - * - * @param parentClsLdr Parent class loader. - */ - private CollectingContext(ClassLoader parentClsLdr) { - this.parentClsLdr = parentClsLdr; - } - - /** - * Processes a method descriptor - * @param methDesc The method desc String. - */ - void onMethodsDesc(final String methDesc) { - // Process method return type: - onType(Type.getReturnType(methDesc)); - - if (found) - return; - - // Process method argument types: - for (Type t: Type.getArgumentTypes(methDesc)) { - onType(t); - - if (found) - return; - } - } - - /** - * Processes dependencies of a class. - * - * @param depCls The class name as dot-notated FQN. - */ - void onClass(final String depCls) { - assert depCls.indexOf('/') == -1 : depCls; // class name should be fully converted to dot notation. - assert depCls.charAt(0) != 'L' : depCls; - assert validateClassName(depCls) : depCls; - - if (depCls.startsWith("java.") || depCls.startsWith("javax.")) // Filter out platform classes. - return; - - if (visited.contains(depCls)) - return; - - Boolean res = dependenciesCache.get(depCls); - - if (res == Boolean.TRUE || (res == null && hasExternalDependencies(depCls, parentClsLdr, this))) - found = true; - } - - /** - * Analyses dependencies of given type. - * - * @param t The type to process. - */ - void onType(Type t) { - if (t == null) - return; - - int sort = t.getSort(); - - switch (sort) { - case Type.ARRAY: - onType(t.getElementType()); - - break; - - case Type.OBJECT: - onClass(t.getClassName()); - - break; - } - } - - /** - * Analyses dependencies of given object type. - * - * @param objType The object type to process. - */ - void onInternalTypeName(String objType) { - if (objType == null) - return; - - assert objType.length() > 1 : objType; - - if (objType.charAt(0) == '[') - // handle array. In this case this is a type descriptor notation, like "[Ljava/lang/Object;" - onType(objType); - else { - assert objType.indexOf('.') == -1 : objType; // Must be slash-separated FQN. - - String clsName = objType.replace('/', '.'); // Convert it to dot notation. - - onClass(clsName); // Process. - } - } - - /** - * Type description analyser. - * - * @param desc The description. - */ - void onType(String desc) { - if (!F.isEmpty(desc)) { - if (desc.length() <= 1) - return; // Optimization: filter out primitive types in early stage. - - Type t = Type.getType(desc); - - onType(t); - } - } - } - - /** - * Annotation visitor. 
- */ - private static class CollectingAnnotationVisitor extends AnnotationVisitor { - /** */ - final CollectingContext ctx; - - /** - * Annotation visitor. - * - * @param ctx The collector. - */ - CollectingAnnotationVisitor(CollectingContext ctx) { - super(Opcodes.ASM4); - - this.ctx = ctx; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotation(String name, String desc) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return this; - } - - /** {@inheritDoc} */ - @Override public void visitEnum(String name, String desc, String val) { - if (ctx.found) - return; - - ctx.onType(desc); - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitArray(String name) { - return ctx.found ? null : this; - } - - /** {@inheritDoc} */ - @Override public void visit(String name, Object val) { - if (ctx.found) - return; - - if (val instanceof Type) - ctx.onType((Type)val); - } - - /** {@inheritDoc} */ - @Override public void visitEnd() { - // No-op. - } - } - - /** - * Field visitor. - */ - private static class CollectingFieldVisitor extends FieldVisitor { - /** Collector. */ - private final CollectingContext ctx; - - /** Annotation visitor. */ - private final AnnotationVisitor av; - - /** - * Constructor. - */ - CollectingFieldVisitor(CollectingContext ctx, AnnotationVisitor av) { - super(Opcodes.ASM4); - - this.ctx = ctx; - this.av = av; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotation(String desc, boolean visible) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? null : av; - } - - /** {@inheritDoc} */ - @Override public void visitAttribute(Attribute attr) { - // No-op. - } - - /** {@inheritDoc} */ - @Override public void visitEnd() { - // No-op. - } - } - - /** - * Class visitor. - */ - private static class CollectingClassVisitor extends ClassVisitor { - /** Collector. */ - private final CollectingContext ctx; - - /** Annotation visitor. */ - private final AnnotationVisitor av; - - /** Method visitor. */ - private final MethodVisitor mv; - - /** Field visitor. */ - private final FieldVisitor fv; - - /** - * Constructor. - * - * @param ctx Collector. - * @param av Annotation visitor. - * @param mv Method visitor. - * @param fv Field visitor. - */ - CollectingClassVisitor(CollectingContext ctx, AnnotationVisitor av, MethodVisitor mv, FieldVisitor fv) { - super(Opcodes.ASM4); - - this.ctx = ctx; - this.av = av; - this.mv = mv; - this.fv = fv; - } - - /** {@inheritDoc} */ - @Override public void visit(int i, int i2, String name, String signature, String superName, String[] ifaces) { - if (ctx.found) - return; - - ctx.onInternalTypeName(superName); - - if (ctx.found) - return; - - if (ifaces != null) { - for (String iface : ifaces) { - ctx.onInternalTypeName(iface); - - if (ctx.found) - return; - } - } - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotation(String desc, boolean visible) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? null : av; - } - - /** {@inheritDoc} */ - @Override public void visitInnerClass(String name, String outerName, String innerName, int i) { - if (ctx.found) - return; - - ctx.onInternalTypeName(name); - } - - /** {@inheritDoc} */ - @Override public FieldVisitor visitField(int i, String name, String desc, String signature, Object val) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? 
null : fv; - } - - /** {@inheritDoc} */ - @Override public MethodVisitor visitMethod(int i, String name, String desc, String signature, - String[] exceptions) { - if (ctx.found) - return null; - - ctx.onMethodsDesc(desc); - - // Process declared method exceptions: - if (exceptions != null) { - for (String e : exceptions) - ctx.onInternalTypeName(e); - } - - return ctx.found ? null : mv; - } - } - - /** - * Method visitor. - */ - private static class CollectingMethodVisitor extends MethodVisitor { - /** Collector. */ - private final CollectingContext ctx; - - /** Annotation visitor. */ - private final AnnotationVisitor av; - - /** - * Constructor. - * - * @param ctx Collector. - * @param av Annotation visitor. - */ - private CollectingMethodVisitor(CollectingContext ctx, AnnotationVisitor av) { - super(Opcodes.ASM4); - - this.ctx = ctx; - this.av = av; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotation(String desc, boolean visible) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? null : av; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitParameterAnnotation(int i, String desc, boolean b) { - if (ctx.found) - return null; - - ctx.onType(desc); - - return ctx.found ? null : av; - } - - /** {@inheritDoc} */ - @Override public AnnotationVisitor visitAnnotationDefault() { - return ctx.found ? null : av; - } - - /** {@inheritDoc} */ - @Override public void visitFieldInsn(int opcode, String owner, String name, String desc) { - if (ctx.found) - return; - - ctx.onInternalTypeName(owner); - - if (ctx.found) - return; - - ctx.onType(desc); - } - - /** {@inheritDoc} */ - @Override public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) { - // No-op. - } - - /** {@inheritDoc} */ - @Override public void visitFrame(int type, int nLoc, Object[] locTypes, int nStack, Object[] stackTypes) { - // No-op. - } - - /** {@inheritDoc} */ - @Override public void visitLocalVariable(String name, String desc, String signature, Label lb, - Label lb2, int i) { - if (ctx.found) - return; - - ctx.onType(desc); - } - - /** {@inheritDoc} */ - @Override public void visitMethodInsn(int i, String owner, String name, String desc) { - if (ctx.found) - return; - - ctx.onInternalTypeName(owner); - - if (ctx.found) - return; - - ctx.onMethodsDesc(desc); - } - - /** {@inheritDoc} */ - @Override public void visitMultiANewArrayInsn(String desc, int dim) { - if (ctx.found) - return; - - ctx.onType(desc); - } - - /** {@inheritDoc} */ - @Override public void visitTryCatchBlock(Label start, Label end, Label hndl, String typeStr) { - if (ctx.found) - return; - - ctx.onInternalTypeName(typeStr); - } - - /** {@inheritDoc} */ - @Override public void visitTypeInsn(int opcode, String type) { - if (ctx.found) - return; - - ctx.onInternalTypeName(type); - } - } - }
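[Editor's note] The block removed from HadoopHelperImpl above used ASM's ClassReader/ClassVisitor chain to decide whether a class drags in external (Hadoop) dependencies before choosing which class loader must load it. A minimal sketch of that bytecode-scanning technique is shown below; it is not part of this commit, and the class and method names (HadoopDependencyProbe, referencesHadoop) are illustrative only. Unlike the removed CollectingClassVisitor, it only inspects the super class and implemented interfaces, not fields, methods or annotations.

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Opcodes;

import java.io.IOException;
import java.io.InputStream;

/** Hypothetical sketch: detect whether a class references Hadoop types by scanning its bytecode with ASM. */
class HadoopDependencyProbe {
    /**
     * @param ldr Class loader to read the class bytes from.
     * @param clsName Class name in dot notation.
     * @return {@code true} if the super class or an implemented interface is an org.apache.hadoop type.
     */
    static boolean referencesHadoop(ClassLoader ldr, String clsName) throws IOException {
        InputStream in = ldr.getResourceAsStream(clsName.replace('.', '/') + ".class");

        if (in == null)
            return false; // Class bytes are not visible to this loader.

        final boolean[] found = new boolean[1];

        // SKIP_CODE: only class-level structure is needed, method bodies are not visited.
        new ClassReader(in).accept(new ClassVisitor(Opcodes.ASM4) {
            @Override public void visit(int ver, int access, String name, String sig, String superName,
                String[] ifaces) {
                // ASM reports internal (slash-separated) names here.
                if (superName != null && superName.startsWith("org/apache/hadoop/"))
                    found[0] = true;

                if (ifaces != null) {
                    for (String iface : ifaces) {
                        if (iface.startsWith("org/apache/hadoop/"))
                            found[0] = true;
                    }
                }
            }
        }, ClassReader.SKIP_CODE);

        return found[0];
    }
}

The removed implementation additionally cached results per class name and walked enclosing classes (the '$' split) so that a nested class inherits the verdict of its outer class; the sketch omits both for brevity.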
http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopImpl.java ---------------------------------------------------------------------- diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopImpl.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopImpl.java new file mode 100644 index 0000000..ed2657e --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopImpl.java @@ -0,0 +1,134 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop; + +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.configuration.HadoopConfiguration; +import org.apache.ignite.internal.IgniteInternalFuture; +import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; +import org.apache.ignite.internal.util.GridSpinBusyLock; +import org.jetbrains.annotations.Nullable; + +/** + * Hadoop facade implementation. + */ +public class HadoopImpl implements Hadoop { + /** Hadoop processor. */ + private final HadoopProcessor proc; + + /** Busy lock. */ + private final GridSpinBusyLock busyLock = new GridSpinBusyLock(); + + /** + * Constructor. + * + * @param proc Hadoop processor. 
+ */ + HadoopImpl(HadoopProcessor proc) { + this.proc = proc; + } + + /** {@inheritDoc} */ + @Override public HadoopConfiguration configuration() { + return proc.config(); + } + + /** {@inheritDoc} */ + @Override public HadoopJobId nextJobId() { + if (busyLock.enterBusy()) { + try { + return proc.nextJobId(); + } + finally { + busyLock.leaveBusy(); + } + } + else + throw new IllegalStateException("Failed to get next job ID (grid is stopping)."); + } + + /** {@inheritDoc} */ + @Override public IgniteInternalFuture<?> submit(HadoopJobId jobId, HadoopJobInfo jobInfo) { + if (busyLock.enterBusy()) { + try { + return proc.submit(jobId, jobInfo); + } + finally { + busyLock.leaveBusy(); + } + } + else + throw new IllegalStateException("Failed to submit job (grid is stopping)."); + } + + /** {@inheritDoc} */ + @Nullable @Override public HadoopJobStatus status(HadoopJobId jobId) throws IgniteCheckedException { + if (busyLock.enterBusy()) { + try { + return proc.status(jobId); + } + finally { + busyLock.leaveBusy(); + } + } + else + throw new IllegalStateException("Failed to get job status (grid is stopping)."); + } + + /** {@inheritDoc} */ + @Nullable @Override public HadoopCounters counters(HadoopJobId jobId) throws IgniteCheckedException { + if (busyLock.enterBusy()) { + try { + return proc.counters(jobId); + } + finally { + busyLock.leaveBusy(); + } + } + else + throw new IllegalStateException("Failed to get job counters (grid is stopping)."); + } + + /** {@inheritDoc} */ + @Nullable @Override public IgniteInternalFuture<?> finishFuture(HadoopJobId jobId) throws IgniteCheckedException { + if (busyLock.enterBusy()) { + try { + return proc.finishFuture(jobId); + } + finally { + busyLock.leaveBusy(); + } + } + else + throw new IllegalStateException("Failed to get job finish future (grid is stopping)."); + } + + /** {@inheritDoc} */ + @Override public boolean kill(HadoopJobId jobId) throws IgniteCheckedException { + if (busyLock.enterBusy()) { + try { + return proc.kill(jobId); + } + finally { + busyLock.leaveBusy(); + } + } + else + throw new IllegalStateException("Failed to kill job (grid is stopping)."); + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java ---------------------------------------------------------------------- diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java new file mode 100644 index 0000000..520f094 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java @@ -0,0 +1,230 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop; + +import org.apache.ignite.IgniteCheckedException; +import org.apache.ignite.configuration.HadoopConfiguration; +import org.apache.ignite.hadoop.mapreduce.IgniteHadoopMapReducePlanner; +import org.apache.ignite.internal.GridKernalContext; +import org.apache.ignite.internal.IgniteInternalFuture; +import org.apache.ignite.internal.processors.hadoop.counter.HadoopCounters; +import org.apache.ignite.internal.processors.hadoop.jobtracker.HadoopJobTracker; +import org.apache.ignite.internal.processors.hadoop.shuffle.HadoopShuffle; +import org.apache.ignite.internal.processors.hadoop.taskexecutor.HadoopEmbeddedTaskExecutor; +import org.apache.ignite.internal.util.tostring.GridToStringExclude; +import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.apache.ignite.internal.util.typedef.internal.U; + +import java.io.IOException; +import java.util.List; +import java.util.ListIterator; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Hadoop processor. + */ +public class HadoopProcessor extends HadoopProcessorAdapter { + /** Job ID counter. */ + private final AtomicInteger idCtr = new AtomicInteger(); + + /** Hadoop context. */ + @GridToStringExclude + private HadoopContext hctx; + + /** Hadoop facade for public API. */ + @GridToStringExclude + private Hadoop hadoop; + + /** + * Constructor. + * + * @param ctx Kernal context. + */ + public HadoopProcessor(GridKernalContext ctx) { + super(ctx); + } + + /** {@inheritDoc} */ + @Override public void start() throws IgniteCheckedException { + if (ctx.isDaemon()) + return; + + HadoopConfiguration cfg = ctx.config().getHadoopConfiguration(); + + if (cfg == null) + cfg = new HadoopConfiguration(); + else + cfg = new HadoopConfiguration(cfg); + + initializeDefaults(cfg); + + hctx = new HadoopContext( + ctx, + cfg, + new HadoopJobTracker(), + new HadoopEmbeddedTaskExecutor(), + // TODO: IGNITE-404: Uncomment when fixed. + //cfg.isExternalExecution() ? new HadoopExternalTaskExecutor() : new HadoopEmbeddedTaskExecutor(), + new HadoopShuffle()); + + for (HadoopComponent c : hctx.components()) + c.start(hctx); + + hadoop = new HadoopImpl(this); + + ctx.addNodeAttribute(HadoopAttributes.NAME, new HadoopAttributes(cfg)); + } + + /** {@inheritDoc} */ + @Override public void onKernalStart() throws IgniteCheckedException { + super.onKernalStart(); + + if (hctx == null) + return; + + for (HadoopComponent c : hctx.components()) + c.onKernalStart(); + } + + /** {@inheritDoc} */ + @Override public void onKernalStop(boolean cancel) { + super.onKernalStop(cancel); + + if (hctx == null) + return; + + List<HadoopComponent> components = hctx.components(); + + for (ListIterator<HadoopComponent> it = components.listIterator(components.size()); it.hasPrevious();) { + HadoopComponent c = it.previous(); + + c.onKernalStop(cancel); + } + } + + /** {@inheritDoc} */ + @Override public void stop(boolean cancel) throws IgniteCheckedException { + super.stop(cancel); + + if (hctx == null) + return; + + List<HadoopComponent> components = hctx.components(); + + for (ListIterator<HadoopComponent> it = components.listIterator(components.size()); it.hasPrevious();) { + HadoopComponent c = it.previous(); + + c.stop(cancel); + } + } + + /** + * Gets Hadoop context. + * + * @return Hadoop context. 
+ */ + public HadoopContext context() { + return hctx; + } + + /** {@inheritDoc} */ + @Override public Hadoop hadoop() { + if (hadoop == null) + throw new IllegalStateException("Hadoop accelerator is disabled (Hadoop is not in classpath, " + + "is HADOOP_HOME environment variable set?)"); + + return hadoop; + } + + /** {@inheritDoc} */ + @Override public HadoopConfiguration config() { + return hctx.configuration(); + } + + /** {@inheritDoc} */ + @Override public HadoopJobId nextJobId() { + return new HadoopJobId(ctx.localNodeId(), idCtr.incrementAndGet()); + } + + /** {@inheritDoc} */ + @Override public IgniteInternalFuture<?> submit(HadoopJobId jobId, HadoopJobInfo jobInfo) { + ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader()); + + try { + return hctx.jobTracker().submit(jobId, jobInfo); + } + finally { + HadoopCommonUtils.restoreContextClassLoader(oldLdr); + } + } + + /** {@inheritDoc} */ + @Override public HadoopJobStatus status(HadoopJobId jobId) throws IgniteCheckedException { + return hctx.jobTracker().status(jobId); + } + + /** {@inheritDoc} */ + @Override public HadoopCounters counters(HadoopJobId jobId) throws IgniteCheckedException { + return hctx.jobTracker().jobCounters(jobId); + } + + /** {@inheritDoc} */ + @Override public IgniteInternalFuture<?> finishFuture(HadoopJobId jobId) throws IgniteCheckedException { + return hctx.jobTracker().finishFuture(jobId); + } + + /** {@inheritDoc} */ + @Override public boolean kill(HadoopJobId jobId) throws IgniteCheckedException { + return hctx.jobTracker().killJob(jobId); + } + + /** {@inheritDoc} */ + @Override public void validateEnvironment() throws IgniteCheckedException { + // Perform some static checks as early as possible, so that any recoverable exceptions are thrown here. + try { + HadoopLocations loc = HadoopClasspathUtils.locations(); + + if (!F.isEmpty(loc.home())) + U.quietAndInfo(log, HadoopClasspathUtils.HOME + " is set to " + loc.home()); + + U.quietAndInfo(log, "Resolved Hadoop classpath locations: " + loc.common() + ", " + loc.hdfs() + ", " + + loc.mapred()); + } + catch (IOException ioe) { + throw new IgniteCheckedException(ioe.getMessage(), ioe); + } + + HadoopClassLoader.hadoopUrls(); + } + + /** + * Initializes default hadoop configuration. + * + * @param cfg Hadoop configuration. + */ + private void initializeDefaults(HadoopConfiguration cfg) { + if (cfg.getMapReducePlanner() == null) + cfg.setMapReducePlanner(new IgniteHadoopMapReducePlanner()); + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(HadoopProcessor.class, this); + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopSetup.java ---------------------------------------------------------------------- diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopSetup.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopSetup.java new file mode 100644 index 0000000..ed39ce5 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopSetup.java @@ -0,0 +1,542 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.FilenameFilter; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Date; +import java.util.Scanner; +import org.apache.ignite.internal.util.typedef.F; +import org.apache.ignite.internal.util.typedef.X; +import org.apache.ignite.internal.util.typedef.internal.U; + +import static org.apache.ignite.internal.IgniteVersionUtils.ACK_VER_STR; +import static org.apache.ignite.internal.IgniteVersionUtils.COPYRIGHT; + +/** + * Setup tool to configure Hadoop client. + */ +public class HadoopSetup { + /** */ + public static final String WINUTILS_EXE = "winutils.exe"; + + /** */ + private static final FilenameFilter IGNITE_JARS = new FilenameFilter() { + @Override public boolean accept(File dir, String name) { + return name.startsWith("ignite-") && name.endsWith(".jar"); + } + }; + + /** + * The main method. + * @param ignore Params. + */ + public static void main(String[] ignore) { + X.println( + " __________ ________________ ", + " / _/ ___/ |/ / _/_ __/ __/ ", + " _/ // (7 7 // / / / / _/ ", + "/___/\\___/_/|_/___/ /_/ /___/ ", + " for Apache Hadoop ", + " ", + "ver. " + ACK_VER_STR, + COPYRIGHT); + + configureHadoop(); + } + + /** + * This operation prepares the clean unpacked Hadoop distributive to work as client with Ignite-Hadoop. + * It performs these operations: + * <ul> + * <li>Check for setting of HADOOP_HOME environment variable.</li> + * <li>Try to resolve HADOOP_COMMON_HOME or evaluate it relative to HADOOP_HOME.</li> + * <li>In Windows check if winutils.exe exists and try to fix issue with some restrictions.</li> + * <li>In Windows check new line character issues in CMD scripts.</li> + * <li>Scan Hadoop lib directory to detect Ignite JARs. If these don't exist tries to create ones.</li> + * </ul> + */ + private static void configureHadoop() { + String igniteHome = U.getIgniteHome(); + + println("IGNITE_HOME is set to '" + igniteHome + "'."); + + checkIgniteHome(igniteHome); + + String homeVar = "HADOOP_HOME"; + String hadoopHome = System.getenv(homeVar); + + if (F.isEmpty(hadoopHome)) { + homeVar = "HADOOP_PREFIX"; + hadoopHome = System.getenv(homeVar); + } + + if (F.isEmpty(hadoopHome)) + exit("Neither HADOOP_HOME nor HADOOP_PREFIX environment variable is set. 
Please set one of them to a " + + "valid Hadoop installation directory and run setup tool again.", null); + + hadoopHome = hadoopHome.replaceAll("\"", ""); + + println(homeVar + " is set to '" + hadoopHome + "'."); + + String hiveHome = System.getenv("HIVE_HOME"); + + if (!F.isEmpty(hiveHome)) { + hiveHome = hiveHome.replaceAll("\"", ""); + + println("HIVE_HOME is set to '" + hiveHome + "'."); + } + + File hadoopDir = new File(hadoopHome); + + if (!hadoopDir.exists()) + exit("Hadoop installation folder does not exist.", null); + + if (!hadoopDir.isDirectory()) + exit("HADOOP_HOME must point to a directory.", null); + + if (!hadoopDir.canRead()) + exit("Hadoop installation folder can not be read. Please check permissions.", null); + + final File hadoopCommonDir; + + String hadoopCommonHome = System.getenv("HADOOP_COMMON_HOME"); + + if (F.isEmpty(hadoopCommonHome)) { + hadoopCommonDir = new File(hadoopDir, "share/hadoop/common"); + + println("HADOOP_COMMON_HOME is not set, will use '" + hadoopCommonDir.getPath() + "'."); + } + else { + println("HADOOP_COMMON_HOME is set to '" + hadoopCommonHome + "'."); + + hadoopCommonDir = new File(hadoopCommonHome); + } + + if (!hadoopCommonDir.canRead()) + exit("Failed to read Hadoop common dir '" + hadoopCommonDir + "'.", null); + + final File hadoopCommonLibDir = new File(hadoopCommonDir, "lib"); + + if (!hadoopCommonLibDir.canRead()) + exit("Failed to read Hadoop 'lib' folder in '" + hadoopCommonLibDir.getPath() + "'.", null); + + if (U.isWindows()) { + checkJavaPathSpaces(); + + final File hadoopBinDir = new File(hadoopDir, "bin"); + + if (!hadoopBinDir.canRead()) + exit("Failed to read subdirectory 'bin' in HADOOP_HOME.", null); + + File winutilsFile = new File(hadoopBinDir, WINUTILS_EXE); + + if (!winutilsFile.exists()) { + if (ask("File '" + WINUTILS_EXE + "' does not exist. " + + "It may be replaced by a stub. Create it?")) { + println("Creating file stub '" + winutilsFile.getAbsolutePath() + "'."); + + boolean ok = false; + + try { + ok = winutilsFile.createNewFile(); + } + catch (IOException ignore) { + // No-op. + } + + if (!ok) + exit("Failed to create '" + WINUTILS_EXE + "' file. Please check permissions.", null); + } + else + println("Ok. But Hadoop client probably will not work on Windows this way..."); + } + + processCmdFiles(hadoopDir, "bin", "sbin", "libexec"); + } + + File igniteLibs = new File(new File(igniteHome), "libs"); + + if (!igniteLibs.exists()) + exit("Ignite 'libs' folder is not found.", null); + + Collection<File> jarFiles = new ArrayList<>(); + + addJarsInFolder(jarFiles, igniteLibs); + addJarsInFolder(jarFiles, new File(igniteLibs, "ignite-hadoop")); + addJarsInFolder(jarFiles, new File(igniteLibs, "ignite-hadoop-impl")); + + boolean jarsLinksCorrect = true; + + for (File file : jarFiles) { + File link = new File(hadoopCommonLibDir, file.getName()); + + jarsLinksCorrect &= isJarLinkCorrect(link, file); + + if (!jarsLinksCorrect) + break; + } + + if (!jarsLinksCorrect) { + if (ask("Ignite JAR files are not found in Hadoop 'lib' directory. " + + "Create appropriate symbolic links?")) { + File[] oldIgniteJarFiles = hadoopCommonLibDir.listFiles(IGNITE_JARS); + + if (oldIgniteJarFiles.length > 0 && ask("The Hadoop 'lib' directory contains JARs from other Ignite " + + "installation. They must be deleted to continue. 
Continue?")) { + for (File file : oldIgniteJarFiles) { + println("Deleting file '" + file.getAbsolutePath() + "'."); + + if (!file.delete()) + exit("Failed to delete file '" + file.getPath() + "'.", null); + } + } + + for (File file : jarFiles) { + File targetFile = new File(hadoopCommonLibDir, file.getName()); + + try { + println("Creating symbolic link '" + targetFile.getAbsolutePath() + "'."); + + Files.createSymbolicLink(targetFile.toPath(), file.toPath()); + } + catch (IOException e) { + if (U.isWindows()) { + warn("Ability to create symbolic links is required!"); + warn("On Windows platform you have to grant permission 'Create symbolic links'"); + warn("to your user or run the Accelerator as Administrator."); + } + + exit("Creating symbolic link failed! Check permissions.", e); + } + } + } + else + println("Ok. But Hadoop client will not be able to talk to Ignite cluster without those JARs in classpath..."); + } + + File hadoopEtc = new File(hadoopDir, "etc" + File.separator + "hadoop"); + + File igniteHadoopCfg = igniteHadoopConfig(igniteHome); + + if (!igniteHadoopCfg.canRead()) + exit("Failed to read Ignite Hadoop 'config' folder at '" + igniteHadoopCfg.getAbsolutePath() + "'.", null); + + if (hadoopEtc.canWrite()) { // TODO Bigtop + if (ask("Replace 'core-site.xml' and 'mapred-site.xml' files with preconfigured templates " + + "(existing files will be backed up)?")) { + replaceWithBackup(new File(igniteHadoopCfg, "core-site.ignite.xml"), + new File(hadoopEtc, "core-site.xml")); + + replaceWithBackup(new File(igniteHadoopCfg, "mapred-site.ignite.xml"), + new File(hadoopEtc, "mapred-site.xml")); + } + else + println("Ok. You can configure them later, the templates are available at Ignite's 'docs' directory..."); + } + + if (!F.isEmpty(hiveHome)) { + File hiveConfDir = new File(hiveHome + File.separator + "conf"); + + if (!hiveConfDir.canWrite()) + warn("Can not write to '" + hiveConfDir.getAbsolutePath() + "'. To run Hive queries you have to " + + "configure 'hive-site.xml' manually. The template is available at Ignite's 'docs' directory."); + else if (ask("Replace 'hive-site.xml' with preconfigured template (existing file will be backed up)?")) + replaceWithBackup(new File(igniteHadoopCfg, "hive-site.ignite.xml"), + new File(hiveConfDir, "hive-site.xml")); + else + println("Ok. You can configure it later, the template is available at Ignite's 'docs' directory..."); + } + + println("Apache Hadoop setup is complete."); + } + + /** + * Get Ignite Hadoop config directory. + * + * @param igniteHome Ignite home. + * @return Ignite Hadoop config directory. + */ + private static File igniteHadoopConfig(String igniteHome) { + Path path = Paths.get(igniteHome, "modules", "hadoop", "config"); + + if (!Files.exists(path)) + path = Paths.get(igniteHome, "config", "hadoop"); + + if (Files.exists(path)) + return path.toFile(); + else + return new File(igniteHome, "docs"); + } + + /** + * @param jarFiles Jars. + * @param folder Folder. + */ + private static void addJarsInFolder(Collection<File> jarFiles, File folder) { + if (!folder.exists()) + exit("Folder '" + folder.getAbsolutePath() + "' is not found.", null); + + jarFiles.addAll(Arrays.asList(folder.listFiles(IGNITE_JARS))); + } + + /** + * Checks that JAVA_HOME does not contain space characters. 
+ */ + private static void checkJavaPathSpaces() { + String javaHome = System.getProperty("java.home"); + + if (javaHome.contains(" ")) { + warn("Java installation path contains space characters!"); + warn("Hadoop client will not be able to start using '" + javaHome + "'."); + warn("Please install JRE to path which does not contain spaces and point JAVA_HOME to that installation."); + } + } + + /** + * Checks Ignite home. + * + * @param igniteHome Ignite home. + */ + private static void checkIgniteHome(String igniteHome) { + URL jarUrl = U.class.getProtectionDomain().getCodeSource().getLocation(); + + try { + Path jar = Paths.get(jarUrl.toURI()); + Path igHome = Paths.get(igniteHome); + + if (!jar.startsWith(igHome)) + exit("Ignite JAR files are not under IGNITE_HOME.", null); + } + catch (Exception e) { + exit(e.getMessage(), e); + } + } + + /** + * Replaces target file with source file. + * + * @param from From. + * @param to To. + */ + private static void replaceWithBackup(File from, File to) { + if (!from.canRead()) + exit("Failed to read source file '" + from.getAbsolutePath() + "'.", null); + + println("Replacing file '" + to.getAbsolutePath() + "'."); + + try { + U.copy(from, renameToBak(to), true); + } + catch (IOException e) { + exit("Failed to replace file '" + to.getAbsolutePath() + "'.", e); + } + } + + /** + * Renames file for backup. + * + * @param file File. + * @return File. + */ + private static File renameToBak(File file) { + DateFormat fmt = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss"); + + if (file.exists() && !file.renameTo(new File(file.getAbsolutePath() + "." + fmt.format(new Date()) + ".bak"))) + exit("Failed to rename file '" + file.getPath() + "'.", null); + + return file; + } + + /** + * Checks if link is correct. + * + * @param link Symbolic link. + * @param correctTarget Correct link target. + * @return {@code true} If link target is correct. + */ + private static boolean isJarLinkCorrect(File link, File correctTarget) { + if (!Files.isSymbolicLink(link.toPath())) + return false; // It is a real file or it does not exist. + + Path target = null; + + try { + target = Files.readSymbolicLink(link.toPath()); + } + catch (IOException e) { + exit("Failed to read symbolic link: " + link.getAbsolutePath(), e); + } + + return Files.exists(target) && target.toFile().equals(correctTarget); + } + + /** + * Writes the question end read the boolean answer from the console. + * + * @param question Question to write. + * @return {@code true} if user inputs 'Y' or 'y', {@code false} otherwise. + */ + private static boolean ask(String question) { + X.println(); + X.print(" < " + question + " (Y/N): "); + + String answer = null; + + if (!F.isEmpty(System.getenv("IGNITE_HADOOP_SETUP_YES"))) + answer = "Y"; + else { + BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); + + try { + answer = br.readLine(); + } + catch (IOException e) { + exit("Failed to read answer: " + e.getMessage(), e); + } + } + + if (answer != null && "Y".equals(answer.toUpperCase().trim())) { + X.println(" > Yes."); + + return true; + } + else { + X.println(" > No."); + + return false; + } + } + + /** + * Exit with message. + * + * @param msg Exit message. + */ + private static void exit(String msg, Exception e) { + X.println(" "); + X.println(" # " + msg); + X.println(" # Setup failed, exiting... "); + + if (e != null && !F.isEmpty(System.getenv("IGNITE_HADOOP_SETUP_DEBUG"))) + e.printStackTrace(); + + System.exit(1); + } + + /** + * Prints message. + * + * @param msg Message. 
+ */ + private static void println(String msg) { + X.println(" > " + msg); + } + + /** + * Prints warning. + * + * @param msg Message. + */ + private static void warn(String msg) { + X.println(" ! " + msg); + } + + /** + * Checks that CMD files have valid MS Windows new line characters. If not, writes question to console and reads the + * answer. If it's 'Y' then backups original files and corrects invalid new line characters. + * + * @param rootDir Root directory to process. + * @param dirs Directories inside of the root to process. + */ + private static void processCmdFiles(File rootDir, String... dirs) { + boolean answer = false; + + for (String dir : dirs) { + File subDir = new File(rootDir, dir); + + File[] cmdFiles = subDir.listFiles(new FilenameFilter() { + @Override public boolean accept(File dir, String name) { + return name.toLowerCase().endsWith(".cmd"); + } + }); + + for (File file : cmdFiles) { + String content = null; + + try (Scanner scanner = new Scanner(file)) { + content = scanner.useDelimiter("\\Z").next(); + } + catch (FileNotFoundException e) { + exit("Failed to read file '" + file + "'.", e); + } + + boolean invalid = false; + + for (int i = 0; i < content.length(); i++) { + if (content.charAt(i) == '\n' && (i == 0 || content.charAt(i - 1) != '\r')) { + invalid = true; + + break; + } + } + + if (invalid) { + answer = answer || ask("One or more *.CMD files has invalid new line character. Replace them?"); + + if (!answer) { + println("Ok. But Windows most probably will fail to execute them..."); + + return; + } + + println("Fixing newline characters in file '" + file.getAbsolutePath() + "'."); + + renameToBak(file); + + try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) { + for (int i = 0; i < content.length(); i++) { + if (content.charAt(i) == '\n' && (i == 0 || content.charAt(i - 1) != '\r')) + writer.write("\r"); + + writer.write(content.charAt(i)); + } + } + catch (IOException e) { + exit("Failed to write file '" + file.getPath() + "': " + e.getMessage(), e); + } + } + } + } + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopTaskCancelledException.java ---------------------------------------------------------------------- diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopTaskCancelledException.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopTaskCancelledException.java new file mode 100644 index 0000000..1dc8674 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopTaskCancelledException.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.internal.processors.hadoop; + +import org.apache.ignite.IgniteException; + +/** + * Exception that throws when the task is cancelling. + */ +public class HadoopTaskCancelledException extends IgniteException { + /** */ + private static final long serialVersionUID = 0L; + + /** + * @param msg Exception message. + */ + public HadoopTaskCancelledException(String msg) { + super(msg); + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterAdapter.java ---------------------------------------------------------------------- diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterAdapter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterAdapter.java new file mode 100644 index 0000000..ee61a82 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCounterAdapter.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop.counter; + +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; + +import org.apache.ignite.internal.util.typedef.internal.S; +import org.jetbrains.annotations.Nullable; + +/** + * Default Hadoop counter implementation. + */ +public abstract class HadoopCounterAdapter implements HadoopCounter, Externalizable { + /** */ + private static final long serialVersionUID = 0L; + + /** Counter group name. */ + private String grp; + + /** Counter name. */ + private String name; + + /** + * Default constructor required by {@link Externalizable}. + */ + protected HadoopCounterAdapter() { + // No-op. + } + + /** + * Creates new counter with given group and name. + * + * @param grp Counter group name. + * @param name Counter name. 
+ */ + protected HadoopCounterAdapter(String grp, String name) { + assert grp != null : "counter must have group"; + assert name != null : "counter must have name"; + + this.grp = grp; + this.name = name; + } + + /** {@inheritDoc} */ + @Override public String name() { + return name; + } + + /** {@inheritDoc} */ + @Override @Nullable public String group() { + return grp; + } + + /** {@inheritDoc} */ + @Override public void writeExternal(ObjectOutput out) throws IOException { + out.writeUTF(grp); + out.writeUTF(name); + writeValue(out); + } + + /** {@inheritDoc} */ + @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { + grp = in.readUTF(); + name = in.readUTF(); + readValue(in); + } + + /** {@inheritDoc} */ + @Override public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + HadoopCounterAdapter cntr = (HadoopCounterAdapter)o; + + if (!grp.equals(cntr.grp)) + return false; + if (!name.equals(cntr.name)) + return false; + + return true; + } + + /** {@inheritDoc} */ + @Override public int hashCode() { + int res = grp.hashCode(); + res = 31 * res + name.hashCode(); + return res; + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(HadoopCounterAdapter.class, this); + } + + /** + * Writes value of this counter to output. + * + * @param out Output. + * @throws IOException If failed. + */ + protected abstract void writeValue(ObjectOutput out) throws IOException; + + /** + * Read value of this counter from input. + * + * @param in Input. + * @throws IOException If failed. + */ + protected abstract void readValue(ObjectInput in) throws IOException; +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCountersImpl.java ---------------------------------------------------------------------- diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCountersImpl.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCountersImpl.java new file mode 100644 index 0000000..f3b5463 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopCountersImpl.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ignite.internal.processors.hadoop.counter; + +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; +import java.lang.reflect.Constructor; +import java.util.Collection; +import java.util.concurrent.ConcurrentMap; +import org.apache.ignite.IgniteException; +import org.apache.ignite.internal.util.lang.GridTuple3; +import org.apache.ignite.internal.util.typedef.internal.S; +import org.apache.ignite.internal.util.typedef.internal.U; +import org.jsr166.ConcurrentHashMap8; + +/** + * Default in-memory counters store. + */ +public class HadoopCountersImpl implements HadoopCounters, Externalizable { + /** */ + private static final long serialVersionUID = 0L; + + /** */ + private final ConcurrentMap<CounterKey, HadoopCounter> cntrsMap = new ConcurrentHashMap8<>(); + + /** + * Default constructor. Creates new instance without counters. + */ + public HadoopCountersImpl() { + // No-op. + } + + /** + * Creates new instance that contain given counters. + * + * @param cntrs Counters to store. + */ + public HadoopCountersImpl(Iterable<HadoopCounter> cntrs) { + addCounters(cntrs, true); + } + + /** + * Copy constructor. + * + * @param cntrs Counters to copy. + */ + public HadoopCountersImpl(HadoopCounters cntrs) { + this(cntrs.all()); + } + + /** + * Creates counter instance. + * + * @param cls Class of the counter. + * @param grp Group name. + * @param name Counter name. + * @return Counter. + */ + private <T extends HadoopCounter> T createCounter(Class<? extends HadoopCounter> cls, String grp, + String name) { + try { + Constructor constructor = cls.getConstructor(String.class, String.class); + + return (T)constructor.newInstance(grp, name); + } + catch (Exception e) { + throw new IgniteException(e); + } + } + + /** + * Adds counters collection in addition to existing counters. + * + * @param cntrs Counters to add. + * @param cp Whether to copy counters or not. 
+ */ + private void addCounters(Iterable<HadoopCounter> cntrs, boolean cp) { + assert cntrs != null; + + for (HadoopCounter cntr : cntrs) { + if (cp) { + HadoopCounter cntrCp = createCounter(cntr.getClass(), cntr.group(), cntr.name()); + + cntrCp.merge(cntr); + + cntr = cntrCp; + } + + cntrsMap.put(new CounterKey(cntr.getClass(), cntr.group(), cntr.name()), cntr); + } + } + + /** {@inheritDoc} */ + @Override public <T extends HadoopCounter> T counter(String grp, String name, Class<T> cls) { + assert cls != null; + + CounterKey mapKey = new CounterKey(cls, grp, name); + + T cntr = (T)cntrsMap.get(mapKey); + + if (cntr == null) { + cntr = createCounter(cls, grp, name); + + T old = (T)cntrsMap.putIfAbsent(mapKey, cntr); + + if (old != null) + return old; + } + + return cntr; + } + + /** {@inheritDoc} */ + @Override public Collection<HadoopCounter> all() { + return cntrsMap.values(); + } + + /** {@inheritDoc} */ + @Override public void merge(HadoopCounters other) { + for (HadoopCounter counter : other.all()) + counter(counter.group(), counter.name(), counter.getClass()).merge(counter); + } + + /** {@inheritDoc} */ + @Override public void writeExternal(ObjectOutput out) throws IOException { + U.writeCollection(out, cntrsMap.values()); + } + + /** {@inheritDoc} */ + @SuppressWarnings("unchecked") + @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { + addCounters(U.<HadoopCounter>readCollection(in), false); + } + + /** {@inheritDoc} */ + @Override public boolean equals(Object o) { + if (this == o) + return true; + + if (o == null || getClass() != o.getClass()) + return false; + + HadoopCountersImpl counters = (HadoopCountersImpl)o; + + return cntrsMap.equals(counters.cntrsMap); + } + + /** {@inheritDoc} */ + @Override public int hashCode() { + return cntrsMap.hashCode(); + } + + /** {@inheritDoc} */ + @Override public String toString() { + return S.toString(HadoopCountersImpl.class, this, "counters", cntrsMap.values()); + } + + /** + * The tuple of counter identifier components for more readable code. + */ + private static class CounterKey extends GridTuple3<Class<? extends HadoopCounter>, String, String> { + /** */ + private static final long serialVersionUID = 0L; + + /** + * Constructor. + * + * @param cls Class of the counter. + * @param grp Group name. + * @param name Counter name. + */ + private CounterKey(Class<? extends HadoopCounter> cls, String grp, String name) { + super(cls, grp, name); + } + + /** + * Empty constructor required by {@link Externalizable}. + */ + public CounterKey() { + // No-op. + } + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopLongCounter.java ---------------------------------------------------------------------- diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopLongCounter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopLongCounter.java new file mode 100644 index 0000000..0d61e0d --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopLongCounter.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ignite.internal.processors.hadoop.counter; + +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; + +/** + * Standard hadoop counter to use via original Hadoop API in Hadoop jobs. + */ +public class HadoopLongCounter extends HadoopCounterAdapter { + /** */ + private static final long serialVersionUID = 0L; + + /** The counter value. */ + private long val; + + /** + * Default constructor required by {@link Externalizable}. + */ + public HadoopLongCounter() { + // No-op. + } + + /** + * Constructor. + * + * @param grp Group name. + * @param name Counter name. + */ + public HadoopLongCounter(String grp, String name) { + super(grp, name); + } + + /** {@inheritDoc} */ + @Override protected void writeValue(ObjectOutput out) throws IOException { + out.writeLong(val); + } + + /** {@inheritDoc} */ + @Override protected void readValue(ObjectInput in) throws IOException { + val = in.readLong(); + } + + /** {@inheritDoc} */ + @Override public void merge(HadoopCounter cntr) { + val += ((HadoopLongCounter)cntr).val; + } + + /** + * Gets current value of this counter. + * + * @return Current value. + */ + public long value() { + return val; + } + + /** + * Sets current value by the given value. + * + * @param val Value to set. + */ + public void value(long val) { + this.val = val; + } + + /** + * Increment this counter by the given value. + * + * @param i Value to increase this counter by. + */ + public void increment(long i) { + val += i; + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopPerformanceCounter.java ---------------------------------------------------------------------- diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopPerformanceCounter.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopPerformanceCounter.java new file mode 100644 index 0000000..9baedc2 --- /dev/null +++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/counter/HadoopPerformanceCounter.java @@ -0,0 +1,286 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop.counter;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.UUID;
+
+import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils;
+import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo;
+import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
+import org.apache.ignite.internal.processors.hadoop.HadoopTaskType;
+import org.apache.ignite.internal.util.typedef.F;
+import org.apache.ignite.internal.util.typedef.T2;
+import org.apache.ignite.internal.util.typedef.internal.U;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Counter for job statistics accumulation.
+ */
+public class HadoopPerformanceCounter extends HadoopCounterAdapter {
+    /** */
+    private static final long serialVersionUID = 0L;
+
+    /** The group name for this counter. */
+    private static final String GROUP_NAME = "SYSTEM";
+
+    /** The counter name for this counter. */
+    private static final String COUNTER_NAME = "PERFORMANCE";
+
+    /** Events collection. */
+    private Collection<T2<String,Long>> evts = new ArrayList<>();
+
+    /** Node id to insert into the event info. */
+    private UUID nodeId;
+
+    /** */
+    private int reducerNum;
+
+    /** */
+    private volatile Long firstShuffleMsg;
+
+    /** */
+    private volatile Long lastShuffleMsg;
+
+    /**
+     * Default constructor required by {@link Externalizable}.
+     */
+    public HadoopPerformanceCounter() {
+        // No-op.
+    }
+
+    /**
+     * Constructor.
+     *
+     * @param grp Group name.
+     * @param name Counter name.
+     */
+    public HadoopPerformanceCounter(String grp, String name) {
+        super(grp, name);
+    }
+
+    /**
+     * Constructor for an instance that is used as a helper.
+     *
+     * @param nodeId Id of the worker node.
+     */
+    public HadoopPerformanceCounter(UUID nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void writeValue(ObjectOutput out) throws IOException {
+        U.writeCollection(out, evts);
+    }
+
+    /** {@inheritDoc} */
+    @Override protected void readValue(ObjectInput in) throws IOException {
+        try {
+            evts = U.readCollection(in);
+        }
+        catch (ClassNotFoundException e) {
+            throw new IOException(e);
+        }
+    }
+
+    /** {@inheritDoc} */
+    @Override public void merge(HadoopCounter cntr) {
+        evts.addAll(((HadoopPerformanceCounter)cntr).evts);
+    }
+
+    /**
+     * Gets the events collection.
+     *
+     * @return Collection of events.
+     */
+    public Collection<T2<String, Long>> evts() {
+        return evts;
+    }
+
+    /**
+     * Generates a name that encodes event information.
+     *
+     * @param info Task info.
+     * @param evtType The type of the event.
+     * @return String containing the necessary event information.
+     */
+    private String eventName(HadoopTaskInfo info, String evtType) {
+        return eventName(info.type().toString(), info.taskNumber(), evtType);
+    }
+
+    /**
+     * Generates a name that encodes event information.
+     *
+     * @param taskType Task type.
+     * @param taskNum Number of the task.
+     * @param evtType The type of the event.
+     * @return String containing the necessary event information.
+     */
+    private String eventName(String taskType, int taskNum, String evtType) {
+        assert nodeId != null;
+
+        return taskType + " " + taskNum + " " + evtType + " " + nodeId;
+    }
+
+    /**
+     * Adds an event for the task submission (task instance creation).
+     *
+     * @param info Task info.
+     * @param ts Timestamp of the event.
+     */
+    public void onTaskSubmit(HadoopTaskInfo info, long ts) {
+        evts.add(new T2<>(eventName(info, "submit"), ts));
+    }
+
+    /**
+     * Adds an event for the task preparation.
+     *
+     * @param info Task info.
+     * @param ts Timestamp of the event.
+     */
+    public void onTaskPrepare(HadoopTaskInfo info, long ts) {
+        evts.add(new T2<>(eventName(info, "prepare"), ts));
+    }
+
+    /**
+     * Adds an event for the task finish.
+     *
+     * @param info Task info.
+     * @param ts Timestamp of the event.
+     */
+    public void onTaskFinish(HadoopTaskInfo info, long ts) {
+        if (info.type() == HadoopTaskType.REDUCE && lastShuffleMsg != null) {
+            evts.add(new T2<>(eventName("SHUFFLE", reducerNum, "start"), firstShuffleMsg));
+            evts.add(new T2<>(eventName("SHUFFLE", reducerNum, "finish"), lastShuffleMsg));
+
+            lastShuffleMsg = null;
+        }
+
+        evts.add(new T2<>(eventName(info, "finish"), ts));
+    }
+
+    /**
+     * Adds an event for the task start.
+     *
+     * @param info Task info.
+     * @param ts Timestamp of the event.
+     */
+    public void onTaskStart(HadoopTaskInfo info, long ts) {
+        evts.add(new T2<>(eventName(info, "start"), ts));
+    }
+
+    /**
+     * Adds an event for the job preparation.
+     *
+     * @param ts Timestamp of the event.
+     */
+    public void onJobPrepare(long ts) {
+        assert nodeId != null;
+
+        evts.add(new T2<>("JOB prepare " + nodeId, ts));
+    }
+
+    /**
+     * Adds an event for the job start.
+     *
+     * @param ts Timestamp of the event.
+     */
+    public void onJobStart(long ts) {
+        assert nodeId != null;
+
+        evts.add(new T2<>("JOB start " + nodeId, ts));
+    }
+
+    /**
+     * Adds client submission events from job info.
+     *
+     * @param info Job info.
+     */
+    public void clientSubmissionEvents(HadoopJobInfo info) {
+        assert nodeId != null;
+
+        addEventFromProperty("JOB requestId", info, HadoopCommonUtils.REQ_NEW_JOBID_TS_PROPERTY);
+        addEventFromProperty("JOB responseId", info, HadoopCommonUtils.RESPONSE_NEW_JOBID_TS_PROPERTY);
+        addEventFromProperty("JOB submit", info, HadoopCommonUtils.JOB_SUBMISSION_START_TS_PROPERTY);
+    }
+
+    /**
+     * Adds an event with a timestamp taken from a property of the job info.
+     *
+     * @param evt Event type and phase.
+     * @param info Job info.
+     * @param propName Property name to get the timestamp from.
+     */
+    private void addEventFromProperty(String evt, HadoopJobInfo info, String propName) {
+        String val = info.property(propName);
+
+        if (!F.isEmpty(val)) {
+            try {
+                evts.add(new T2<>(evt + " " + nodeId, Long.parseLong(val)));
+            }
+            catch (NumberFormatException e) {
+                throw new IllegalStateException("Invalid value '" + val + "' of property '" + propName + "'", e);
+            }
+        }
+    }
+
+    /**
+     * Registers a shuffle message event.
+     *
+     * @param reducerNum Number of the reducer that receives the data.
+     * @param ts Timestamp of the event.
+     */
+    public void onShuffleMessage(int reducerNum, long ts) {
+        this.reducerNum = reducerNum;
+
+        if (firstShuffleMsg == null)
+            firstShuffleMsg = ts;
+
+        lastShuffleMsg = ts;
+    }
+
+    /**
+     * Gets the predefined system performance counter from the HadoopCounters object.
+     *
+     * @param cntrs HadoopCounters object.
+     * @param nodeId Node id used by the methods that add events; may be {@code null} if those methods are not used.
+     * @return Predefined performance counter.
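A sketch of how a node might drive the event-recording API above; jobCntrs, locNodeId and taskInfo are assumed to already exist (hypothetical names), and getCounter() is the accessor defined just below:

    HadoopPerformanceCounter perf = HadoopPerformanceCounter.getCounter(jobCntrs, locNodeId);

    perf.onJobStart(System.currentTimeMillis());

    perf.onTaskSubmit(taskInfo, System.currentTimeMillis());
    perf.onTaskPrepare(taskInfo, System.currentTimeMillis());
    perf.onTaskStart(taskInfo, System.currentTimeMillis());

    // For a reduce task that received shuffle messages, this also emits the
    // buffered SHUFFLE start/finish events before the finish event itself.
    perf.onTaskFinish(taskInfo, System.currentTimeMillis());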
+     */
+    public static HadoopPerformanceCounter getCounter(HadoopCounters cntrs, @Nullable UUID nodeId) {
+        HadoopPerformanceCounter cntr = cntrs.counter(GROUP_NAME, COUNTER_NAME, HadoopPerformanceCounter.class);
+
+        if (nodeId != null)
+            cntr.nodeId(nodeId);
+
+        return cntr;
+    }
+
+    /**
+     * Sets the nodeId field.
+     *
+     * @param nodeId Node id.
+     */
+    private void nodeId(UUID nodeId) {
+        this.nodeId = nodeId;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ignite/blob/11b00873/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopDelegateUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopDelegateUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopDelegateUtils.java
index 6c39946..76d9bff 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopDelegateUtils.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/delegate/HadoopDelegateUtils.java
@@ -23,6 +23,8 @@ import org.apache.ignite.hadoop.fs.CachingHadoopFileSystemFactory;
 import org.apache.ignite.hadoop.fs.IgniteHadoopFileSystemCounterWriter;
 import org.apache.ignite.hadoop.fs.IgniteHadoopIgfsSecondaryFileSystem;
 import org.apache.ignite.hadoop.fs.KerberosHadoopFileSystemFactory;
+import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader;
+import org.jetbrains.annotations.Nullable;
 
 import java.lang.reflect.Constructor;
 import java.util.HashMap;
@@ -34,41 +36,42 @@ import java.util.Map;
 public class HadoopDelegateUtils {
     /** Secondary file system delegate class. */
     private static final String SECONDARY_FILE_SYSTEM_CLS =
-        "org.apache.ignite.internal.processors.hadoop.delegate.HadoopIgfsSecondaryFileSystemDelegateImpl";
+        "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopIgfsSecondaryFileSystemDelegateImpl";
 
     /** Default file system factory class. */
     private static final String DFLT_FACTORY_CLS =
-        "org.apache.ignite.internal.processors.hadoop.delegate.HadoopDefaultFileSystemFactoryDelegate";
+        "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopDefaultFileSystemFactoryDelegate";
 
     /** Factory proxy to delegate class name mapping. */
     private static final Map<String, String> FACTORY_CLS_MAP;
 
     /** Counter writer delegate implementation. */
     private static final String COUNTER_WRITER_DELEGATE_CLS =
-        "org.apache.ignite.internal.processors.hadoop.delegate.HadoopFileSystemCounterWriterDelegateImpl";
+        "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopFileSystemCounterWriterDelegateImpl";
 
     static {
         FACTORY_CLS_MAP = new HashMap<>();
 
         FACTORY_CLS_MAP.put(BasicHadoopFileSystemFactory.class.getName(),
-            "org.apache.ignite.internal.processors.hadoop.delegate.HadoopBasicFileSystemFactoryDelegate");
+            "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopBasicFileSystemFactoryDelegate");
 
         FACTORY_CLS_MAP.put(CachingHadoopFileSystemFactory.class.getName(),
-            "org.apache.ignite.internal.processors.hadoop.delegate.HadoopCachingFileSystemFactoryDelegate");
+            "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopCachingFileSystemFactoryDelegate");
 
         FACTORY_CLS_MAP.put(KerberosHadoopFileSystemFactory.class.getName(),
-            "org.apache.ignite.internal.processors.hadoop.delegate.HadoopKerberosFileSystemFactoryDelegate");
+            "org.apache.ignite.internal.processors.hadoop.impl.delegate.HadoopKerberosFileSystemFactoryDelegate");
     }
 
     /**
      * Create delegate for secondary file system.
      *
+     * @param ldr Hadoop class loader.
      * @param proxy Proxy.
      * @return Delegate.
      */
-    public static HadoopIgfsSecondaryFileSystemDelegate secondaryFileSystemDelegate(
+    public static HadoopIgfsSecondaryFileSystemDelegate secondaryFileSystemDelegate(HadoopClassLoader ldr,
         IgniteHadoopIgfsSecondaryFileSystem proxy) {
-        return newInstance(SECONDARY_FILE_SYSTEM_CLS, proxy);
+        return newInstance(SECONDARY_FILE_SYSTEM_CLS, ldr, proxy);
     }
 
     /**
@@ -84,31 +87,33 @@ public class HadoopDelegateUtils {
         if (clsName == null)
             clsName = DFLT_FACTORY_CLS;
 
-        return newInstance(clsName, proxy);
+        return newInstance(clsName, null, proxy);
     }
 
     /**
      * Create delegate for Hadoop counter writer.
      *
+     * @param ldr Class loader.
      * @param proxy Proxy.
      * @return Delegate.
      */
-    public static HadoopFileSystemCounterWriterDelegate counterWriterDelegate(
+    public static HadoopFileSystemCounterWriterDelegate counterWriterDelegate(ClassLoader ldr,
         IgniteHadoopFileSystemCounterWriter proxy) {
-        return newInstance(COUNTER_WRITER_DELEGATE_CLS, proxy);
+        return newInstance(COUNTER_WRITER_DELEGATE_CLS, ldr, proxy);
     }
 
     /**
      * Get new delegate instance.
      *
      * @param clsName Class name.
+     * @param ldr Optional class loader.
      * @param proxy Proxy.
      * @return Instance.
      */
     @SuppressWarnings("unchecked")
-    private static <T> T newInstance(String clsName, Object proxy) {
+    private static <T> T newInstance(String clsName, @Nullable ClassLoader ldr, Object proxy) {
         try {
-            Class delegateCls = Class.forName(clsName);
+            Class delegateCls = ldr == null ? Class.forName(clsName) : Class.forName(clsName, true, ldr);
 
             Constructor[] ctors = delegateCls.getConstructors();