This is an automated email from the ASF dual-hosted git repository.

spmallette pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/tinkerpop.git

commit 403a92d701e79ad88686553aea0d339247a75ed8
Merge: c7b28e5 5f4e2b6
Author: Stephen Mallette <sp...@genoprime.com>
AuthorDate: Thu Dec 13 14:15:07 2018 -0500

    Merge branch 'tp33'
    
    Conflicts:
        gremlin-server/src/test/java/org/apache/tinkerpop/gremlin/driver/remote/AbstractRemoteGraphProvider.java
        spark-gremlin/src/test/java/org/apache/tinkerpop/gremlin/spark/process/computer/SparkHadoopGraphProvider.java

 .../gremlin/driver/remote/AbstractRemoteGraphProvider.java         | 3 ++-
 .../jsr223/GryoRemoteGraphGroovyTranslatorComputerProvider.java    | 3 ++-
 .../gremlin/spark/process/computer/SparkHadoopGraphProvider.java   | 7 ++++---
 3 files changed, 8 insertions(+), 5 deletions(-)

diff --cc gremlin-server/src/test/java/org/apache/tinkerpop/gremlin/groovy/jsr223/GryoRemoteGraphGroovyTranslatorComputerProvider.java
index 4801811,c53516f..77fa5f6
--- a/gremlin-server/src/test/java/org/apache/tinkerpop/gremlin/groovy/jsr223/GryoRemoteGraphGroovyTranslatorComputerProvider.java
+++ b/gremlin-server/src/test/java/org/apache/tinkerpop/gremlin/groovy/jsr223/GryoRemoteGraphGroovyTranslatorComputerProvider.java
@@@ -342,32 -342,9 +342,33 @@@ import org.apache.tinkerpop.gremlin.tin
          test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.SelectTest",
          method = "g_V_untilXout_outX_repeatXin_asXaXX_selectXaX_byXtailXlocalX_nameX",
          reason = "Local traversals may not traverse past the local star-graph on GraphComputer")
 +@Graph.OptOut(
 +        test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ReadTest",
 +        method = "*",
 +        reason = "The io() step is not supported generally by GraphComputer")
 +@Graph.OptOut(
 +        test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.WriteTest",
 +        method = "*",
 +        reason = "The io() step is not supported generally by GraphComputer")
 +@Graph.OptOut(
 +        test = "org.apache.tinkerpop.gremlin.process.traversal.step.branch.RepeatTest",
 +        method = "g_V_repeatXout_repeatXoutX_timesX1XX_timesX1X_limitX1X_path_by_name",
 +        reason = "It is not possible to access more than a path element's id on GraphComputer")
 +@Graph.OptOut(
 +        test = "org.apache.tinkerpop.gremlin.process.traversal.step.branch.RepeatTest",
 +        method = "g_V_repeatXoutXknowsXX_untilXrepeatXoutXcreatedXX_emitXhasXname_lopXXX_path_byXnameX",
 +        reason = "It is not possible to access more than a path element's id on GraphComputer")
 +@Graph.OptOut(
 +        test = "org.apache.tinkerpop.gremlin.process.traversal.step.branch.RepeatTest",
 +        method = "g_VX3X_repeatXbothX_createdXX_untilXloops_is_40XXemit_repeatXin_knowsXX_emit_loopsXisX1Xdedup_values",
 +        reason = "Local traversals may not traverse past the local star-graph on GraphComputer")
 +@Graph.OptOut(
 +        test = "org.apache.tinkerpop.gremlin.process.traversal.step.branch.RepeatTest",
 +        method = "g_VX6X_repeatXa_bothXcreatedX_simplePathX_emitXrepeatXb_bothXknowsXX_untilXloopsXbX_asXb_whereXloopsXaX_asXbX_hasXname_vadasXX_dedup_name",
 +        reason = "Local traversals may not traverse past the local star-graph on GraphComputer")
  @GraphProvider.Descriptor(computer = TinkerGraphComputer.class)
  public class GryoRemoteGraphGroovyTranslatorComputerProvider extends GryoRemoteGraphGroovyTranslatorProvider {
+     private final int AVAILABLE_PROCESSORS = Runtime.getRuntime().availableProcessors();
  
      @Override
      public GraphTraversalSource traversal(final Graph graph) {
diff --cc spark-gremlin/src/test/java/org/apache/tinkerpop/gremlin/spark/process/computer/SparkHadoopGraphProvider.java
index 2eebac1,644484f..6c1efb1
--- a/spark-gremlin/src/test/java/org/apache/tinkerpop/gremlin/spark/process/computer/SparkHadoopGraphProvider.java
+++ b/spark-gremlin/src/test/java/org/apache/tinkerpop/gremlin/spark/process/computer/SparkHadoopGraphProvider.java
@@@ -60,42 -44,15 +60,43 @@@ import java.util.Set
  
  /**
   * @author Marko A. Rodriguez (http://markorodriguez.com)
 + * @author Stephen Mallette (http://stephen.genoprime.com)
   */
  @GraphProvider.Descriptor(computer = SparkGraphComputer.class)
 -public class SparkHadoopGraphProvider extends HadoopGraphProvider {
 +public class SparkHadoopGraphProvider extends AbstractFileGraphProvider {
  
 -    protected static final String PREVIOUS_SPARK_PROVIDER = "previous.spark.provider";
 +    static final String PREVIOUS_SPARK_PROVIDER = "previous.spark.provider";
+     private final int AVAILABLE_PROCESSORS = Runtime.getRuntime().availableProcessors();
  
 +    public static final Set<Class> IMPLEMENTATION = Collections.unmodifiableSet(new HashSet<Class>() {{
 +        add(HadoopEdge.class);
 +        add(HadoopElement.class);
 +        add(HadoopGraph.class);
 +        add(HadoopProperty.class);
 +        add(HadoopVertex.class);
 +        add(HadoopVertexProperty.class);
 +        add(ComputerGraph.class);
 +        add(ComputerGraph.ComputerElement.class);
 +        add(ComputerGraph.ComputerVertex.class);
 +        add(ComputerGraph.ComputerEdge.class);
 +        add(ComputerGraph.ComputerVertexProperty.class);
 +        add(ComputerGraph.ComputerAdjacentVertex.class);
 +        add(ComputerGraph.ComputerProperty.class);
 +    }});
 +
 +    @Override
 +    public void loadGraphData(final Graph graph, final LoadGraphWith loadGraphWith, final Class testClass, final String testName) {
 +        if (loadGraphWith != null) ((HadoopGraph) graph).configuration().setInputLocation(getInputLocation(graph, loadGraphWith.value()));
 +    }
 +
 +    @Override
 +    public Set<Class> getImplementations() {
 +        return IMPLEMENTATION;
 +    }
 +
      @Override
      public Map<String, Object> getBaseConfiguration(final String graphName, final Class<?> test, final String testMethodName, final LoadGraphWith.GraphData loadGraphWith) {
 +        this.graphSONInput = RANDOM.nextBoolean();
          if (this.getClass().equals(SparkHadoopGraphProvider.class) && !SparkHadoopGraphProvider.class.getCanonicalName().equals(System.getProperty(PREVIOUS_SPARK_PROVIDER, null))) {
              Spark.close();
              HadoopPools.close();
