This is an automated email from the ASF dual-hosted git repository.

dschneider pushed a commit to branch develop
in repository https://gitbox.apache.org/repos/asf/geode.git


The following commit(s) were added to refs/heads/develop by this push:
     new 909124a  GEODE-6459:  add create jdbc-mapping option to specify the 
PDX class file (#3249)
909124a is described below

commit 909124a41ad4c54f9c470915f8f8382fc96947c2
Author: Jianxia Chen <[email protected]>
AuthorDate: Thu Mar 7 18:36:43 2019 -0800

    GEODE-6459:  add create jdbc-mapping option to specify the PDX class file 
(#3249)
    
    Added a new --pdx-class-file option to gfsh create jdbc-mapping that can 
take a jar or class that contains the named pdx class.
    This file needs to be accessible by the gfsh process. It will be 
transferred to the server and used to define the pdx info in the jdbc-mapping.
    
    Co-authored-by: Darrel Schneider <[email protected]>
    Co-authored-by: Jianxia Chen <[email protected]>
---
 .../cli/CreateMappingCommandDUnitTest.java         | 133 ++++++++++++-
 .../jdbc/internal/cli/CreateMappingCommand.java    | 114 +++++++++++-
 .../CreateMappingPreconditionCheckFunction.java    | 206 +++++++++++++++------
 .../connectors/util/internal/MappingConstants.java |   1 +
 .../cli/CreateMappingCommandInterceptorTest.java   | 104 +++++++++++
 .../internal/cli/CreateMappingCommandTest.java     | 135 +++++++++++---
 ...CreateMappingPreconditionCheckFunctionTest.java | 173 +++++++++++++----
 7 files changed, 728 insertions(+), 138 deletions(-)

diff --git 
a/geode-connectors/src/distributedTest/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandDUnitTest.java
 
b/geode-connectors/src/distributedTest/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandDUnitTest.java
index feaca81..06043f1 100644
--- 
a/geode-connectors/src/distributedTest/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandDUnitTest.java
+++ 
b/geode-connectors/src/distributedTest/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandDUnitTest.java
@@ -19,6 +19,7 @@ import static 
org.apache.geode.connectors.jdbc.internal.cli.DescribeMappingComma
 import static 
org.apache.geode.connectors.util.internal.MappingConstants.DATA_SOURCE_NAME;
 import static 
org.apache.geode.connectors.util.internal.MappingConstants.GROUP_NAME;
 import static 
org.apache.geode.connectors.util.internal.MappingConstants.ID_NAME;
+import static 
org.apache.geode.connectors.util.internal.MappingConstants.PDX_CLASS_FILE;
 import static 
org.apache.geode.connectors.util.internal.MappingConstants.PDX_NAME;
 import static 
org.apache.geode.connectors.util.internal.MappingConstants.REGION_NAME;
 import static 
org.apache.geode.connectors.util.internal.MappingConstants.SCHEMA_NAME;
@@ -27,6 +28,7 @@ import static 
org.apache.geode.connectors.util.internal.MappingConstants.TABLE_N
 import static org.assertj.core.api.Assertions.assertThat;
 
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.sql.Connection;
@@ -66,7 +68,9 @@ import org.apache.geode.pdx.FieldType;
 import org.apache.geode.pdx.PdxReader;
 import org.apache.geode.pdx.PdxSerializable;
 import org.apache.geode.pdx.PdxWriter;
+import org.apache.geode.test.compiler.CompiledSourceCode;
 import org.apache.geode.test.compiler.JarBuilder;
+import org.apache.geode.test.compiler.JavaCompiler;
 import org.apache.geode.test.dunit.IgnoredException;
 import org.apache.geode.test.dunit.rules.ClusterStartupRule;
 import org.apache.geode.test.dunit.rules.MemberVM;
@@ -544,24 +548,43 @@ public class CreateMappingCommandDUnitTest {
     }
   }
 
-  private File loadTestResource(String fileName) throws URISyntaxException {
+  private File loadTestResource(String fileName) {
     String filePath = TestUtil.getResourcePath(this.getClass(), fileName);
     assertThat(filePath).isNotNull();
 
     return new File(filePath);
   }
 
-  private void deployJar() throws URISyntaxException, IOException {
+  private void deployJar() throws IOException {
+    File outputJar = createJar();
+
+    CommandStringBuilder csb = new CommandStringBuilder(CliStrings.DEPLOY);
+    csb.addOption(CliStrings.JAR, outputJar.getAbsolutePath());
+    gfsh.executeAndAssertThat(csb.toString()).statusIsSuccess();
+  }
+
+  private File createJar() throws IOException {
     JarBuilder jarBuilder = new JarBuilder();
     File source = loadTestResource(
         "/org/apache/geode/internal/ResourcePDX.java");
 
     File outputJar = new File(temporaryFolder.getRoot(), "output.jar");
     jarBuilder.buildJar(outputJar, source);
+    return outputJar;
+  }
 
-    CommandStringBuilder csb = new CommandStringBuilder(CliStrings.DEPLOY);
-    csb.addOption(CliStrings.JAR, outputJar.getAbsolutePath());
-    gfsh.executeAndAssertThat(csb.toString()).statusIsSuccess();
+  private File createClassFile() throws IOException {
+    final JavaCompiler javaCompiler = new JavaCompiler();
+    File source = loadTestResource(
+        "/org/apache/geode/internal/ResourcePDX.java");
+    List<CompiledSourceCode> compiledSourceCodes = 
javaCompiler.compile(source);
+    String className = compiledSourceCodes.get(0).className;
+    String fileName = className.substring(className.lastIndexOf(".") + 1) + 
".class";
+    File file = new File(temporaryFolder.getRoot(), fileName);
+    FileOutputStream fileOutputStream = new FileOutputStream(file);
+    fileOutputStream.write(compiledSourceCodes.get(0).compiledBytecode);
+    fileOutputStream.close();
+    return file;
   }
 
   @Test
@@ -594,7 +617,6 @@ public class CreateMappingCommandDUnitTest {
     csb.addOption(PDX_NAME, "org.apache.geode.internal.ResourcePDX");
     csb.addOption(ID_NAME, "id");
     csb.addOption(SCHEMA_NAME, "mySchema");
-    IgnoredException.addIgnoredException(ClassNotFoundException.class);
 
     deployJar();
     gfsh.executeAndAssertThat(csb.toString()).statusIsSuccess();
@@ -611,6 +633,103 @@ public class CreateMappingCommandDUnitTest {
     });
   }
 
+  @Test
+  public void createMappingWithPdxClassFileSetToAJarFile() throws IOException, 
URISyntaxException {
+    String region1Name = "region1";
+    setupReplicate(region1Name);
+    File jarFile = createJar();
+
+    CommandStringBuilder csb = new CommandStringBuilder(CREATE_MAPPING);
+    csb.addOption(REGION_NAME, region1Name);
+    csb.addOption(DATA_SOURCE_NAME, "connection");
+    csb.addOption(TABLE_NAME, "employeeRegion");
+    csb.addOption(PDX_NAME, "org.apache.geode.internal.ResourcePDX");
+    csb.addOption(ID_NAME, "id");
+    csb.addOption(SCHEMA_NAME, "mySchema");
+    csb.addOption(PDX_CLASS_FILE, jarFile);
+
+    gfsh.executeAndAssertThat(csb.toString()).statusIsSuccess();
+
+    server1.invoke(() -> {
+      RegionMapping mapping = getRegionMappingFromService(region1Name);
+      assertValidResourcePDXMappingOnServer(mapping, region1Name, false, 
false, "employeeRegion");
+    });
+
+    locator.invoke(() -> {
+      RegionMapping regionMapping = 
getRegionMappingFromClusterConfig(region1Name, null);
+      assertValidResourcePDXMappingOnLocator(regionMapping, region1Name, null, 
false, false,
+          "employeeRegion");
+    });
+  }
+
+  @Test
+  public void createMappingWithNonExistingPdxClassFileFails() throws 
IOException {
+    String region1Name = "region1";
+    setupReplicate(region1Name);
+
+    CommandStringBuilder csb = new CommandStringBuilder(CREATE_MAPPING);
+    csb.addOption(REGION_NAME, region1Name);
+    csb.addOption(DATA_SOURCE_NAME, "connection");
+    csb.addOption(TABLE_NAME, "employeeRegion");
+    csb.addOption(PDX_NAME, "org.apache.geode.internal.ResourcePDX");
+    csb.addOption(ID_NAME, "id");
+    csb.addOption(SCHEMA_NAME, "mySchema");
+    csb.addOption(PDX_CLASS_FILE, "NonExistingJarFile.jar");
+
+    gfsh.executeAndAssertThat(csb.toString()).statusIsError()
+        .containsOutput("NonExistingJarFile.jar not found.");
+  }
+
+  @Test
+  public void createMappingWithInvalidJarPdxClassFileFails() throws 
IOException {
+    String region1Name = "region1";
+    setupReplicate(region1Name);
+    File invalidFile = loadTestResource(
+        "/org/apache/geode/internal/ResourcePDX.java");
+
+    CommandStringBuilder csb = new CommandStringBuilder(CREATE_MAPPING);
+    csb.addOption(REGION_NAME, region1Name);
+    csb.addOption(DATA_SOURCE_NAME, "connection");
+    csb.addOption(TABLE_NAME, "employeeRegion");
+    csb.addOption(PDX_NAME, "org.apache.geode.internal.ResourcePDX");
+    csb.addOption(ID_NAME, "id");
+    csb.addOption(SCHEMA_NAME, "mySchema");
+    csb.addOption(PDX_CLASS_FILE, invalidFile);
+
+    gfsh.executeAndAssertThat(csb.toString()).statusIsError()
+        .containsOutput(invalidFile + " must end with \".jar\" or 
\".class\".");
+  }
+
+  @Test
+  public void createMappingWithPdxClassFileSetToAClassFile()
+      throws IOException, URISyntaxException {
+    String region1Name = "region1";
+    setupReplicate(region1Name);
+    File classFile = createClassFile();
+
+    CommandStringBuilder csb = new CommandStringBuilder(CREATE_MAPPING);
+    csb.addOption(REGION_NAME, region1Name);
+    csb.addOption(DATA_SOURCE_NAME, "connection");
+    csb.addOption(TABLE_NAME, "employeeRegion");
+    csb.addOption(PDX_NAME, "org.apache.geode.internal.ResourcePDX");
+    csb.addOption(ID_NAME, "id");
+    csb.addOption(SCHEMA_NAME, "mySchema");
+    csb.addOption(PDX_CLASS_FILE, classFile);
+
+    gfsh.executeAndAssertThat(csb.toString()).statusIsSuccess();
+
+    server1.invoke(() -> {
+      RegionMapping mapping = getRegionMappingFromService(region1Name);
+      assertValidResourcePDXMappingOnServer(mapping, region1Name, false, 
false, "employeeRegion");
+    });
+
+    locator.invoke(() -> {
+      RegionMapping regionMapping = 
getRegionMappingFromClusterConfig(region1Name, null);
+      assertValidResourcePDXMappingOnLocator(regionMapping, region1Name, null, 
false, false,
+          "employeeRegion");
+    });
+  }
+
   private static void assertValidEmployeeMapping(RegionMapping mapping, String 
tableName) {
     assertThat(mapping.getDataSourceName()).isEqualTo("connection");
     assertThat(mapping.getTableName()).isEqualTo(tableName);
@@ -1090,7 +1209,7 @@ public class CreateMappingCommandDUnitTest {
     // NOTE: --table is optional so it should not be in the output but it is. 
See GEODE-3468.
     gfsh.executeAndAssertThat(csb.toString()).statusIsError()
         .containsOutput(
-            "You should specify option (--table, --pdx-name, --synchronous, 
--id, --catalog, --schema, --group) for this command");
+            "You should specify option (--table, --pdx-name, --pdx-class-file, 
--synchronous, --id, --catalog, --schema, --group) for this command");
   }
 
   @Test
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommand.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommand.java
index 7d6d414..3f2451b 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommand.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommand.java
@@ -14,10 +14,20 @@
  */
 package org.apache.geode.connectors.jdbc.internal.cli;
 
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 
+import com.healthmarketscience.rmiio.RemoteInputStream;
+import com.healthmarketscience.rmiio.SimpleRemoteInputStream;
+import com.healthmarketscience.rmiio.exporter.RemoteStreamExporter;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
 import org.springframework.shell.core.annotation.CliCommand;
 import org.springframework.shell.core.annotation.CliOption;
@@ -40,9 +50,18 @@ import 
org.apache.geode.distributed.ConfigurationPersistenceService;
 import org.apache.geode.distributed.DistributedMember;
 import org.apache.geode.management.cli.CliMetaData;
 import org.apache.geode.management.cli.ConverterHint;
+import org.apache.geode.management.cli.Result;
 import org.apache.geode.management.cli.SingleGfshCommand;
+import org.apache.geode.management.internal.ManagementAgent;
+import org.apache.geode.management.internal.SystemManagementService;
+import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
+import org.apache.geode.management.internal.cli.GfshParseResult;
 import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
 import org.apache.geode.management.internal.cli.i18n.CliStrings;
+import org.apache.geode.management.internal.cli.remote.CommandExecutionContext;
+import org.apache.geode.management.internal.cli.result.FileResult;
+import org.apache.geode.management.internal.cli.result.ModelCommandResult;
+import org.apache.geode.management.internal.cli.result.ResultBuilder;
 import org.apache.geode.management.internal.cli.result.model.ResultModel;
 import org.apache.geode.management.internal.security.ResourceOperation;
 import org.apache.geode.security.ResourcePermission;
@@ -79,9 +98,14 @@ public class CreateMappingCommand extends SingleGfshCommand {
   private static final String CREATE_MAPPING__GROUPS_NAME = "groups";
   private static final String CREATE_MAPPING__GROUPS_NAME__HELP =
       "The names of the server groups on which this mapping should be 
created.";
+  private static final String CREATE_MAPPING__PDX_CLASS_FILE = 
MappingConstants.PDX_CLASS_FILE;
+  private static final String CREATE_MAPPING__PDX_CLASS_FILE__HELP =
+      "The file that contains the PDX class. It must be a file with the 
\".jar\" or \".class\" extension. By default, the PDX class must be on the 
server's classpath or gfsh deployed.";
 
   @CliCommand(value = CREATE_MAPPING, help = CREATE_MAPPING__HELP)
-  @CliMetaData(relatedTopic = CliStrings.DEFAULT_TOPIC_GEODE)
+  @CliMetaData(
+      interceptor = 
"org.apache.geode.connectors.jdbc.internal.cli.CreateMappingCommand$Interceptor",
+      relatedTopic = {CliStrings.DEFAULT_TOPIC_GEODE})
   @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
       operation = ResourcePermission.Operation.MANAGE)
   public ResultModel createMapping(
@@ -93,6 +117,8 @@ public class CreateMappingCommand extends SingleGfshCommand {
           help = CREATE_MAPPING__TABLE_NAME__HELP) String table,
       @CliOption(key = CREATE_MAPPING__PDX_NAME, mandatory = true,
           help = CREATE_MAPPING__PDX_NAME__HELP) String pdxName,
+      @CliOption(key = CREATE_MAPPING__PDX_CLASS_FILE,
+          help = CREATE_MAPPING__PDX_CLASS_FILE__HELP) String pdxClassFile,
       @CliOption(key = CREATE_MAPPING__SYNCHRONOUS_NAME,
           help = CREATE_MAPPING__SYNCHRONOUS_NAME__HELP,
           specifiedDefaultValue = "true", unspecifiedDefaultValue = "false") 
boolean synchronous,
@@ -103,11 +129,25 @@ public class CreateMappingCommand extends 
SingleGfshCommand {
           help = CREATE_MAPPING__SCHEMA_NAME__HELP) String schema,
       @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
           optionContext = ConverterHint.MEMBERGROUP,
-          help = CREATE_MAPPING__GROUPS_NAME__HELP) String[] groups) {
+          help = CREATE_MAPPING__GROUPS_NAME__HELP) String[] groups)
+      throws IOException {
     if (regionName.startsWith("/")) {
       regionName = regionName.substring(1);
     }
 
+    String tempPdxClassFilePath = null;
+    String remoteInputStreamName = null;
+    RemoteInputStream remoteInputStream = null;
+    if (pdxClassFile != null) {
+      List<String> pdxClassFilePaths = getFilePathFromShell();
+      if (pdxClassFilePaths.size() != 1) {
+        throw new IllegalStateException(
+            "Expected only one element in the list returned by 
getFilePathFromShell, but it returned: "
+                + pdxClassFilePaths);
+      }
+      tempPdxClassFilePath = pdxClassFilePaths.get(0);
+    }
+
     Set<DistributedMember> targetMembers = findMembers(groups, null);
     RegionMapping mapping =
         new RegionMapping(regionName, pdxName, table, dataSourceName, id, 
catalog, schema);
@@ -131,9 +171,30 @@ public class CreateMappingCommand extends 
SingleGfshCommand {
       return ResultModel.createError(ex.getMessage());
     }
 
-    CliFunctionResult preconditionCheckResult =
-        executeFunctionAndGetFunctionResult(new 
CreateMappingPreconditionCheckFunction(), mapping,
-            targetMembers.iterator().next());
+    if (pdxClassFile != null) {
+      ManagementAgent agent =
+          ((SystemManagementService) 
getManagementService()).getManagementAgent();
+      RemoteStreamExporter exporter = agent.getRemoteStreamExporter();
+      remoteInputStreamName = FilenameUtils.getName(tempPdxClassFilePath);
+      remoteInputStream =
+          exporter.export(createSimpleRemoteInputStream(tempPdxClassFilePath));
+    }
+
+    CliFunctionResult preconditionCheckResult = null;
+    try {
+      preconditionCheckResult =
+          executeFunctionAndGetFunctionResult(new 
CreateMappingPreconditionCheckFunction(),
+              new Object[] {mapping, remoteInputStreamName, remoteInputStream},
+              targetMembers.iterator().next());
+    } finally {
+      if (remoteInputStream != null) {
+        try {
+          remoteInputStream.close(true);
+        } catch (IOException ex) {
+          // Ignored. the stream may have already been closed.
+        }
+      }
+    }
     if (preconditionCheckResult.isSuccessful()) {
       Object[] preconditionOutput = (Object[]) 
preconditionCheckResult.getResultObject();
       String computedIds = (String) preconditionOutput[0];
@@ -160,6 +221,11 @@ public class CreateMappingCommand extends 
SingleGfshCommand {
     return result;
   }
 
+  SimpleRemoteInputStream createSimpleRemoteInputStream(String 
tempPdxClassFilePath)
+      throws FileNotFoundException {
+    return new SimpleRemoteInputStream(new 
FileInputStream(tempPdxClassFilePath));
+  }
+
   private ConfigurationPersistenceService checkForClusterConfiguration()
       throws PreconditionException {
     ConfigurationPersistenceService result = 
getConfigurationPersistenceService();
@@ -315,4 +381,42 @@ public class CreateMappingCommand extends 
SingleGfshCommand {
     writer.setClassName(JdbcWriter.class.getName());
     attributes.setCacheWriter(writer);
   }
+
+  /**
+   * Interceptor used by gfsh to intercept execution of create jdbc-mapping 
command at "shell".
+   */
+  public static class Interceptor extends AbstractCliAroundInterceptor {
+
+    @Override
+    public Result preExecution(GfshParseResult parseResult) {
+      String pdxClassFileName = (String) 
parseResult.getParamValue(CREATE_MAPPING__PDX_CLASS_FILE);
+
+      if (StringUtils.isBlank(pdxClassFileName)) {
+        return new ModelCommandResult(ResultModel.createInfo(""));
+      }
+
+      FileResult fileResult = new FileResult();
+      File pdxClassFile = new File(pdxClassFileName);
+      if (!pdxClassFile.exists()) {
+        return ResultBuilder.createUserErrorResult(pdxClassFile + " not 
found.");
+      }
+      if (!pdxClassFile.isFile()) {
+        return ResultBuilder.createUserErrorResult(pdxClassFile + " is not a 
file.");
+      }
+      String fileExtension = FilenameUtils.getExtension(pdxClassFileName);
+      if (!fileExtension.equalsIgnoreCase("jar") && 
!fileExtension.equalsIgnoreCase("class")) {
+        return ResultBuilder
+            .createUserErrorResult(pdxClassFile + " must end with \".jar\" or 
\".class\".");
+      }
+      fileResult.addFile(pdxClassFile);
+
+      return fileResult;
+    }
+  }
+
+  // For testing purpose
+  List<String> getFilePathFromShell() {
+    return CommandExecutionContext.getFilePathFromShell();
+  }
+
 }
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
index 53628da..2854cb0 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
@@ -14,9 +14,19 @@
  */
 package org.apache.geode.connectors.jdbc.internal.cli;
 
-import java.io.ObjectInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.sql.Connection;
 import java.sql.JDBCType;
 import java.sql.SQLException;
@@ -26,11 +36,16 @@ import java.util.Set;
 
 import javax.sql.DataSource;
 
+import com.healthmarketscience.rmiio.RemoteInputStream;
+import com.healthmarketscience.rmiio.RemoteInputStreamClient;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
+
 import org.apache.geode.SerializationException;
 import org.apache.geode.annotations.Experimental;
 import org.apache.geode.cache.execute.FunctionContext;
 import org.apache.geode.connectors.jdbc.JdbcConnectorException;
-import org.apache.geode.connectors.jdbc.internal.SqlHandler.DataSourceFactory;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
 import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
@@ -49,50 +64,16 @@ import org.apache.geode.pdx.internal.PdxWriterImpl;
 import org.apache.geode.pdx.internal.TypeRegistry;
 
 @Experimental
-public class CreateMappingPreconditionCheckFunction extends 
CliFunction<RegionMapping> {
-
-  private transient DataSourceFactory dataSourceFactory;
-  private transient ClassFactory classFactory;
-  private transient ReflectionBasedAutoSerializerFactory 
reflectionBasedAutoSerializerFactory;
-  private transient PdxWriterFactory pdxWriterFactory;
-  private transient TableMetaDataManager tableMetaDataManager;
-
-  CreateMappingPreconditionCheckFunction(DataSourceFactory factory, 
ClassFactory classFactory,
-      ReflectionBasedAutoSerializerFactory 
reflectionBasedAutoSerializerFactory,
-      PdxWriterFactory pdxWriterFactory,
-      TableMetaDataManager manager) {
-    this.dataSourceFactory = factory;
-    this.classFactory = classFactory;
-    this.reflectionBasedAutoSerializerFactory = 
reflectionBasedAutoSerializerFactory;
-    this.pdxWriterFactory = pdxWriterFactory;
-    this.tableMetaDataManager = manager;
-  }
-
-  CreateMappingPreconditionCheckFunction() {
-    this(dataSourceName -> JNDIInvoker.getDataSource(dataSourceName),
-        className -> ClassPathLoader.getLatest().forName(className),
-        className -> new ReflectionBasedAutoSerializer(className),
-        (typeRegistry, object) -> new PdxWriterImpl(typeRegistry, object, new 
PdxOutputStream()),
-        new TableMetaDataManager());
-  }
-
-  // used by java during deserialization
-  private void readObject(ObjectInputStream stream) {
-    this.dataSourceFactory = dataSourceName -> 
JNDIInvoker.getDataSource(dataSourceName);
-    this.classFactory = className -> 
ClassPathLoader.getLatest().forName(className);
-    this.reflectionBasedAutoSerializerFactory =
-        className -> new ReflectionBasedAutoSerializer(className);
-    this.pdxWriterFactory =
-        (typeRegistry, object) -> new PdxWriterImpl(typeRegistry, object, new 
PdxOutputStream());
-    this.tableMetaDataManager = new TableMetaDataManager();
-  }
+public class CreateMappingPreconditionCheckFunction extends 
CliFunction<Object[]> {
 
   @Override
-  public CliFunctionResult executeFunction(FunctionContext<RegionMapping> 
context)
-      throws Exception {
-    RegionMapping regionMapping = context.getArguments();
+  public CliFunctionResult executeFunction(FunctionContext<Object[]> context) {
+    Object[] args = context.getArguments();
+    RegionMapping regionMapping = (RegionMapping) args[0];
+    String remoteInputStreamName = (String) args[1];
+    RemoteInputStream remoteInputStream = (RemoteInputStream) args[2];
     String dataSourceName = regionMapping.getDataSourceName();
-    DataSource dataSource = dataSourceFactory.getDataSource(dataSourceName);
+    DataSource dataSource = getDataSource(dataSourceName);
     if (dataSource == null) {
       throw new JdbcConnectorException("JDBC data-source named \"" + 
dataSourceName
           + "\" not found. Create it with gfsh 'create data-source --pooled 
--name="
@@ -100,10 +81,11 @@ public class CreateMappingPreconditionCheckFunction 
extends CliFunction<RegionMa
     }
     InternalCache cache = (InternalCache) context.getCache();
     TypeRegistry typeRegistry = cache.getPdxRegistry();
-    PdxType pdxType = getPdxTypeForClass(cache, typeRegistry, 
regionMapping.getPdxName());
+    PdxType pdxType = getPdxTypeForClass(cache, typeRegistry, 
regionMapping.getPdxName(),
+        remoteInputStreamName, remoteInputStream);
     try (Connection connection = dataSource.getConnection()) {
       TableMetaDataView tableMetaData =
-          tableMetaDataManager.getTableMetaDataView(connection, regionMapping);
+          getTableMetaDataManager().getTableMetaDataView(connection, 
regionMapping);
       // TODO the table name returned in tableMetaData may be different than
       // the table name specified on the command line at this point.
       // Do we want to update the region mapping to hold the "real" table name
@@ -168,8 +150,8 @@ public class CreateMappingPreconditionCheckFunction extends 
CliFunction<RegionMa
   }
 
   private PdxType getPdxTypeForClass(InternalCache cache, TypeRegistry 
typeRegistry,
-      String className) {
-    Class<?> clazz = loadPdxClass(className);
+      String className, String remoteInputStreamName, RemoteInputStream 
remoteInputStream) {
+    Class<?> clazz = loadPdxClass(className, remoteInputStreamName, 
remoteInputStream);
     PdxType result = typeRegistry.getExistingTypeForClass(clazz);
     if (result != null) {
       return result;
@@ -194,8 +176,8 @@ public class CreateMappingPreconditionCheckFunction extends 
CliFunction<RegionMa
     } catch (SerializationException ex) {
       String className = clazz.getName();
       ReflectionBasedAutoSerializer serializer =
-          this.reflectionBasedAutoSerializerFactory.create("\\Q" + className + 
"\\E");
-      PdxWriter writer = this.pdxWriterFactory.create(typeRegistry, object);
+          getReflectionBasedAutoSerializer("\\Q" + className + "\\E");
+      PdxWriter writer = createPdxWriter(typeRegistry, object);
       boolean result = serializer.toData(object, writer);
       if (!result) {
         throw new JdbcConnectorException(
@@ -220,22 +202,132 @@ public class CreateMappingPreconditionCheckFunction 
extends CliFunction<RegionMa
     }
   }
 
-  private Class<?> loadPdxClass(String className) {
+  private Class<?> loadPdxClass(String className, String remoteInputStreamName,
+      RemoteInputStream remoteInputStream) {
     try {
-      return this.classFactory.loadClass(className);
+      if (remoteInputStream != null) {
+        return loadPdxClassFromRemoteStream(className, remoteInputStreamName, 
remoteInputStream);
+      } else {
+        return loadClass(className);
+      }
     } catch (ClassNotFoundException ex) {
       throw new JdbcConnectorException(
           "The pdx class \"" + className + "\" could not be loaded because: " 
+ ex);
     }
   }
 
-  public interface ClassFactory {
-    public Class loadClass(String className) throws ClassNotFoundException;
+  private Class<?> loadPdxClassFromRemoteStream(String className, String 
remoteInputStreamName,
+      RemoteInputStream remoteInputStream) throws ClassNotFoundException {
+    Path tempDir = createTemporaryDirectory("pdx-class-dir-");
+    try {
+      File file =
+          copyRemoteInputStreamToTempFile(className, remoteInputStreamName, 
remoteInputStream,
+              tempDir);
+      return loadClass(className, createURL(file, tempDir));
+    } finally {
+      deleteDirectory(tempDir);
+    }
+  }
+
+  Path createTemporaryDirectory(String prefix) {
+    try {
+      return createTempDirectory(prefix);
+    } catch (IOException ex) {
+      throw new JdbcConnectorException(
+          "Could not create a temporary directory with the prefix \"" + prefix 
+ "\" because: "
+              + ex);
+    }
+
+  }
+
+  void deleteDirectory(Path tempDir) {
+    try {
+      FileUtils.deleteDirectory(tempDir.toFile());
+    } catch (IOException ioe) {
+      // ignore
+    }
+  }
+
+  private URL createURL(File file, Path tempDir) {
+    URI uri;
+    if (isJar(file.getName())) {
+      uri = file.toURI();
+    } else {
+      uri = tempDir.toUri();
+    }
+    try {
+      return uri.toURL();
+    } catch (MalformedURLException e) {
+      throw new JdbcConnectorException(
+          "Could not convert \"" + uri + "\" to a URL, because: " + e);
+    }
+  }
+
+  private boolean isJar(String fileName) {
+    String fileExtension = FilenameUtils.getExtension(fileName);
+    return fileExtension.equalsIgnoreCase("jar");
   }
-  public interface ReflectionBasedAutoSerializerFactory {
-    public ReflectionBasedAutoSerializer create(String className);
+
+  private File copyRemoteInputStreamToTempFile(String className, String 
remoteInputStreamName,
+      RemoteInputStream remoteInputStream, Path tempDir) {
+    if (!isJar(remoteInputStreamName) && className.contains(".")) {
+      File packageDir = new File(tempDir.toFile(), className.replace(".", 
"/")).getParentFile();
+      packageDir.mkdirs();
+      tempDir = packageDir.toPath();
+    }
+    try {
+      Path tempPdxClassFile = Paths.get(tempDir.toString(), 
remoteInputStreamName);
+      try (InputStream input = RemoteInputStreamClient.wrap(remoteInputStream);
+          FileOutputStream output = new 
FileOutputStream(tempPdxClassFile.toString())) {
+        copyFile(input, output);
+      }
+      return tempPdxClassFile.toFile();
+    } catch (IOException iox) {
+      throw new JdbcConnectorException(
+          "The pdx class file \"" + remoteInputStreamName
+              + "\" could not be copied to a temporary file, because: " + iox);
+    }
   }
-  public interface PdxWriterFactory {
-    public PdxWriter create(TypeRegistry typeRegistry, Object object);
+
+
+  // unit test mocks this method
+  DataSource getDataSource(String dataSourceName) {
+    return JNDIInvoker.getDataSource(dataSourceName);
+  }
+
+  // unit test mocks this method
+  Class<?> loadClass(String className) throws ClassNotFoundException {
+    return ClassPathLoader.getLatest().forName(className);
+  }
+
+  // unit test mocks this method
+  Class<?> loadClass(String className, URL url) throws ClassNotFoundException {
+    return URLClassLoader.newInstance(new URL[] {url}).loadClass(className);
   }
+
+  // unit test mocks this method
+  ReflectionBasedAutoSerializer getReflectionBasedAutoSerializer(String 
className) {
+    return new ReflectionBasedAutoSerializer(className);
+  }
+
+  // unit test mocks this method
+  PdxWriter createPdxWriter(TypeRegistry typeRegistry, Object object) {
+    return new PdxWriterImpl(typeRegistry, object, new PdxOutputStream());
+  }
+
+  // unit test mocks this method
+  TableMetaDataManager getTableMetaDataManager() {
+    return new TableMetaDataManager();
+  }
+
+  // unit test mocks this method
+  Path createTempDirectory(String prefix) throws IOException {
+    return Files.createTempDirectory(prefix);
+  }
+
+  // unit test mocks this method
+  void copyFile(InputStream input, FileOutputStream output) throws IOException 
{
+    IOUtils.copyLarge(input, output);
+  }
+
 }
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/util/internal/MappingConstants.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/util/internal/MappingConstants.java
index aec52c1..b22f99e 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/util/internal/MappingConstants.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/util/internal/MappingConstants.java
@@ -24,6 +24,7 @@ public final class MappingConstants {
   public static final String SCHEMA_NAME = "schema";
   public static final String CATALOG_NAME = "catalog";
   public static final String GROUP_NAME = "groups";
+  public static final String PDX_CLASS_FILE = "pdx-class-file";
 
   private MappingConstants() {}
 }
diff --git a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandInterceptorTest.java b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandInterceptorTest.java
new file mode 100644
index 0000000..d85348b
--- /dev/null
+++ b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandInterceptorTest.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional 
information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache 
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the 
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software 
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express
+ * or implied. See the License for the specific language governing permissions 
and limitations under
+ * the License.
+ */
+package org.apache.geode.connectors.jdbc.internal.cli;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import org.apache.geode.connectors.util.internal.MappingConstants;
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.cli.GfshParseResult;
+import org.apache.geode.management.internal.cli.result.FileResult;
+
+public class CreateMappingCommandInterceptorTest {
+
+  private final CreateMappingCommand.Interceptor interceptor =
+      new CreateMappingCommand.Interceptor();
+
+  private GfshParseResult gfshParseResult = mock(GfshParseResult.class);
+
+  @Rule
+  public TemporaryFolder testFolder = new TemporaryFolder();
+
+  @Test
+  public void preExecutionGivenNullPdxClassFileReturnsOK() {
+    when(gfshParseResult.getParamValue(MappingConstants.PDX_CLASS_FILE)).thenReturn(null);
+    Result result = interceptor.preExecution(gfshParseResult);
+    assertThat(result.getStatus()).isEqualTo(Result.Status.OK);
+  }
+
+  @Test
+  public void preExecutionGivenNonExistingPdxClassFileReturnsError() {
+    when(gfshParseResult.getParamValue(MappingConstants.PDX_CLASS_FILE))
+        .thenReturn("NonExistingFile");
+    Result result = interceptor.preExecution(gfshParseResult);
+    assertThat(result.getStatus()).isEqualTo(Result.Status.ERROR);
+    assertThat(result.nextLine()).contains("NonExistingFile not found.");
+  }
+
+  @Test
+  public void preExecutionGivenDirectoryAsPdxClassFileReturnsError() throws 
IOException {
+    File tempFolder = testFolder.newFolder("tempFolder");
+    when(gfshParseResult.getParamValue(MappingConstants.PDX_CLASS_FILE))
+        .thenReturn(tempFolder.getAbsolutePath());
+    Result result = interceptor.preExecution(gfshParseResult);
+    assertThat(result.getStatus()).isEqualTo(Result.Status.ERROR);
+    assertThat(result.nextLine()).contains(tempFolder.getAbsolutePath() + " is 
not a file.");
+  }
+
+  @Test
+  public void 
preExecutionGivenFileWithoutExtensionAsPdxClassFileReturnsError() throws 
IOException {
+    File tempFile = testFolder.newFile("tempFile");
+    when(gfshParseResult.getParamValue(MappingConstants.PDX_CLASS_FILE))
+        .thenReturn(tempFile.getAbsolutePath());
+    Result result = interceptor.preExecution(gfshParseResult);
+    assertThat(result.getStatus()).isEqualTo(Result.Status.ERROR);
+    assertThat(result.nextLine())
+        .contains(tempFile.getAbsolutePath() + " must end with \".jar\" or 
\".class\".");
+  }
+
+  @Test
+  public void preExecutionGivenClassFileAsPdxClassFileReturnsOK() throws 
IOException {
+    File tempFile = testFolder.newFile("tempFile.class");
+    when(gfshParseResult.getParamValue(MappingConstants.PDX_CLASS_FILE))
+        .thenReturn(tempFile.getAbsolutePath());
+    Result result = interceptor.preExecution(gfshParseResult);
+    assertThat(result.getStatus()).isEqualTo(Result.Status.OK);
+    assertThat(result).isInstanceOf(FileResult.class);
+    FileResult fileResult = (FileResult) result;
+    assertThat(fileResult.getFiles()).containsExactly(tempFile);
+  }
+
+  @Test
+  public void preExecutionGivenJarFileAsPdxClassFileReturnsOK() throws 
IOException {
+    File tempFile = testFolder.newFile("tempFile.jar");
+    when(gfshParseResult.getParamValue(MappingConstants.PDX_CLASS_FILE))
+        .thenReturn(tempFile.getAbsolutePath());
+    Result result = interceptor.preExecution(gfshParseResult);
+    assertThat(result.getStatus()).isEqualTo(Result.Status.OK);
+    assertThat(result).isInstanceOf(FileResult.class);
+    FileResult fileResult = (FileResult) result;
+    assertThat(fileResult.getFiles()).containsExactly(tempFile);
+  }
+
+}
diff --git a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandTest.java b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandTest.java
index 3ce3fa2..9c1264b 100644
--- a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandTest.java
+++ b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandTest.java
@@ -15,6 +15,7 @@
 package org.apache.geode.connectors.jdbc.internal.cli;
 
 import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
@@ -23,12 +24,18 @@ import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.rmi.RemoteException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import com.healthmarketscience.rmiio.RemoteInputStream;
+import com.healthmarketscience.rmiio.SimpleRemoteInputStream;
+import com.healthmarketscience.rmiio.exporter.RemoteStreamExporter;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
@@ -49,6 +56,8 @@ import 
org.apache.geode.distributed.internal.DistributionManager;
 import 
org.apache.geode.distributed.internal.membership.InternalDistributedMember;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.ManagementAgent;
+import org.apache.geode.management.internal.SystemManagementService;
 import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
 import org.apache.geode.management.internal.cli.result.model.ResultModel;
 
@@ -61,6 +70,7 @@ public class CreateMappingCommandTest {
   private String dataSourceName;
   private String tableName;
   private String pdxClass;
+  private String pdxClassFile;
   private String group1Name;
   private String group2Name;
   private Set<InternalDistributedMember> members;
@@ -82,6 +92,7 @@ public class CreateMappingCommandTest {
     dataSourceName = "connection";
     tableName = "testTable";
     pdxClass = "myPdxClass";
+    pdxClassFile = null;
     group1Name = "group1";
     group2Name = "group2";
     cache = mock(InternalCache.class);
@@ -143,7 +154,7 @@ public class CreateMappingCommandTest {
   }
 
   @Test
-  public void createsMappingReturnsStatusOKWhenFunctionResultSuccess() {
+  public void createsMappingReturnsStatusOKWhenFunctionResultSuccess() throws 
IOException {
     setupRequiredPreconditions();
     results.add(successFunctionResult);
     String ids = "ids";
@@ -151,7 +162,7 @@ public class CreateMappingCommandTest {
     String schema = "schema";
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, ids, catalog, schema, null);
+        tableName, pdxClass, pdxClassFile, false, ids, catalog, schema, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.OK);
     Object[] results = (Object[]) result.getConfigObject();
@@ -169,7 +180,8 @@ public class CreateMappingCommandTest {
   }
 
   @Test
-  public void 
createsMappingReturnsStatusOKWhenFunctionResultSuccessWithGroups() {
+  public void 
createsMappingReturnsStatusOKWhenFunctionResultSuccessWithGroups()
+      throws IOException {
     setupRequiredPreconditionsForGroup();
     results.add(successFunctionResult);
     String ids = "ids";
@@ -178,7 +190,7 @@ public class CreateMappingCommandTest {
     String[] groups = {group1Name, group2Name};
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, ids, catalog, schema, groups);
+        tableName, pdxClass, pdxClassFile, false, ids, catalog, schema, 
groups);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.OK);
     Object[] results = (Object[]) result.getConfigObject();
@@ -197,7 +209,7 @@ public class CreateMappingCommandTest {
   }
 
   @Test
-  public void createsMappingReturnsCorrectFieldMappings() {
+  public void createsMappingReturnsCorrectFieldMappings() throws IOException {
     setupRequiredPreconditions();
     results.add(successFunctionResult);
     String ids = "ids";
@@ -207,7 +219,7 @@ public class CreateMappingCommandTest {
     this.fieldMappings.add(new FieldMapping("pdx2", "pdx2type", "jdbc2", 
"jdbc2type", false));
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, ids, catalog, schema, null);
+        tableName, pdxClass, pdxClassFile, false, ids, catalog, schema, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.OK);
     Object[] results = (Object[]) result.getConfigObject();
@@ -216,7 +228,69 @@ public class CreateMappingCommandTest {
   }
 
   @Test
-  public void createsMappingReturnsRegionMappingWithComputedIds() {
+  public void createsMappingWithPdxClassFileReturnsCorrectFieldMappings() 
throws IOException {
+    RemoteInputStream remoteInputStream = setupPdxClassFile();
+    setupRequiredPreconditions();
+    results.add(successFunctionResult);
+    String ids = "ids";
+    String catalog = "catalog";
+    String schema = "schema";
+    this.fieldMappings.add(new FieldMapping("pdx1", "pdx1type", "jdbc1", 
"jdbc1type", false));
+    this.fieldMappings.add(new FieldMapping("pdx2", "pdx2type", "jdbc2", 
"jdbc2type", false));
+
+    ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
+        tableName, pdxClass, pdxClassFile, false, ids, catalog, schema, null);
+
+    assertThat(result.getStatus()).isSameAs(Result.Status.OK);
+    Object[] results = (Object[]) result.getConfigObject();
+    RegionMapping regionMapping = (RegionMapping) results[0];
+    assertThat(regionMapping.getFieldMappings()).isEqualTo(this.fieldMappings);
+    ArgumentCaptor<Object[]> argumentCaptor = 
ArgumentCaptor.forClass(Object[].class);
+    
verify(createRegionMappingCommand).executeFunctionAndGetFunctionResult(any(),
+        argumentCaptor.capture(), any());
+    Object[] args = argumentCaptor.getValue();
+    assertThat(args).hasSize(3);
+    assertThat(args[0]).isEqualTo(regionMapping);
+    assertThat(args[1]).isEqualTo("myPdxClassFilePath");
+    assertThat(args[2]).isSameAs(remoteInputStream);
+  }
+
+  @Test
+  public void 
createsMappingWithPdxClassFileAndFilePathFromShellIsEmptyListThrowsIllegalStateException()
+      throws IOException {
+    setupPdxClassFile();
+    
doReturn(Collections.emptyList()).when(createRegionMappingCommand).getFilePathFromShell();
+    setupRequiredPreconditions();
+    String ids = "ids";
+    String catalog = "catalog";
+    String schema = "schema";
+
+    assertThatThrownBy(() -> 
createRegionMappingCommand.createMapping(regionName, dataSourceName,
+        tableName, pdxClass, pdxClassFile, false, ids, catalog, schema, null))
+            .isInstanceOf(IllegalStateException.class);
+  }
+
+  private RemoteInputStream setupPdxClassFile() throws FileNotFoundException, 
RemoteException {
+    pdxClassFile = "myPdxClassFile";
+    String tempPdxClassFilePath = "myPdxClassFilePath";
+    List<String> list = Collections.singletonList(tempPdxClassFilePath);
+    doReturn(list).when(createRegionMappingCommand).getFilePathFromShell();
+    SystemManagementService systemManagementService = 
mock(SystemManagementService.class);
+    
doReturn(systemManagementService).when(createRegionMappingCommand).getManagementService();
+    ManagementAgent managementAgent = mock(ManagementAgent.class);
+    
when(systemManagementService.getManagementAgent()).thenReturn(managementAgent);
+    RemoteStreamExporter remoteStreamExporter = 
mock(RemoteStreamExporter.class);
+    
when(managementAgent.getRemoteStreamExporter()).thenReturn(remoteStreamExporter);
+    SimpleRemoteInputStream simpleRemoteInputStream = 
mock(SimpleRemoteInputStream.class);
+    doReturn(simpleRemoteInputStream).when(createRegionMappingCommand)
+        .createSimpleRemoteInputStream(tempPdxClassFilePath);
+    RemoteInputStream remoteInputStream = mock(RemoteInputStream.class);
+    
when(remoteStreamExporter.export(simpleRemoteInputStream)).thenReturn(remoteInputStream);
+    return remoteInputStream;
+  }
+
+  @Test
+  public void createsMappingReturnsRegionMappingWithComputedIds() throws 
IOException {
     setupRequiredPreconditions();
     results.add(successFunctionResult);
     String ids = "does not matter";
@@ -226,7 +300,7 @@ public class CreateMappingCommandTest {
     preconditionOutput[0] = computedIds;
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, ids, catalog, schema, null);
+        tableName, pdxClass, pdxClassFile, false, ids, catalog, schema, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.OK);
     Object[] results = (Object[]) result.getConfigObject();
@@ -235,7 +309,7 @@ public class CreateMappingCommandTest {
   }
 
   @Test
-  public void createsMappingReturnsErrorIfPreconditionCheckErrors() {
+  public void createsMappingReturnsErrorIfPreconditionCheckErrors() throws 
IOException {
     setupRequiredPreconditions();
     results.add(successFunctionResult);
     String ids = "ids";
@@ -245,19 +319,19 @@ public class CreateMappingCommandTest {
     when(preconditionCheckResults.getStatusMessage()).thenReturn("precondition 
check failed");
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, ids, catalog, schema, null);
+        tableName, pdxClass, pdxClassFile, false, ids, catalog, schema, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.ERROR);
     assertThat(result.toString()).contains("precondition check failed");
   }
 
   @Test
-  public void createsMappingWithRegionPathCreatesMappingWithSlashRemoved() {
+  public void createsMappingWithRegionPathCreatesMappingWithSlashRemoved() 
throws IOException {
     setupRequiredPreconditions();
     results.add(successFunctionResult);
 
     ResultModel result = createRegionMappingCommand.createMapping("/" + 
regionName, dataSourceName,
-        tableName, pdxClass, false, null, null, null, null);
+        tableName, pdxClass, pdxClassFile, false, null, null, null, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.OK);
     Object[] results = (Object[]) result.getConfigObject();
@@ -267,30 +341,31 @@ public class CreateMappingCommandTest {
   }
 
   @Test
-  public void createsMappingReturnsStatusERRORWhenFunctionResultIsEmpty() {
+  public void createsMappingReturnsStatusERRORWhenFunctionResultIsEmpty() 
throws IOException {
     setupRequiredPreconditions();
     results.clear();
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, null, null, null, null);
+        tableName, pdxClass, pdxClassFile, false, null, null, null, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.ERROR);
   }
 
   @Test
-  public void createsMappingReturnsStatusERRORWhenClusterConfigIsDisabled() {
+  public void createsMappingReturnsStatusERRORWhenClusterConfigIsDisabled() 
throws IOException {
     results.add(successFunctionResult);
     
doReturn(null).when(createRegionMappingCommand).getConfigurationPersistenceService();
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, null, null, null, null);
+        tableName, pdxClass, pdxClassFile, false, null, null, null, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.ERROR);
     assertThat(result.toString()).contains("Cluster Configuration must be 
enabled.");
   }
 
   @Test
-  public void 
createsMappingReturnsStatusERRORWhenClusterConfigDoesNotContainRegion() {
+  public void 
createsMappingReturnsStatusERRORWhenClusterConfigDoesNotContainRegion()
+      throws IOException {
     results.add(successFunctionResult);
     ConfigurationPersistenceService configurationPersistenceService =
         mock(ConfigurationPersistenceService.class);
@@ -301,7 +376,7 @@ public class CreateMappingCommandTest {
     when(cacheConfig.getRegions()).thenReturn(Collections.emptyList());
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, null, null, null, null);
+        tableName, pdxClass, pdxClassFile, false, null, null, null, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.ERROR);
     assertThat(result.toString())
@@ -309,7 +384,7 @@ public class CreateMappingCommandTest {
   }
 
   @Test
-  public void createsMappingReturnsStatusERRORWhenRegionMappingExists() {
+  public void createsMappingReturnsStatusERRORWhenRegionMappingExists() throws 
IOException {
     results.add(successFunctionResult);
     ConfigurationPersistenceService configurationPersistenceService =
         mock(ConfigurationPersistenceService.class);
@@ -331,14 +406,15 @@ public class CreateMappingCommandTest {
     when(matchingRegion.getCustomRegionElements()).thenReturn(customList);
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, null, null, null, null);
+        tableName, pdxClass, pdxClassFile, false, null, null, null, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.ERROR);
     assertThat(result.toString()).contains("A JDBC mapping for " + regionName 
+ " already exists.");
   }
 
   @Test
-  public void 
createsMappingReturnsStatusERRORWhenClusterConfigRegionHasLoader() {
+  public void 
createsMappingReturnsStatusERRORWhenClusterConfigRegionHasLoader()
+      throws IOException {
     results.add(successFunctionResult);
     ConfigurationPersistenceService configurationPersistenceService =
         mock(ConfigurationPersistenceService.class);
@@ -356,7 +432,7 @@ public class CreateMappingCommandTest {
     when(matchingRegion.getRegionAttributes()).thenReturn(loaderAttribute);
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, null, null, null, null);
+        tableName, pdxClass, pdxClassFile, false, null, null, null, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.ERROR);
     assertThat(result.toString()).contains("The existing region " + regionName
@@ -364,7 +440,8 @@ public class CreateMappingCommandTest {
   }
 
   @Test
-  public void 
createMappingWithSynchronousReturnsStatusERRORWhenClusterConfigRegionHasWriter()
 {
+  public void 
createMappingWithSynchronousReturnsStatusERRORWhenClusterConfigRegionHasWriter()
+      throws IOException {
     results.add(successFunctionResult);
     ConfigurationPersistenceService configurationPersistenceService =
         mock(ConfigurationPersistenceService.class);
@@ -382,7 +459,7 @@ public class CreateMappingCommandTest {
     when(matchingRegion.getRegionAttributes()).thenReturn(writerAttribute);
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, true, null, null, null, null);
+        tableName, pdxClass, pdxClassFile, true, null, null, null, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.ERROR);
     assertThat(result.toString()).contains("The existing region " + regionName
@@ -390,7 +467,8 @@ public class CreateMappingCommandTest {
   }
 
   @Test
-  public void 
createMappingWithSynchronousReturnsStatusOKWhenAsycnEventQueueAlreadyExists() {
+  public void 
createMappingWithSynchronousReturnsStatusOKWhenAsycnEventQueueAlreadyExists()
+      throws IOException {
     results.add(successFunctionResult);
     ConfigurationPersistenceService configurationPersistenceService =
         mock(ConfigurationPersistenceService.class);
@@ -412,14 +490,15 @@ public class CreateMappingCommandTest {
     when(cacheConfig.getAsyncEventQueues()).thenReturn(asyncEventQueues);
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, true, null, null, null, null);
+        tableName, pdxClass, pdxClassFile, true, null, null, null, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.OK);
   }
 
 
   @Test
-  public void 
createsMappingReturnsStatusERRORWhenAsycnEventQueueAlreadyExists() {
+  public void 
createsMappingReturnsStatusERRORWhenAsycnEventQueueAlreadyExists()
+      throws IOException {
     results.add(successFunctionResult);
     ConfigurationPersistenceService configurationPersistenceService =
         mock(ConfigurationPersistenceService.class);
@@ -441,7 +520,7 @@ public class CreateMappingCommandTest {
     when(cacheConfig.getAsyncEventQueues()).thenReturn(asyncEventQueues);
 
     ResultModel result = createRegionMappingCommand.createMapping(regionName, 
dataSourceName,
-        tableName, pdxClass, false, null, null, null, null);
+        tableName, pdxClass, pdxClassFile, false, null, null, null, null);
 
     assertThat(result.getStatus()).isSameAs(Result.Status.ERROR);
     assertThat(result.toString())
diff --git a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
index 593ca5a..1b5d14c 100644
--- a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
+++ b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
@@ -17,12 +17,17 @@ package org.apache.geode.connectors.jdbc.internal.cli;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.catchThrowable;
 import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.eq;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.same;
+import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
+import java.io.IOException;
 import java.io.Serializable;
 import java.sql.Connection;
 import java.sql.JDBCType;
@@ -36,6 +41,7 @@ import java.util.Set;
 
 import javax.sql.DataSource;
 
+import com.healthmarketscience.rmiio.RemoteInputStream;
 import org.apache.commons.lang3.SerializationUtils;
 import org.junit.Before;
 import org.junit.Test;
@@ -44,12 +50,8 @@ import org.apache.geode.SerializationException;
 import org.apache.geode.cache.execute.FunctionContext;
 import org.apache.geode.cache.execute.ResultSender;
 import org.apache.geode.connectors.jdbc.JdbcConnectorException;
-import org.apache.geode.connectors.jdbc.internal.SqlHandler.DataSourceFactory;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
-import 
org.apache.geode.connectors.jdbc.internal.cli.CreateMappingPreconditionCheckFunction.ClassFactory;
-import 
org.apache.geode.connectors.jdbc.internal.cli.CreateMappingPreconditionCheckFunction.PdxWriterFactory;
-import 
org.apache.geode.connectors.jdbc.internal.cli.CreateMappingPreconditionCheckFunction.ReflectionBasedAutoSerializerFactory;
 import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
 import org.apache.geode.internal.cache.InternalCache;
@@ -69,21 +71,21 @@ public class CreateMappingPreconditionCheckFunctionTest {
   private static final String MEMBER_NAME = "testMemberName";
 
   private RegionMapping regionMapping;
-  private FunctionContext<RegionMapping> context;
+  private FunctionContext<Object[]> context;
   private ResultSender<Object> resultSender;
   private InternalCache cache;
   private TypeRegistry typeRegistry;
-  private DataSourceFactory dataSourceFactory;
-  private ClassFactory classFactory;
   private TableMetaDataManager tableMetaDataManager;
   private TableMetaDataView tableMetaDataView;
   private DataSource dataSource;
   private PdxType pdxType = mock(PdxType.class);
+  private String remoteInputStreamName;
+  private RemoteInputStream remoteInputStream;
+  private final Object[] inputArgs = new Object[3];
 
   private CreateMappingPreconditionCheckFunction function;
 
   public static class PdxClassDummy {
-
   }
 
   public static class PdxClassDummyNoZeroArg {
@@ -98,6 +100,9 @@ public class CreateMappingPreconditionCheckFunctionTest {
     typeRegistry = mock(TypeRegistry.class);
     when(cache.getPdxRegistry()).thenReturn(typeRegistry);
     regionMapping = mock(RegionMapping.class);
+    remoteInputStreamName = null;
+    remoteInputStream = null;
+    setupInputArgs();
 
     when(regionMapping.getRegionName()).thenReturn(REGION_NAME);
     when(regionMapping.getPdxName()).thenReturn(PDX_CLASS_NAME);
@@ -105,24 +110,33 @@ public class CreateMappingPreconditionCheckFunctionTest {
 
     when(context.getResultSender()).thenReturn(resultSender);
     when(context.getCache()).thenReturn(cache);
-    when(context.getArguments()).thenReturn(regionMapping);
+    when(context.getArguments()).thenReturn(inputArgs);
     when(context.getMemberName()).thenReturn(MEMBER_NAME);
 
-    dataSourceFactory = mock(DataSourceFactory.class);
     dataSource = mock(DataSource.class);
     Connection connection = mock(Connection.class);
     when(dataSource.getConnection()).thenReturn(connection);
-    
when(dataSourceFactory.getDataSource(DATA_SOURCE_NAME)).thenReturn(dataSource);
-    classFactory = mock(ClassFactory.class);
-    
when(classFactory.loadClass(PDX_CLASS_NAME)).thenReturn(PdxClassDummy.class);
     
when(typeRegistry.getExistingTypeForClass(PdxClassDummy.class)).thenReturn(pdxType);
     tableMetaDataManager = mock(TableMetaDataManager.class);
     tableMetaDataView = mock(TableMetaDataView.class);
     when(tableMetaDataManager.getTableMetaDataView(connection, regionMapping))
         .thenReturn(tableMetaDataView);
-    function =
-        new CreateMappingPreconditionCheckFunction(dataSourceFactory, 
classFactory, null, null,
-            tableMetaDataManager);
+    setupFunction();
+  }
+
+  private void setupInputArgs() {
+    inputArgs[0] = regionMapping;
+    inputArgs[1] = remoteInputStreamName;
+    inputArgs[2] = remoteInputStream;
+  }
+
+  private void setupFunction() throws ClassNotFoundException {
+    function = spy(CreateMappingPreconditionCheckFunction.class);
+    doReturn(dataSource).when(function).getDataSource(DATA_SOURCE_NAME);
+    doReturn(PdxClassDummy.class).when(function).loadClass(PDX_CLASS_NAME);
+    doReturn(null).when(function).getReflectionBasedAutoSerializer(any());
+    doReturn(null).when(function).createPdxWriter(same(typeRegistry), any());
+    doReturn(tableMetaDataManager).when(function).getTableMetaDataManager();
   }
 
   @Test
@@ -137,7 +151,7 @@ public class CreateMappingPreconditionCheckFunctionTest {
 
   @Test
   public void serializes() {
-    Serializable original = function;
+    Serializable original = new CreateMappingPreconditionCheckFunction();
 
     Object copy = SerializationUtils.clone(original);
 
@@ -147,7 +161,7 @@ public class CreateMappingPreconditionCheckFunctionTest {
 
   @Test
   public void executeFunctionThrowsIfDataSourceDoesNotExist() throws Exception 
{
-    when(dataSourceFactory.getDataSource(DATA_SOURCE_NAME)).thenReturn(null);
+    doReturn(null).when(function).getDataSource(DATA_SOURCE_NAME);
 
     Throwable throwable = catchThrowable(() -> 
function.executeFunction(context));
 
@@ -170,7 +184,7 @@ public class CreateMappingPreconditionCheckFunctionTest {
   @Test
   public void executeFunctionThrowsIfClassNotFound() throws 
ClassNotFoundException {
     ClassNotFoundException ex = new ClassNotFoundException("class not found");
-    when(classFactory.loadClass(PDX_CLASS_NAME)).thenThrow(ex);
+    doThrow(ex).when(function).loadClass(PDX_CLASS_NAME);
 
     Throwable throwable = catchThrowable(() -> 
function.executeFunction(context));
 
@@ -287,7 +301,7 @@ public class CreateMappingPreconditionCheckFunctionTest {
     when(pdxField1.getFieldType()).thenReturn(FieldType.LONG);
     when(pdxType.getFieldCount()).thenReturn(1);
     when(pdxType.getFields()).thenReturn(Arrays.asList(pdxField1));
-    
when(classFactory.loadClass(PDX_CLASS_NAME)).thenReturn(PdxClassDummyNoZeroArg.class);
+    
doReturn(PdxClassDummyNoZeroArg.class).when(function).loadClass(PDX_CLASS_NAME);
     
when(typeRegistry.getExistingTypeForClass(PdxClassDummyNoZeroArg.class)).thenReturn(null);
 
     Throwable throwable = catchThrowable(() -> 
function.executeFunction(context));
@@ -316,22 +330,16 @@ public class CreateMappingPreconditionCheckFunctionTest {
         mock(ReflectionBasedAutoSerializer.class);
     PdxWriter pdxWriter = mock(PdxWriter.class);
     when(reflectionedBasedAutoSerializer.toData(any(), 
same(pdxWriter))).thenReturn(true);
-    ReflectionBasedAutoSerializerFactory reflectionBasedAutoSerializerFactory =
-        mock(ReflectionBasedAutoSerializerFactory.class);
-    
when(reflectionBasedAutoSerializerFactory.create(domainClassNameInAutoSerializer))
-        .thenReturn(reflectionedBasedAutoSerializer);
-    PdxWriterFactory pdxWriterFactory = mock(PdxWriterFactory.class);
-    when(pdxWriterFactory.create(same(typeRegistry), 
any())).thenReturn(pdxWriter);
-    function = new CreateMappingPreconditionCheckFunction(dataSourceFactory, 
classFactory,
-        reflectionBasedAutoSerializerFactory, pdxWriterFactory,
-        tableMetaDataManager);
+    doReturn(reflectionedBasedAutoSerializer).when(function)
+        .getReflectionBasedAutoSerializer(domainClassNameInAutoSerializer);
+    doReturn(pdxWriter).when(function).createPdxWriter(same(typeRegistry), 
any());
     SerializationException ex = new SerializationException("test");
     doThrow(ex).when(cache).registerPdxMetaData(any());
 
     CliFunctionResult result = function.executeFunction(context);
 
     assertThat(result.isSuccessful()).isTrue();
-    
verify(reflectionBasedAutoSerializerFactory).create(domainClassNameInAutoSerializer);
+    
verify(function).getReflectionBasedAutoSerializer(domainClassNameInAutoSerializer);
     Object[] outputs = (Object[]) result.getResultObject();
     ArrayList<FieldMapping> fieldsMappings = (ArrayList<FieldMapping>) 
outputs[1];
     assertThat(fieldsMappings).hasSize(1);
@@ -340,7 +348,6 @@ public class CreateMappingPreconditionCheckFunctionTest {
             new FieldMapping("COL1", FieldType.LONG.name(), "col1", 
JDBCType.DATE.name(), false));
   }
 
-
   @Test
   public void 
executeFunctionThrowsGivenPdxRegistrationFailsAndReflectionBasedAutoSerializerThatReturnsFalse()
       throws Exception {
@@ -360,18 +367,13 @@ public class CreateMappingPreconditionCheckFunctionTest {
         mock(ReflectionBasedAutoSerializer.class);
     PdxWriter pdxWriter = mock(PdxWriter.class);
     when(reflectionedBasedAutoSerializer.toData(any(), 
same(pdxWriter))).thenReturn(false);
-    ReflectionBasedAutoSerializerFactory reflectionBasedAutoSerializerFactory =
-        mock(ReflectionBasedAutoSerializerFactory.class);
-    
when(reflectionBasedAutoSerializerFactory.create(domainClassNameInAutoSerializer))
-        .thenReturn(reflectionedBasedAutoSerializer);
-    PdxWriterFactory pdxWriterFactory = mock(PdxWriterFactory.class);
-    when(pdxWriterFactory.create(same(typeRegistry), 
any())).thenReturn(pdxWriter);
+    doReturn(reflectionedBasedAutoSerializer).when(function)
+        .getReflectionBasedAutoSerializer(domainClassNameInAutoSerializer);
     SerializationException ex = new SerializationException("test");
     doThrow(ex).when(cache).registerPdxMetaData(any());
-
-    function = new CreateMappingPreconditionCheckFunction(dataSourceFactory, 
classFactory,
-        reflectionBasedAutoSerializerFactory, pdxWriterFactory,
-        tableMetaDataManager);
+    doReturn(reflectionedBasedAutoSerializer).when(function)
+        .getReflectionBasedAutoSerializer(PdxClassDummy.class.getName());
+    doReturn(pdxWriter).when(function).createPdxWriter(same(typeRegistry), 
any());
 
     Throwable throwable = catchThrowable(() -> 
function.executeFunction(context));
 
@@ -486,4 +488,93 @@ public class CreateMappingPreconditionCheckFunctionTest {
     Object[] outputs = (Object[]) result.getResultObject();
     assertThat(outputs[0]).isEqualTo("keyCol1");
   }
+
+  @Test
+  public void 
executeFunctionThrowsGivenRemoteInputStreamAndLoadClassThatThrowsClassNotFound()
+      throws ClassNotFoundException {
+    remoteInputStreamName = "remoteInputStreamName";
+    remoteInputStream = mock(RemoteInputStream.class);
+    setupInputArgs();
+    
doThrow(ClassNotFoundException.class).when(function).loadClass(eq(PDX_CLASS_NAME),
 any());
+
+    Throwable throwable = catchThrowable(() -> 
function.executeFunction(context));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessageContaining(
+            "The pdx class \"" + PDX_CLASS_NAME + "\" could not be loaded 
because: ")
+        .hasMessageContaining("ClassNotFoundException");
+    verify(function).createTemporaryDirectory(any());
+    verify(function).deleteDirectory(any());
+  }
+
+  @Test
+  public void 
executeFunctionThrowsGivenRemoteInputStreamAndcreateTempDirectoryException()
+      throws IOException {
+    remoteInputStreamName = "remoteInputStreamName";
+    remoteInputStream = mock(RemoteInputStream.class);
+    setupInputArgs();
+    doThrow(IOException.class).when(function).createTempDirectory(any());
+
+    Throwable throwable = catchThrowable(() -> 
function.executeFunction(context));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessageContaining(
+            "Could not create a temporary directory with the prefix 
\"pdx-class-dir-\" because: ")
+        .hasMessageContaining("IOException");
+    verify(function, never()).deleteDirectory(any());
+    verify(remoteInputStream, never()).close(true);
+  }
+
+  @Test
+  public void 
executeFunctionThrowsGivenRemoteInputStreamAndCopyFileIOException()
+      throws IOException {
+    remoteInputStreamName = "remoteInputStreamName";
+    remoteInputStream = mock(RemoteInputStream.class);
+    setupInputArgs();
+    doThrow(IOException.class).when(function).copyFile(any(), any());
+
+    Throwable throwable = catchThrowable(() -> 
function.executeFunction(context));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessageContaining(
+            "The pdx class file \"" + remoteInputStreamName
+                + "\" could not be copied to a temporary file, because: ")
+        .hasMessageContaining("IOException");
+    verify(function).createTemporaryDirectory(any());
+    verify(function).deleteDirectory(any());
+    verify(remoteInputStream).close(true);
+  }
+
+  @Test
+  public void 
executeFunctionReturnsSuccessGivenRemoteInputStreamClassAndPackageName()
+      throws ClassNotFoundException {
+    remoteInputStreamName = "remoteInputStreamName.class";
+    remoteInputStream = mock(RemoteInputStream.class);
+    setupInputArgs();
+    String PDX_CLASS_NAME_WITH_PACKAGE = "foo.bar.MyPdxClassName";
+    when(regionMapping.getPdxName()).thenReturn(PDX_CLASS_NAME_WITH_PACKAGE);
+    
doReturn(PdxClassDummy.class).when(function).loadClass(eq(PDX_CLASS_NAME_WITH_PACKAGE),
 any());
+
+    CliFunctionResult result = function.executeFunction(context);
+
+    assertThat(result.isSuccessful()).isTrue();
+    verify(function).createTemporaryDirectory(any());
+    verify(function).deleteDirectory(any());
+  }
+
+  @Test
+  public void executeFunctionReturnsSuccessGivenRemoteInputStreamJar()
+      throws ClassNotFoundException {
+    remoteInputStreamName = "remoteInputStreamName.jar";
+    remoteInputStream = mock(RemoteInputStream.class);
+    setupInputArgs();
+    doReturn(PdxClassDummy.class).when(function).loadClass(eq(PDX_CLASS_NAME), 
any());
+
+    CliFunctionResult result = function.executeFunction(context);
+
+    assertThat(result.isSuccessful()).isTrue();
+    verify(function).createTemporaryDirectory(any());
+    verify(function).deleteDirectory(any());
+  }
+
 }

Reply via email to