This is an automated email from the ASF dual-hosted git repository.

dschneider pushed a commit to branch feature/GEODE-6291
in repository https://gitbox.apache.org/repos/asf/geode.git


The following commit(s) were added to refs/heads/feature/GEODE-6291 by this push:
     new 8088fb3  wip: change create jdbc-mapping to require that the pdx class exists.
                  This will break some of the existing tests. All the unit tests are passing,
                  but the other types of jdbc tests still need work. Also need to implement
                  creating a PdxType during create jdbc-mapping using the reflection-based
                  auto serializer. Also need to implement an option on create jdbc-mapping
                  to transfer the class file for the pdx class to the server that runs the
                  precondition function.
8088fb3 is described below

commit 8088fb3dbb7aadb77996b29e71362dac338efc97
Author: Darrel Schneider <dschnei...@pivotal.io>
AuthorDate: Fri Feb 15 09:04:45 2019 -0800

    wip: change create jdbc-mapping to require that the pdx class exists.
    This will break some of the existing tests. All the unit tests are passing,
    but the other types of jdbc tests still need work.
    Also need to implement creating a PdxType during create jdbc-mapping
    using the reflection-based auto serializer.
    Also need to implement an option on create jdbc-mapping to transfer the
    class file for the pdx class to the server that runs the precondition function.
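
For context, here is a rough sketch (not the committed implementation; the wrapper class and
method name are illustrative) of the flow the updated precondition function follows to resolve
a PdxType for the mapping's pdx class when that class implements PdxSerializable:

    import org.apache.geode.internal.cache.InternalCache;
    import org.apache.geode.pdx.PdxSerializable;
    import org.apache.geode.pdx.internal.PdxType;
    import org.apache.geode.pdx.internal.TypeRegistry;

    class PdxTypePreconditionSketch {
      // Illustrative condensation of the logic added to CreateMappingPreconditionCheckFunction.
      static PdxType pdxTypeFor(InternalCache cache, String pdxClassName) throws Exception {
        Class<?> clazz = Class.forName(pdxClassName); // the pdx class must now exist on the server
        TypeRegistry registry = cache.getPdxRegistry();
        PdxType existing = registry.getExistingTypeForClass(clazz);
        if (existing != null) {
          return existing; // a PdxType is already registered for this class
        }
        if (PdxSerializable.class.isAssignableFrom(clazz)) {
          // Registering pdx metadata serializes the instance, which records its PdxType
          // in the registry. Assumes the class has a public zero-arg constructor.
          cache.registerPdxMetaData(clazz.getConstructor().newInstance());
          return registry.getExistingTypeForClass(clazz);
        }
        // Non-PdxSerializable classes still need the reflection-based auto serializer (TODO above).
        throw new IllegalStateException("Could not generate a PdxType for " + pdxClassName);
      }
    }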
---
 .../cli/CreateMappingCommandDUnitTest.java         |  13 --
 .../jdbc/internal/SqlToPdxInstanceCreator.java     |  98 +-------------
 .../CreateMappingPreconditionCheckFunction.java    | 136 +++++++++++++++----
 .../jdbc/internal/SqlToPdxInstanceCreatorTest.java | 146 ---------------------
 ...CreateMappingPreconditionCheckFunctionTest.java |  94 +++++++++++--
 .../apache/geode/pdx/internal/TypeRegistry.java    |  65 +--------
 .../geode/pdx/internal/TypeRegistryTest.java       | 132 -------------------
 7 files changed, 202 insertions(+), 482 deletions(-)

diff --git a/geode-connectors/src/distributedTest/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandDUnitTest.java b/geode-connectors/src/distributedTest/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandDUnitTest.java
index 78b4dd0..0cdfe4f 100644
--- a/geode-connectors/src/distributedTest/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandDUnitTest.java
+++ b/geode-connectors/src/distributedTest/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingCommandDUnitTest.java
@@ -607,10 +607,6 @@ public class CreateMappingCommandDUnitTest {
     setupReplicate(region1Name);
     setupReplicate(region2Name);
 
-    server1.invoke(() -> {
-      ClusterStartupRule.getCache().registerPdxMetaData(new Employee());
-    });
-
     CommandStringBuilder csb = new CommandStringBuilder(CREATE_MAPPING);
     csb.addOption(REGION_NAME, region1Name);
     csb.addOption(DATA_SOURCE_NAME, "connection");
@@ -696,16 +692,11 @@ public class CreateMappingCommandDUnitTest {
     }
   }
 
-
   @Test
   public void createMappingsWithExistingPdxName() {
     String region1Name = "region1";
     setupReplicate(region1Name);
 
-    server1.invoke(() -> {
-      ClusterStartupRule.getCache().registerPdxMetaData(new Employee());
-    });
-
     CommandStringBuilder csb = new CommandStringBuilder(CREATE_MAPPING);
     csb.addOption(REGION_NAME, region1Name);
     csb.addOption(DATA_SOURCE_NAME, "connection");
@@ -732,10 +723,6 @@ public class CreateMappingCommandDUnitTest {
   public void createMappingUsingRegionNameUsesDomainClass() {
     setupReplicate(EMPLOYEE_LOWER);
 
-    server1.invoke(() -> {
-      ClusterStartupRule.getCache().registerPdxMetaData(new Employee());
-    });
-
     CommandStringBuilder csb = new CommandStringBuilder(CREATE_MAPPING);
     csb.addOption(REGION_NAME, EMPLOYEE_LOWER);
     csb.addOption(DATA_SOURCE_NAME, "connection");
diff --git a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreator.java b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreator.java
index b837fb9..b1355d3 100644
--- a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreator.java
+++ b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreator.java
@@ -14,16 +14,11 @@
  */
 package org.apache.geode.connectors.jdbc.internal;
 
-import java.sql.JDBCType;
-import java.util.Set;
-
 import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.FieldType;
 import org.apache.geode.pdx.PdxInstanceFactory;
-import org.apache.geode.pdx.internal.PdxField;
-import org.apache.geode.pdx.internal.TypeRegistry;
 
 public class SqlToPdxInstanceCreator {
   private final InternalCache cache;
@@ -35,27 +30,12 @@ public class SqlToPdxInstanceCreator {
   }
 
   public SqlToPdxInstance create() {
-    TypeRegistry typeRegistry = cache.getPdxRegistry();
     SqlToPdxInstance result = new SqlToPdxInstance();
     PdxInstanceFactory templateFactory = createPdxInstanceFactory();
     for (FieldMapping columnMapping : regionMapping.getFieldMappings()) {
       String columnName = columnMapping.getJdbcName();
       String fieldName = columnMapping.getPdxName();
-      FieldType fieldType;
-      if (fieldName.isEmpty()) {
-        Set<PdxField> pdxFields =
-            typeRegistry.findFieldThatMatchesName(regionMapping.getPdxName(), columnName);
-        JDBCType columnType = JDBCType.valueOf(columnMapping.getJdbcType());
-        if (pdxFields.isEmpty()) {
-          fieldName = columnName;
-          fieldType = computeFieldType(columnMapping.isJdbcNullable(), columnType);
-        } else {
-          fieldName = pdxFields.iterator().next().getFieldName();
-          fieldType = findFieldType(pdxFields, columnMapping.isJdbcNullable(), columnType);
-        }
-      } else {
-        fieldType = FieldType.valueOf(columnMapping.getPdxType());
-      }
+      FieldType fieldType = FieldType.valueOf(columnMapping.getPdxType());
       result.addMapping(columnName, fieldName, fieldType);
       writeField(templateFactory, columnMapping, fieldName, fieldType);
     }
@@ -142,80 +122,4 @@ public class SqlToPdxInstanceCreator {
         throw new IllegalStateException("unhandled pdx field type " + fieldType);
     }
   }
-
-  public static FieldType findFieldType(Set<PdxField> pdxFields, boolean columnNullable,
-      JDBCType columnType) {
-    if (pdxFields.size() == 1) {
-      return pdxFields.iterator().next().getFieldType();
-    } else {
-      FieldType fieldTypeBasedOnJDBC = computeFieldType(columnNullable, columnType);
-      // TODO find best type in pdxFields
-      return pdxFields.iterator().next().getFieldType();
-    }
-  }
-
-  public static FieldType computeFieldType(boolean isNullable, JDBCType jdbcType) {
-    switch (jdbcType) {
-      case NULL:
-        throw new IllegalStateException("unexpected NULL jdbc column type");
-      case BOOLEAN:
-        return computeType(isNullable, FieldType.BOOLEAN);
-      case BIT: // 1 bit
-        return computeType(isNullable, FieldType.BOOLEAN);
-      case TINYINT: // unsigned 8 bits
-        return computeType(isNullable, FieldType.SHORT);
-      case SMALLINT: // signed 16 bits
-        return computeType(isNullable, FieldType.SHORT);
-      case INTEGER: // signed 32 bits
-        return computeType(isNullable, FieldType.INT);
-      case BIGINT: // signed 64 bits
-        return computeType(isNullable, FieldType.LONG);
-      case FLOAT:
-        return computeType(isNullable, FieldType.DOUBLE);
-      case REAL:
-        return computeType(isNullable, FieldType.FLOAT);
-      case DOUBLE:
-        return computeType(isNullable, FieldType.DOUBLE);
-      case CHAR:
-        return FieldType.STRING;
-      case VARCHAR:
-        return FieldType.STRING;
-      case LONGVARCHAR:
-        return FieldType.STRING;
-      case DATE:
-        return FieldType.DATE;
-      case TIME:
-        return FieldType.DATE;
-      case TIMESTAMP:
-        return FieldType.DATE;
-      case BINARY:
-        return FieldType.BYTE_ARRAY;
-      case VARBINARY:
-        return FieldType.BYTE_ARRAY;
-      case LONGVARBINARY:
-        return FieldType.BYTE_ARRAY;
-      case BLOB:
-        return FieldType.BYTE_ARRAY;
-      case NCHAR:
-        return FieldType.STRING;
-      case NVARCHAR:
-        return FieldType.STRING;
-      case LONGNVARCHAR:
-        return FieldType.STRING;
-      case TIME_WITH_TIMEZONE:
-        return FieldType.DATE;
-      case TIMESTAMP_WITH_TIMEZONE:
-        return FieldType.DATE;
-      default:
-        return FieldType.OBJECT;
-    }
-  }
-
-  private static FieldType computeType(boolean isNullable, FieldType nonNullType) {
-    if (isNullable) {
-      return FieldType.OBJECT;
-    }
-    return nonNullType;
-
-  }
 }
diff --git a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
index 3ef5ae9..9091ca4 100644
--- a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
+++ b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
@@ -15,6 +15,8 @@
 package org.apache.geode.connectors.jdbc.internal.cli;
 
 import java.io.ObjectInputStream;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 import java.sql.Connection;
 import java.sql.JDBCType;
 import java.sql.SQLException;
@@ -24,11 +26,11 @@ import java.util.Set;
 
 import javax.sql.DataSource;
 
+import org.apache.geode.SerializationException;
 import org.apache.geode.annotations.Experimental;
 import org.apache.geode.cache.execute.FunctionContext;
 import org.apache.geode.connectors.jdbc.JdbcConnectorException;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler.DataSourceFactory;
-import org.apache.geode.connectors.jdbc.internal.SqlToPdxInstanceCreator;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
 import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
@@ -37,28 +39,35 @@ import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.jndi.JNDIInvoker;
 import org.apache.geode.management.cli.CliFunction;
 import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
-import org.apache.geode.pdx.FieldType;
+import org.apache.geode.pdx.PdxSerializable;
 import org.apache.geode.pdx.internal.PdxField;
+import org.apache.geode.pdx.internal.PdxType;
 import org.apache.geode.pdx.internal.TypeRegistry;
 
 @Experimental
 public class CreateMappingPreconditionCheckFunction extends CliFunction<RegionMapping> {
 
   private transient DataSourceFactory dataSourceFactory;
+  private transient ClassFactory classFactory;
   private transient TableMetaDataManager tableMetaDataManager;
 
-  CreateMappingPreconditionCheckFunction(DataSourceFactory factory, TableMetaDataManager manager) {
+  CreateMappingPreconditionCheckFunction(DataSourceFactory factory, ClassFactory classFactory,
+      TableMetaDataManager manager) {
     this.dataSourceFactory = factory;
+    this.classFactory = classFactory;
     this.tableMetaDataManager = manager;
   }
 
   CreateMappingPreconditionCheckFunction() {
-    this(dataSourceName -> JNDIInvoker.getDataSource(dataSourceName), new TableMetaDataManager());
+    this(dataSourceName -> JNDIInvoker.getDataSource(dataSourceName),
+        className -> Class.forName(className),
+        new TableMetaDataManager());
   }
 
   // used by java during deserialization
   private void readObject(ObjectInputStream stream) {
     this.dataSourceFactory = dataSourceName -> JNDIInvoker.getDataSource(dataSourceName);
+    this.classFactory = className -> Class.forName(className);
     this.tableMetaDataManager = new TableMetaDataManager();
   }
 
@@ -75,6 +84,7 @@ public class CreateMappingPreconditionCheckFunction extends CliFunction<RegionMa
     }
     InternalCache cache = (InternalCache) context.getCache();
     TypeRegistry typeRegistry = cache.getPdxRegistry();
+    PdxType pdxType = getPdxTypeForClass(cache, typeRegistry, regionMapping.getPdxName());
     try (Connection connection = dataSource.getConnection()) {
       TableMetaDataView tableMetaData =
           tableMetaDataManager.getTableMetaDataView(connection, regionMapping);
@@ -84,13 +94,19 @@ public class CreateMappingPreconditionCheckFunction extends CliFunction<RegionMa
       Object[] output = new Object[2];
       ArrayList<FieldMapping> fieldMappings = new ArrayList<>();
       output[1] = fieldMappings;
-      for (String jdbcName : tableMetaData.getColumnNames()) {
+      Set<String> columnNames = tableMetaData.getColumnNames();
+      if (columnNames.size() != pdxType.getFieldCount()) {
+        throw new JdbcConnectorException(
+            "The table and pdx class must have the same number of 
columns/fields. But the table has "
+                + columnNames.size()
+                + " columns and the pdx class has " + pdxType.getFieldCount() 
+ " fields.");
+      }
+      List<PdxField> pdxFields = pdxType.getFields();
+      for (String jdbcName : columnNames) {
         boolean isNullable = tableMetaData.isColumnNullable(jdbcName);
         JDBCType jdbcType = tableMetaData.getColumnDataType(jdbcName);
         FieldMapping fieldMapping =
-            new FieldMapping("", "", jdbcName, jdbcType.getName(), isNullable);
-        updateFieldMappingFromExistingPdxType(fieldMapping, typeRegistry,
-            regionMapping.getPdxName());
+            createFieldMapping(jdbcName, jdbcType.getName(), isNullable, pdxFields);
         fieldMappings.add(fieldMapping);
       }
       if (regionMapping.getIds() == null || regionMapping.getIds().isEmpty()) {
@@ -104,22 +120,98 @@ public class CreateMappingPreconditionCheckFunction extends CliFunction<RegionMa
     }
   }
 
-  private void updateFieldMappingFromExistingPdxType(FieldMapping fieldMapping,
-      TypeRegistry typeRegistry, String pdxClassName) {
-    String columnName = fieldMapping.getJdbcName();
-    try {
-      Set<PdxField> foundFields = typeRegistry.findFieldThatMatchesName(pdxClassName, columnName);
-      if (!foundFields.isEmpty()) {
-        fieldMapping.setPdxName(foundFields.iterator().next().getFieldName());
-        JDBCType columnType = JDBCType.valueOf(fieldMapping.getJdbcType());
-        FieldType fieldType = SqlToPdxInstanceCreator.findFieldType(foundFields,
-            fieldMapping.isJdbcNullable(), columnType);
-        fieldMapping.setPdxType(fieldType.name());
+  private FieldMapping createFieldMapping(String jdbcName, String jdbcType, boolean jdbcNullable,
+      List<PdxField> pdxFields) {
+    String pdxName = null;
+    String pdxType = null;
+    for (PdxField pdxField : pdxFields) {
+      if (pdxField.getFieldName().equals(jdbcName)) {
+        pdxName = pdxField.getFieldName();
+        pdxType = pdxField.getFieldType().name();
+        break;
+      }
+    }
+    if (pdxName == null) {
+      // look for one inexact match
+      for (PdxField pdxField : pdxFields) {
+        if (pdxField.getFieldName().equalsIgnoreCase(jdbcName)) {
+          if (pdxName != null) {
+            throw new JdbcConnectorException(
+                "More than one PDX field name matched the column name \"" + 
jdbcName + "\"");
+          }
+          pdxName = pdxField.getFieldName();
+          pdxType = pdxField.getFieldType().name();
+        }
+      }
+    }
+    if (pdxName == null) {
+      throw new JdbcConnectorException(
+          "No PDX field name matched the column name \"" + jdbcName + "\"");
+    }
+    return new FieldMapping(pdxName, pdxType, jdbcName, jdbcType, jdbcNullable);
+  }
+
+  private PdxType getPdxTypeForClass(InternalCache cache, TypeRegistry typeRegistry,
+      String className) {
+    Class<?> clazz = loadPdxClass(className);
+    PdxType result = typeRegistry.getExistingTypeForClass(clazz);
+    if (result != null) {
+      return result;
+    }
+    return generatePdxTypeForClass(cache, typeRegistry, clazz);
+  }
+
+  /**
+   * Generates and returns a PdxType for the given class.
+   * The generated PdxType is also stored in the TypeRegistry.
+   *
+   * @param cache used to generate pdx type
+   * @param clazz the class to generate a PdxType for
+   * @return the generated PdxType
+   * @throws JdbcException if a PdxType can not be generated
+   */
+  private PdxType generatePdxTypeForClass(InternalCache cache, TypeRegistry typeRegistry,
+      Class<?> clazz) {
+    if (PdxSerializable.class.isAssignableFrom(clazz)) {
+      Object object = createInstance(clazz);
+      try {
+        cache.registerPdxMetaData(object);
+      } catch (SerializationException ex) {
+        throw new JdbcConnectorException(
+            "Could not generate a PdxType for the class " + clazz.getName() + 
" because: " + ex);
       }
-    } catch (IllegalStateException ex) {
+      // serialization will leave the type in the registry
+      return typeRegistry.getExistingTypeForClass(clazz);
+    }
+    // TODO
+    // Otherwise use the ReflectionBasedAutoSerializer to generate
+    // a PdxType for the class.
+    throw new JdbcConnectorException(
+        "Could not generate a PdxType for the class " + clazz.getName());
+  }
+
+  private Object createInstance(Class<?> clazz) {
+    try {
+      Constructor<?> ctor = clazz.getConstructor();
+      return ctor.newInstance(new Object[] {});
+    } catch (NoSuchMethodException | SecurityException | InstantiationException
+        | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
+      throw new JdbcConnectorException(
+          "Could not generate a PdxType for the class " + clazz.getName()
+              + " because it did not have a public zero arg constructor. 
Details: " + ex);
+    }
+  }
+
+  private Class<?> loadPdxClass(String className) {
+    try {
+      return this.classFactory.loadClass(className);
+    } catch (ClassNotFoundException ex) {
       throw new JdbcConnectorException(
-          "Could not determine what pdx field to use for the column name " + 
columnName
-              + " because " + ex.getMessage());
+          "The pdx class \"" + className + "\" could not be loaded because: " 
+ ex);
     }
   }
+
+  public interface ClassFactory {
+    public Class loadClass(String className) throws ClassNotFoundException;
+  }
 }
diff --git a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreatorTest.java b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreatorTest.java
index e70ead0..046e01a 100644
--- a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreatorTest.java
+++ b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/SqlToPdxInstanceCreatorTest.java
@@ -15,7 +15,6 @@
 package org.apache.geode.connectors.jdbc.internal;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.catchThrowable;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
@@ -23,7 +22,6 @@ import static org.mockito.Mockito.when;
 import java.sql.JDBCType;
 import java.sql.SQLException;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.Map;
 
 import junitparams.JUnitParamsRunner;
@@ -41,7 +39,6 @@ import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.FieldType;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.pdx.PdxInstanceFactory;
-import org.apache.geode.pdx.internal.PdxField;
 import org.apache.geode.pdx.internal.TypeRegistry;
 
 @RunWith(JUnitParamsRunner.class)
@@ -111,34 +108,6 @@ public class SqlToPdxInstanceCreatorTest {
     assertThat(map.get(COLUMN_NAME_1).getType()).isEqualTo(FieldType.STRING);
   }
 
-  @Test
-  public void pdxFieldGeneratedFromRegistryPdxFieldGivenNoPdxNameAndTypeInRegistry()
-      throws Exception {
-    PdxInstanceFactory factory = setupPdxInstanceFactory(null);
-    when(columnMapping.getJdbcType()).thenReturn(JDBCType.NULL.name());
-    when(columnMapping.getPdxName()).thenReturn("");
-    when(columnMapping.getPdxType()).thenReturn("");
-    TypeRegistry typeRegistry = mock(TypeRegistry.class);
-    PdxField pdxField = mock(PdxField.class);
-    when(pdxField.getFieldName()).thenReturn("customPdxFieldName");
-    when(pdxField.getFieldType()).thenReturn(FieldType.OBJECT);
-    when(typeRegistry.findFieldThatMatchesName(PDX_CLASS_NAME, COLUMN_NAME_1))
-        .thenReturn(Collections.singleton(pdxField));
-    when(cache.getPdxRegistry()).thenReturn(typeRegistry);
-
-    SqlToPdxInstance result = createSqlToPdxInstance();
-
-    verify(factory).writeObject("customPdxFieldName", null);
-    verify(factory).create();
-    assertThat(result).isNotNull();
-    assertThat(result.getPdxTemplate()).isSameAs(pdxTemplate);
-    Map<String, PdxFieldInfo> map = result.getColumnToPdxFieldMap();
-    assertThat(map).hasSize(1);
-    assertThat(map).containsKey(COLUMN_NAME_1);
-    assertThat(map.get(COLUMN_NAME_1).getName()).isEqualTo("customPdxFieldName");
-    assertThat(map.get(COLUMN_NAME_1).getType()).isEqualTo(FieldType.OBJECT);
-  }
-
   private SqlToPdxInstance createSqlToPdxInstance() throws SQLException {
     SqlToPdxInstanceCreator sqlToPdxInstanceCreator =
         new SqlToPdxInstanceCreator(cache, regionMapping);
@@ -229,119 +198,4 @@ public class SqlToPdxInstanceCreatorTest {
         throw new IllegalStateException("unhandled fieldType " + fieldType);
     }
   }
-
-  @Test
-  public void computeFieldTypeTest() {
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BOOLEAN))
-        .isEqualTo(FieldType.BOOLEAN);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.BOOLEAN))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BIT))
-        .isEqualTo(FieldType.BOOLEAN);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.BIT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.TINYINT))
-        .isEqualTo(FieldType.SHORT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.TINYINT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.SMALLINT))
-        .isEqualTo(FieldType.SHORT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.SMALLINT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.INTEGER))
-        .isEqualTo(FieldType.INT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.INTEGER))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BIGINT))
-        .isEqualTo(FieldType.LONG);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.BIGINT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.REAL))
-        .isEqualTo(FieldType.FLOAT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.REAL))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.FLOAT))
-        .isEqualTo(FieldType.DOUBLE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.FLOAT))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.DOUBLE))
-        .isEqualTo(FieldType.DOUBLE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.DOUBLE))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.DATE))
-        .isEqualTo(FieldType.DATE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.DATE))
-        .isEqualTo(FieldType.DATE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.TIME))
-        .isEqualTo(FieldType.DATE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.TIME))
-        .isEqualTo(FieldType.DATE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.TIMESTAMP))
-        .isEqualTo(FieldType.DATE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.TIMESTAMP))
-        .isEqualTo(FieldType.DATE);
-    assertThat(
-        SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.TIME_WITH_TIMEZONE))
-            .isEqualTo(FieldType.DATE);
-    assertThat(
-        SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.TIME_WITH_TIMEZONE))
-            .isEqualTo(FieldType.DATE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false,
-        JDBCType.TIMESTAMP_WITH_TIMEZONE)).isEqualTo(FieldType.DATE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true,
-        JDBCType.TIMESTAMP_WITH_TIMEZONE)).isEqualTo(FieldType.DATE);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.CHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.VARCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.LONGVARCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.NCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.NVARCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(
-        SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.LONGNVARCHAR))
-            .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BLOB))
-        .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.BINARY))
-        .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.VARBINARY))
-        .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(
-        SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.LONGVARBINARY))
-            .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.ROWID))
-        .isEqualTo(FieldType.OBJECT);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.CHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.VARCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.LONGVARCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.NCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.NVARCHAR))
-        .isEqualTo(FieldType.STRING);
-    assertThat(
-        SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.LONGNVARCHAR))
-            .isEqualTo(FieldType.STRING);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.BLOB))
-        .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.BINARY))
-        .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.VARBINARY))
-        .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(
-        SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.LONGVARBINARY))
-            .isEqualTo(FieldType.BYTE_ARRAY);
-    assertThat(SqlToPdxInstanceCreator.computeFieldType(true, JDBCType.ROWID))
-        .isEqualTo(FieldType.OBJECT);
-    Throwable throwable = catchThrowable(
-        () -> SqlToPdxInstanceCreator.computeFieldType(false, JDBCType.NULL));
-    assertThat(throwable).isInstanceOf(IllegalStateException.class);
-  }
-
-
 }
diff --git a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
index 9244903..6074e3e 100644
--- a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
+++ b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.LinkedHashSet;
+import java.util.List;
 import java.util.Set;
 
 import javax.sql.DataSource;
@@ -41,12 +42,14 @@ import org.apache.geode.connectors.jdbc.JdbcConnectorException;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler.DataSourceFactory;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
+import org.apache.geode.connectors.jdbc.internal.cli.CreateMappingPreconditionCheckFunction.ClassFactory;
 import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
 import org.apache.geode.pdx.FieldType;
 import org.apache.geode.pdx.internal.PdxField;
+import org.apache.geode.pdx.internal.PdxType;
 import org.apache.geode.pdx.internal.TypeRegistry;
 
 public class CreateMappingPreconditionCheckFunctionTest {
@@ -62,14 +65,19 @@ public class CreateMappingPreconditionCheckFunctionTest {
   private InternalCache cache;
   private TypeRegistry typeRegistry;
   private DataSourceFactory dataSourceFactory;
+  private ClassFactory classFactory;
   private TableMetaDataManager tableMetaDataManager;
   private TableMetaDataView tableMetaDataView;
   private DataSource dataSource;
+  private PdxType pdxType = mock(PdxType.class);
 
   private CreateMappingPreconditionCheckFunction function;
 
+  public static class PdxClassDummy {
+  }
+
   @Before
-  public void setUp() throws SQLException {
+  public void setUp() throws SQLException, ClassNotFoundException {
     context = mock(FunctionContext.class);
     resultSender = mock(ResultSender.class);
     cache = mock(InternalCache.class);
@@ -91,11 +99,15 @@ public class CreateMappingPreconditionCheckFunctionTest {
     Connection connection = mock(Connection.class);
     when(dataSource.getConnection()).thenReturn(connection);
     when(dataSourceFactory.getDataSource(DATA_SOURCE_NAME)).thenReturn(dataSource);
+    classFactory = mock(ClassFactory.class);
+    when(classFactory.loadClass(PDX_CLASS_NAME)).thenReturn(PdxClassDummy.class);
+    when(typeRegistry.getExistingTypeForClass(PdxClassDummy.class)).thenReturn(pdxType);
     tableMetaDataManager = mock(TableMetaDataManager.class);
     tableMetaDataView = mock(TableMetaDataView.class);
     when(tableMetaDataManager.getTableMetaDataView(connection, regionMapping))
         .thenReturn(tableMetaDataView);
-    function = new CreateMappingPreconditionCheckFunction(dataSourceFactory, tableMetaDataManager);
+    function = new CreateMappingPreconditionCheckFunction(dataSourceFactory, classFactory,
+        tableMetaDataManager);
   }
 
   @Test
@@ -141,6 +153,18 @@ public class CreateMappingPreconditionCheckFunctionTest {
   }
 
   @Test
+  public void executeFunctionThrowsIfClassNotFound() throws ClassNotFoundException {
+    ClassNotFoundException ex = new ClassNotFoundException("class not found");
+    when(classFactory.loadClass(PDX_CLASS_NAME)).thenThrow(ex);
+
+    Throwable throwable = catchThrowable(() -> function.executeFunction(context));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessage("The pdx class \"" + PDX_CLASS_NAME
+            + "\" could not be loaded because: 
java.lang.ClassNotFoundException: class not found");
+  }
+
+  @Test
   public void executeFunctionReturnsNoFieldMappingsIfNoColumns() throws Exception {
     Set<String> columnNames = Collections.emptySet();
     when(tableMetaDataView.getColumnNames()).thenReturn(columnNames);
@@ -161,6 +185,15 @@ public class CreateMappingPreconditionCheckFunctionTest {
     when(tableMetaDataView.getColumnDataType("col1")).thenReturn(JDBCType.DATE);
     when(tableMetaDataView.isColumnNullable("col2")).thenReturn(true);
     when(tableMetaDataView.getColumnDataType("col2")).thenReturn(JDBCType.DATE);
+    when(pdxType.getFieldCount()).thenReturn(2);
+    PdxField field1 = mock(PdxField.class);
+    when(field1.getFieldName()).thenReturn("col1");
+    when(field1.getFieldType()).thenReturn(FieldType.DATE);
+    PdxField field2 = mock(PdxField.class);
+    when(field2.getFieldName()).thenReturn("col2");
+    when(field2.getFieldType()).thenReturn(FieldType.DATE);
+    List<PdxField> pdxFields = Arrays.asList(field1, field2);
+    when(pdxType.getFields()).thenReturn(pdxFields);
 
     CliFunctionResult result = function.executeFunction(context);
 
@@ -170,10 +203,10 @@ public class CreateMappingPreconditionCheckFunctionTest {
     assertThat(fieldsMappings).hasSize(2);
     assertThat(fieldsMappings.get(0))
         .isEqualTo(
-            new FieldMapping("", "", "col1", JDBCType.DATE.name(), false));
+            new FieldMapping("col1", FieldType.DATE.name(), "col1", 
JDBCType.DATE.name(), false));
     assertThat(fieldsMappings.get(1))
         .isEqualTo(
-            new FieldMapping("", "", "col2", JDBCType.DATE.name(), true));
+            new FieldMapping("col2", FieldType.DATE.name(), "col2", 
JDBCType.DATE.name(), true));
   }
 
   @Test
@@ -186,8 +219,8 @@ public class CreateMappingPreconditionCheckFunctionTest {
     PdxField pdxField1 = mock(PdxField.class);
     when(pdxField1.getFieldName()).thenReturn("COL1");
     when(pdxField1.getFieldType()).thenReturn(FieldType.LONG);
-    when(typeRegistry.findFieldThatMatchesName(PDX_CLASS_NAME, "col1"))
-        .thenReturn(Collections.singleton(pdxField1));
+    when(pdxType.getFieldCount()).thenReturn(1);
+    when(pdxType.getFields()).thenReturn(Arrays.asList(pdxField1));
 
     CliFunctionResult result = function.executeFunction(context);
 
@@ -207,14 +240,57 @@ public class CreateMappingPreconditionCheckFunctionTest {
     when(tableMetaDataView.getColumnNames()).thenReturn(columnNames);
     when(tableMetaDataView.isColumnNullable("col1")).thenReturn(false);
     when(tableMetaDataView.getColumnDataType("col1")).thenReturn(JDBCType.DATE);
-    when(typeRegistry.findFieldThatMatchesName(PDX_CLASS_NAME, "col1"))
-        .thenThrow(new IllegalStateException("reason"));
+    when(pdxType.getFieldCount()).thenReturn(1);
+    PdxField pdxField1 = mock(PdxField.class);
+    when(pdxField1.getFieldName()).thenReturn("COL1");
+    when(pdxField1.getFieldType()).thenReturn(FieldType.DATE);
+    PdxField pdxField2 = mock(PdxField.class);
+    when(pdxField2.getFieldName()).thenReturn("Col1");
+    when(pdxField2.getFieldType()).thenReturn(FieldType.DATE);
+    when(pdxType.getFields()).thenReturn(Arrays.asList(pdxField1, pdxField2));
+
+    Throwable throwable = catchThrowable(() -> function.executeFunction(context));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessage("More than one PDX field name matched the column name 
\"col1\"");
+  }
+
+  @Test
+  public void executeFunctionThrowsGivenExistingPdxTypeWithNoMatches()
+      throws Exception {
+    Set<String> columnNames = new LinkedHashSet<>(Arrays.asList("col1"));
+    when(tableMetaDataView.getColumnNames()).thenReturn(columnNames);
+    when(tableMetaDataView.isColumnNullable("col1")).thenReturn(false);
+    when(tableMetaDataView.getColumnDataType("col1")).thenReturn(JDBCType.DATE);
+    when(pdxType.getFieldCount()).thenReturn(1);
+    PdxField pdxField1 = mock(PdxField.class);
+    when(pdxField1.getFieldName()).thenReturn("pdxCOL1");
+    when(pdxField1.getFieldType()).thenReturn(FieldType.DATE);
+    PdxField pdxField2 = mock(PdxField.class);
+    when(pdxField2.getFieldName()).thenReturn("pdxCol1");
+    when(pdxField2.getFieldType()).thenReturn(FieldType.DATE);
+    when(pdxType.getFields()).thenReturn(Arrays.asList(pdxField1, pdxField2));
+
+    Throwable throwable = catchThrowable(() -> function.executeFunction(context));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessage("No PDX field name matched the column name \"col1\"");
+  }
+
+  @Test
+  public void executeFunctionThrowsGivenExistingPdxTypeWithWrongNumberOfFields()
+      throws Exception {
+    Set<String> columnNames = new LinkedHashSet<>(Arrays.asList("col1"));
+    when(tableMetaDataView.getColumnNames()).thenReturn(columnNames);
+    when(tableMetaDataView.isColumnNullable("col1")).thenReturn(false);
+    when(tableMetaDataView.getColumnDataType("col1")).thenReturn(JDBCType.DATE);
+    when(pdxType.getFieldCount()).thenReturn(2);
 
     Throwable throwable = catchThrowable(() -> function.executeFunction(context));
 
     assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
         .hasMessage(
-            "Could not determine what pdx field to use for the column name 
col1 because reason");
+            "The table and pdx class must have the same number of 
columns/fields. But the table has 1 columns and the pdx class has 2 fields.");
   }
 
   @Test
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
index e3bdf55..2adb7c9 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
@@ -16,8 +16,6 @@ package org.apache.geode.pdx.internal;
 
 import static java.lang.Integer.valueOf;
 
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
@@ -154,7 +152,8 @@ public class TypeRegistry {
   }
 
   public PdxType getExistingTypeForClass(Class<?> aClass) {
-    return this.localTypeIds.get(aClass);
+    PdxType result = this.localTypeIds.get(aClass);
+    return result;
   }
 
   /**
@@ -540,64 +539,4 @@ public class TypeRegistry {
   public void setPdxReadSerializedOverride(boolean overridePdxReadSerialized) {
     pdxReadSerializedOverride.set(overridePdxReadSerialized);
   }
-
-  /**
-   * Find and return the pdx fields that match the given class and field name.
-   * An exact match will be looked for first; followed by a
-   * case insensitive match.
-   *
-   * @param pdxClassName the pdx type's class
-   * @param name the name of the field to find
-   * @return Set<PdxField> that match class and field name
-   * @throws IllegalStateException if more than one field matched
-   */
-   */
-  public Set<PdxField> findFieldThatMatchesName(String pdxClassName, String name) {
-    Set<PdxType> pdxTypes = getPdxTypesForClassName(pdxClassName);
-    if (pdxTypes.isEmpty()) {
-      return Collections.emptySet();
-    }
-    Set<PdxField> foundFields = findExactMatch(name, pdxTypes);
-    if (foundFields.isEmpty()) {
-      foundFields = findCaseInsensitiveMatch(name, pdxTypes);
-    }
-    return foundFields;
-  }
-
-
-  private Set<PdxField> findCaseInsensitiveMatch(String name, Set<PdxType> pdxTypes) {
-    HashSet<String> matchingFieldNames = new HashSet<>();
-    for (PdxType pdxType : pdxTypes) {
-      for (String existingFieldName : pdxType.getFieldNames()) {
-        if (existingFieldName.equalsIgnoreCase(name)) {
-          matchingFieldNames.add(existingFieldName);
-        }
-      }
-    }
-    if (matchingFieldNames.isEmpty()) {
-      return Collections.emptySet();
-    } else if (matchingFieldNames.size() > 1) {
-      throw new IllegalStateException(
-          "the pdx fields " + String.join(", ", matchingFieldNames) + " all 
match " + name);
-    }
-    String matchingFieldName = matchingFieldNames.iterator().next();
-    return findExactMatch(matchingFieldName, pdxTypes);
-  }
-
-  private Set<PdxField> findExactMatch(String name, Set<PdxType> pdxTypes) {
-    Set<PdxField> result = null;
-    for (PdxType pdxType : pdxTypes) {
-      PdxField foundField = pdxType.getPdxField(name);
-      if (foundField != null) {
-        if (result == null) {
-          result = new HashSet<>();
-        }
-        result.add(foundField);
-      }
-    }
-    if (result == null) {
-      result = Collections.emptySet();
-    }
-    return result;
-  }
-
 }
diff --git a/geode-core/src/test/java/org/apache/geode/pdx/internal/TypeRegistryTest.java b/geode-core/src/test/java/org/apache/geode/pdx/internal/TypeRegistryTest.java
deleted file mode 100644
index 72a7490..0000000
--- a/geode-core/src/test/java/org/apache/geode/pdx/internal/TypeRegistryTest.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements. See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.geode.pdx.internal;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.catchThrowable;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import org.apache.geode.internal.cache.InternalCache;
-import org.apache.geode.test.junit.categories.SerializationTest;
-
-@Category({SerializationTest.class})
-public class TypeRegistryTest {
-  private final InternalCache cache = mock(InternalCache.class);
-  private TypeRegistration distributedTypeRegistry = mock(TypeRegistration.class);
-  private final TypeRegistry typeRegistry = new TypeRegistry(cache, distributedTypeRegistry);
-  private static final String PDX_CLASS_NAME = "pdxClassName";
-
-  @Test
-  public void findFieldThatMatchesNameReturnsEmptyGivenNoTypes() {
-    Set<PdxType> pdxTypesForClass = Collections.emptySet();
-    when(distributedTypeRegistry.getPdxTypesForClassName(PDX_CLASS_NAME))
-        .thenReturn(pdxTypesForClass);
-
-    Set<PdxField> foundFields =
-        this.typeRegistry.findFieldThatMatchesName(PDX_CLASS_NAME, "fieldName");
-
-    assertThat(foundFields).isEmpty();
-  }
-
-  @Test
-  public void findFieldThatMatchesNameReturnsFieldThatExactlyMatches() {
-    PdxType exactMatchType = mock(PdxType.class);
-    PdxField exactMatchField = mock(PdxField.class);
-    when(exactMatchType.getPdxField("fieldName")).thenReturn(exactMatchField);
-    Set<PdxType> pdxTypesForClass = new HashSet<>(Arrays.asList(exactMatchType));
-    when(distributedTypeRegistry.getPdxTypesForClassName(PDX_CLASS_NAME))
-        .thenReturn(pdxTypesForClass);
-
-    Set<PdxField> foundFields =
-        this.typeRegistry.findFieldThatMatchesName(PDX_CLASS_NAME, "fieldName");
-
-    assertThat(foundFields).containsExactly(exactMatchField);
-  }
-
-  @Test
-  public void findFieldThatMatchesNameReturnsTwoFieldsThatExactlyMatch() {
-    PdxType exactMatchType = mock(PdxType.class);
-    PdxField exactMatchField = mock(PdxField.class);
-    when(exactMatchType.getPdxField("fieldName")).thenReturn(exactMatchField);
-    PdxType exactMatchType2 = mock(PdxType.class);
-    PdxField exactMatchField2 = mock(PdxField.class);
-    when(exactMatchType2.getPdxField("fieldName")).thenReturn(exactMatchField2);
-    Set<PdxType> pdxTypesForClass = new HashSet<>(Arrays.asList(exactMatchType, exactMatchType2));
-    when(distributedTypeRegistry.getPdxTypesForClassName(PDX_CLASS_NAME))
-        .thenReturn(pdxTypesForClass);
-
-    Set<PdxField> foundFields =
-        this.typeRegistry.findFieldThatMatchesName(PDX_CLASS_NAME, "fieldName");
-
-    assertThat(foundFields).containsExactlyInAnyOrder(exactMatchField, exactMatchField2);
-  }
-
-  @Test
-  public void findFieldThatMatchesNameReturnsFieldThatInexactlyMatches() {
-    PdxType inexactMatchType = mock(PdxType.class);
-    PdxField inexactMatchField = mock(PdxField.class);
-    when(inexactMatchType.getPdxField("fieldName")).thenReturn(null);
-    when(inexactMatchType.getFieldNames()).thenReturn(Arrays.asList("skipThisOne", "FIELDNAME"));
-    when(inexactMatchType.getPdxField("FIELDNAME")).thenReturn(inexactMatchField);
-    Set<PdxType> pdxTypesForClass = new HashSet<>(Arrays.asList(inexactMatchType));
-    when(distributedTypeRegistry.getPdxTypesForClassName(PDX_CLASS_NAME))
-        .thenReturn(pdxTypesForClass);
-
-    Set<PdxField> foundFields =
-        this.typeRegistry.findFieldThatMatchesName(PDX_CLASS_NAME, "fieldName");
-
-    assertThat(foundFields).containsExactly(inexactMatchField);
-  }
-
-  @Test
-  public void findFieldThatMatchesNameReturnsEmptyIfFieldExistButNoneMatch() {
-    PdxType noMatchType = mock(PdxType.class);
-    when(noMatchType.getPdxField("fieldName")).thenReturn(null);
-    when(noMatchType.getFieldNames()).thenReturn(Arrays.asList("nomatch1", "nomatch2"));
-    Set<PdxType> pdxTypesForClass = new HashSet<>(Arrays.asList(noMatchType));
-    when(distributedTypeRegistry.getPdxTypesForClassName(PDX_CLASS_NAME))
-        .thenReturn(pdxTypesForClass);
-
-    Set<PdxField> foundFields =
-        this.typeRegistry.findFieldThatMatchesName(PDX_CLASS_NAME, "fieldName");
-
-    assertThat(foundFields).isEmpty();
-  }
-
-  @Test
-  public void findFieldThatMatchesNameThrowsIfMoreThanOneMatch() {
-    PdxType inexactMatchType = mock(PdxType.class);
-    when(inexactMatchType.getPdxField("fieldName")).thenReturn(null);
-    when(inexactMatchType.getFieldNames()).thenReturn(Arrays.asList("fieldname", "FIELDNAME"));
-    Set<PdxType> pdxTypesForClass = new HashSet<>(Arrays.asList(inexactMatchType));
-    when(distributedTypeRegistry.getPdxTypesForClassName(PDX_CLASS_NAME))
-        .thenReturn(pdxTypesForClass);
-
-    Throwable throwable = catchThrowable(
-        () -> this.typeRegistry.findFieldThatMatchesName(PDX_CLASS_NAME, "fieldName"));
-
-    assertThat(throwable).isInstanceOf(IllegalStateException.class)
-        .hasMessage("the pdx fields fieldname, FIELDNAME all match fieldName");
-  }
-}
