This is an automated email from the ASF dual-hosted git repository.

onichols pushed a commit to branch develop
in repository https://gitbox.apache.org/repos/asf/geode.git


The following commit(s) were added to refs/heads/develop by this push:
     new 723429f  GEODE-8626: Omitting field-mapping tag of cache.xml when 
using Simple JDBC Connector (#5637)
723429f is described below

commit 723429f291f48f309acc3407f1405339ddbcfc20
Author: Masaki Yamakawa <[email protected]>
AuthorDate: Sat Sep 11 09:02:07 2021 +0900

    GEODE-8626: Omitting field-mapping tag of cache.xml when using Simple JDBC 
Connector (#5637)
    
    * Omitting field-mapping tag of cache.xml when using Simple JDBC Connector
    * Use the default mapping when using the Simple JDBC Connector in cache.xml 
and without the field-mapping tag
    * Move some methods of CreateMappingPreconditionCheckFunction class to 
JdbcConnectorServiceImpl class
---
 .../jdbc/CacheXmlJdbcMappingIntegrationTest.java   | 268 +++++++++++++++++
 .../connectors/jdbc/NonSerializedEmployee.java     |  44 +++
 ...cMappingIntegrationTest.FieldMappings.cache.xml |  47 +++
 ...eldMappingsColumnNamesWithUnderscores.cache.xml |  47 +++
 ...appingIntegrationTest.NoFieldMappings.cache.xml |  44 +++
 ...NoFieldMappingsWithNonSerializedClass.cache.xml |  44 +++
 ...bcMappingIntegrationTest.WrongPdxName.cache.xml |  44 +++
 .../jdbc/internal/JdbcConnectorService.java        |   6 +
 .../jdbc/internal/JdbcConnectorServiceImpl.java    |  62 ++++
 .../CreateMappingPreconditionCheckFunction.java    |  58 +---
 .../internal/xml/RegionMappingConfiguration.java   | 129 ++++++++
 .../jdbc/internal/JdbcConnectorServiceTest.java    |  98 +++++++
 ...CreateMappingPreconditionCheckFunctionTest.java |   6 +
 .../xml/RegionMappingConfigurationTest.java        | 325 +++++++++++++++++++++
 14 files changed, 1168 insertions(+), 54 deletions(-)

diff --git 
a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.java
 
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.java
new file mode 100644
index 0000000..13ef725
--- /dev/null
+++ 
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.java
@@ -0,0 +1,268 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional 
information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache 
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the 
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software 
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express
+ * or implied. See the License for the specific language governing permissions 
and limitations under
+ * the License.
+ */
+package org.apache.geode.connectors.jdbc;
+
+import static 
org.apache.geode.test.util.ResourceUtils.createTempFileFromResource;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.catchThrowable;
+
+import java.net.URL;
+import java.sql.Connection;
+import java.sql.JDBCType;
+import java.sql.Statement;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.contrib.java.lang.system.RestoreSystemProperties;
+
+import org.apache.geode.cache.CacheFactory;
+import org.apache.geode.connectors.jdbc.internal.JdbcConnectorService;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
+import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
+import 
org.apache.geode.connectors.jdbc.test.junit.rules.DatabaseConnectionRule;
+import org.apache.geode.connectors.jdbc.test.junit.rules.MySqlConnectionRule;
+import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.internal.jndi.JNDIInvoker;
+import org.apache.geode.pdx.FieldType;
+import org.apache.geode.pdx.internal.AutoSerializableManager;
+
+public class CacheXmlJdbcMappingIntegrationTest {
+
+  private static final URL COMPOSE_RESOURCE_PATH =
+      CacheXmlJdbcMappingIntegrationTest.class.getResource("/mysql.yml");
+
+  private static final String DATA_SOURCE_NAME = "TestDataSource";
+  private static final String DB_NAME = "test";
+  private static final String REGION_TABLE_NAME = "employees";
+  private static final String REGION_NAME = "Region1";
+
+  @Rule
+  public RestoreSystemProperties restoreSystemProperties = new 
RestoreSystemProperties();
+
+  private Connection connection;
+  private Statement statement;
+  private InternalCache cache;
+
+  @ClassRule
+  public static DatabaseConnectionRule dbRule = new 
MySqlConnectionRule.Builder()
+      .file(COMPOSE_RESOURCE_PATH.getPath()).build();
+
+  @Before
+  public void setUp() throws Exception {
+    System.setProperty(AutoSerializableManager.NO_HARDCODED_EXCLUDES_PARAM, 
"true");
+    connection = dbRule.getConnection();
+    statement = connection.createStatement();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    JNDIInvoker.unMapDatasource(DATA_SOURCE_NAME);
+
+    if (cache != null) {
+      cache.close();
+    }
+
+    if (statement == null) {
+      statement = connection.createStatement();
+    }
+    statement.execute("Drop table IF EXISTS " + REGION_TABLE_NAME);
+    statement.close();
+
+    if (connection != null) {
+      connection.close();
+    }
+  }
+
+  private InternalCache createCacheAndCreateJdbcMapping(String 
cacheXmlTestName) {
+    String url = dbRule.getConnectionUrl().replaceAll("&", "&amp;");
+    System.setProperty("TestDataSourceUrl", url);
+    InternalCache cache =
+        (InternalCache) new CacheFactory().set("locators", 
"").set("mcast-port", "0")
+            .set("cache-xml-file", getXmlFileForTest(cacheXmlTestName))
+            .create();
+    return cache;
+  }
+
+  private InternalCache createCacheAndCreateJdbcMappingWithWrongDataSource(
+      String cacheXmlTestName) {
+    System.setProperty("TestDataSourceUrl", "jdbc:mysql://localhost/test");
+    InternalCache cache =
+        (InternalCache) new CacheFactory().set("locators", 
"").set("mcast-port", "0")
+            .set("cache-xml-file", getXmlFileForTest(cacheXmlTestName))
+            .create();
+    return cache;
+  }
+
+  private String getXmlFileForTest(String testName) {
+    return createTempFileFromResource(getClass(),
+        getClassSimpleName() + "." + testName + 
".cache.xml").getAbsolutePath();
+  }
+
+  private String getClassSimpleName() {
+    return getClass().getSimpleName();
+  }
+
+  private void createEmployeeTable() throws Exception {
+    statement.execute("Create Table " + REGION_TABLE_NAME
+        + " (id varchar(10) primary key not null, name varchar(10), age int)");
+  }
+
+  private void createEmployeeTableWithColumnNamesWithUnderscores() throws 
Exception {
+    statement.execute("Create Table " + REGION_TABLE_NAME
+        + " (id varchar(10) primary key not null, _name varchar(10), _age 
int)");
+  }
+
+  private List<FieldMapping> getEmployeeTableFieldMappings() {
+    List<FieldMapping> fieldMappings = Arrays.asList(
+        new FieldMapping("id", FieldType.STRING.name(), "id", 
JDBCType.VARCHAR.name(), false),
+        new FieldMapping("name", FieldType.STRING.name(), "name", 
JDBCType.VARCHAR.name(), true),
+        new FieldMapping("age", FieldType.INT.name(), "age", 
JDBCType.INTEGER.name(), true));
+    return fieldMappings;
+  }
+
+  private List<FieldMapping> 
getEmployeeTableColumnNameWithUnderscoresFieldMappings() {
+    List<FieldMapping> fieldMappings = Arrays.asList(
+        new FieldMapping("id", FieldType.STRING.name(), "id", 
JDBCType.VARCHAR.name(), false),
+        new FieldMapping("name", FieldType.STRING.name(), "_name", 
JDBCType.VARCHAR.name(), true),
+        new FieldMapping("age", FieldType.INT.name(), "_age", 
JDBCType.INTEGER.name(), true));
+    return fieldMappings;
+  }
+
+  @Test
+  public void mappingSuccessWhenFieldMappingsAreExists() throws Exception {
+    createEmployeeTable();
+
+    cache = createCacheAndCreateJdbcMapping("FieldMappings");
+    JdbcConnectorService service = 
cache.getService(JdbcConnectorService.class);
+
+    RegionMapping mapping = service.getMappingForRegion(REGION_NAME);
+    assertThat(mapping.getDataSourceName()).isEqualTo(DATA_SOURCE_NAME);
+    assertThat(mapping.getTableName()).isEqualTo(REGION_TABLE_NAME);
+    assertThat(mapping.getRegionName()).isEqualTo(REGION_NAME);
+    assertThat(mapping.getPdxName()).isEqualTo(Employee.class.getName());
+    assertThat(mapping.getIds()).isEqualTo("id");
+    assertThat(mapping.getFieldMappings().size()).isEqualTo(3);
+    
assertThat(mapping.getFieldMappings()).containsAll(getEmployeeTableFieldMappings());
+  }
+
+  @Test
+  public void mappingSuccessWhenFieldMappingsAreOmitted() throws Exception {
+    createEmployeeTable();
+
+    cache = createCacheAndCreateJdbcMapping("NoFieldMappings");
+    JdbcConnectorService service = 
cache.getService(JdbcConnectorService.class);
+
+    RegionMapping mapping = service.getMappingForRegion(REGION_NAME);
+    assertThat(mapping.getDataSourceName()).isEqualTo(DATA_SOURCE_NAME);
+    assertThat(mapping.getTableName()).isEqualTo(REGION_TABLE_NAME);
+    assertThat(mapping.getRegionName()).isEqualTo(REGION_NAME);
+    assertThat(mapping.getPdxName()).isEqualTo(Employee.class.getName());
+    assertThat(mapping.getIds()).isEqualTo("id");
+    assertThat(mapping.getFieldMappings().size()).isEqualTo(3);
+    
assertThat(mapping.getFieldMappings()).containsAll(getEmployeeTableFieldMappings());
+  }
+
+  @Test
+  public void 
mappingSuccessWhenFieldMappingsAreOmittedWithNonSerializedClass() throws 
Exception {
+    createEmployeeTable();
+
+    cache = 
createCacheAndCreateJdbcMapping("NoFieldMappingsWithNonSerializedClass");
+    JdbcConnectorService service = 
cache.getService(JdbcConnectorService.class);
+
+    RegionMapping mapping = service.getMappingForRegion(REGION_NAME);
+    assertThat(mapping.getDataSourceName()).isEqualTo(DATA_SOURCE_NAME);
+    assertThat(mapping.getTableName()).isEqualTo(REGION_TABLE_NAME);
+    assertThat(mapping.getRegionName()).isEqualTo(REGION_NAME);
+    
assertThat(mapping.getPdxName()).isEqualTo(NonSerializedEmployee.class.getName());
+    assertThat(mapping.getIds()).isEqualTo("id");
+    assertThat(mapping.getFieldMappings().size()).isEqualTo(3);
+    
assertThat(mapping.getFieldMappings()).containsAll(getEmployeeTableFieldMappings());
+  }
+
+  @Test
+  public void mappingFailureWhenConnectWrongDataSource() {
+    Throwable throwable =
+        catchThrowable(() -> 
createCacheAndCreateJdbcMappingWithWrongDataSource("NoFieldMappings"));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessage(String.format("No datasource \"%s\" found when creating 
default field mapping",
+            DATA_SOURCE_NAME));
+  }
+
+  @Test
+  public void mappingFailureWhenTableNotExists() {
+    Throwable throwable = catchThrowable(() -> 
createCacheAndCreateJdbcMapping("NoFieldMappings"));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessage(String.format("No table was found that matches \"%s\"", 
REGION_TABLE_NAME));
+  }
+
+  @Test
+  public void mappingFailureWhenPdxNotExists() throws Exception {
+    createEmployeeTable();
+
+    Throwable throwable =
+        catchThrowable(() -> createCacheAndCreateJdbcMapping("WrongPdxName"));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessageContaining(
+            "The pdx class \"org.apache.geode.connectors.jdbc.NoPdx\" could 
not be loaded because: java.lang.ClassNotFoundException: 
org.apache.geode.connectors.jdbc.NoPdx");
+  }
+
+  @Test
+  public void mappingFailureWhenPdxFieldAndTableMetaDataUnMatch() throws 
Exception {
+    createEmployeeTableWithColumnNamesWithUnderscores();
+
+    Throwable throwable = catchThrowable(() -> 
createCacheAndCreateJdbcMapping("NoFieldMappings"));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessage("No PDX field name matched the column name \"_name\"");
+  }
+
+  @Test
+  public void mappingFailureWhenFieldMappingAndTableMetaDataUnMatch() throws 
Exception {
+    createEmployeeTableWithColumnNamesWithUnderscores();
+
+    Throwable throwable = catchThrowable(() -> 
createCacheAndCreateJdbcMapping("FieldMappings"));
+
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class)
+        .hasMessageContaining(
+            String.format("Jdbc mapping for \"%s\" does not match table 
definition", REGION_NAME));
+  }
+
+  @Test
+  public void 
mappingSuccessWhenPdxFieldAndTableMetaDataUnMatchButFieldMappingMatch()
+      throws Exception {
+    createEmployeeTableWithColumnNamesWithUnderscores();
+
+    cache = 
createCacheAndCreateJdbcMapping("FieldMappingsColumnNamesWithUnderscores");
+    JdbcConnectorService service = 
cache.getService(JdbcConnectorService.class);
+
+    RegionMapping mapping = service.getMappingForRegion(REGION_NAME);
+    assertThat(mapping.getDataSourceName()).isEqualTo(DATA_SOURCE_NAME);
+    assertThat(mapping.getTableName()).isEqualTo(REGION_TABLE_NAME);
+    assertThat(mapping.getRegionName()).isEqualTo(REGION_NAME);
+    assertThat(mapping.getPdxName()).isEqualTo(Employee.class.getName());
+    assertThat(mapping.getIds()).isEqualTo("id");
+    assertThat(mapping.getFieldMappings().size()).isEqualTo(3);
+    assertThat(mapping.getFieldMappings())
+        .containsAll(getEmployeeTableColumnNameWithUnderscoresFieldMappings());
+  }
+}
diff --git 
a/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/NonSerializedEmployee.java
 
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/NonSerializedEmployee.java
new file mode 100644
index 0000000..5f3fdbc
--- /dev/null
+++ 
b/geode-connectors/src/acceptanceTest/java/org/apache/geode/connectors/jdbc/NonSerializedEmployee.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional 
information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache 
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the 
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software 
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express
+ * or implied. See the License for the specific language governing permissions 
and limitations under
+ * the License.
+ */
+package org.apache.geode.connectors.jdbc;
+
+@SuppressWarnings("unused")
+public class NonSerializedEmployee {
+  private String id;
+  private String name;
+  private int age;
+
+  public NonSerializedEmployee() {
+    // nothing
+  }
+
+  NonSerializedEmployee(String id, String name, int age) {
+    this.id = id;
+    this.name = name;
+    this.age = age;
+  }
+
+  String getId() {
+    return id;
+  }
+
+  String getName() {
+    return name;
+  }
+
+  int getAge() {
+    return age;
+  }
+}
diff --git 
a/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.FieldMappings.cache.xml
 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.FieldMappings.cache.xml
new file mode 100644
index 0000000..373d48d
--- /dev/null
+++ 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.FieldMappings.cache.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<cache
+        xmlns="http://geode.apache.org/schema/cache"
+        xmlns:jdbc="http://geode.apache.org/schema/jdbc"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://geode.apache.org/schema/cache
+        http://geode.apache.org/schema/cache/cache-1.0.xsd
+        http://geode.apache.org/schema/jdbc
+        http://geode.apache.org/schema/jdbc/jdbc-1.0.xsd"
+        version="1.0">
+
+    <region name="Region1" refid="REPLICATE">
+        <jdbc:mapping
+                data-source="TestDataSource"
+                table="employees"
+                pdx-name="org.apache.geode.connectors.jdbc.Employee"
+                ids="id">
+            <jdbc:field-mapping pdx-name="id" pdx-type="STRING" jdbc-name="id" 
jdbc-type="VARCHAR" jdbc-nullable="false"/>
+            <jdbc:field-mapping pdx-name="name" pdx-type="STRING" 
jdbc-name="name" jdbc-type="VARCHAR" jdbc-nullable="true"/>
+            <jdbc:field-mapping pdx-name="age" pdx-type="INT" jdbc-name="age" 
jdbc-type="INTEGER" jdbc-nullable="true"/>
+        </jdbc:mapping>
+    </region>
+
+    <jndi-bindings>
+        <jndi-binding type="SimpleDataSource"
+                      jndi-name="TestDataSource"
+                      jdbc-driver-class="com.mysql.jdbc.Driver"
+                      connection-url="${TestDataSourceUrl}">
+        </jndi-binding>
+    </jndi-bindings>
+</cache>
diff --git 
a/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.FieldMappingsColumnNamesWithUnderscores.cache.xml
 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.FieldMappingsColumnNamesWithUnderscores.cache.xml
new file mode 100644
index 0000000..1df7cce
--- /dev/null
+++ 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.FieldMappingsColumnNamesWithUnderscores.cache.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<cache
+        xmlns="http://geode.apache.org/schema/cache"
+        xmlns:jdbc="http://geode.apache.org/schema/jdbc"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://geode.apache.org/schema/cache
+        http://geode.apache.org/schema/cache/cache-1.0.xsd
+        http://geode.apache.org/schema/jdbc
+        http://geode.apache.org/schema/jdbc/jdbc-1.0.xsd"
+        version="1.0">
+
+    <region name="Region1" refid="REPLICATE">
+        <jdbc:mapping
+                data-source="TestDataSource"
+                table="employees"
+                pdx-name="org.apache.geode.connectors.jdbc.Employee"
+                ids="id">
+            <jdbc:field-mapping pdx-name="id" pdx-type="STRING" jdbc-name="id" 
jdbc-type="VARCHAR" jdbc-nullable="false"/>
+            <jdbc:field-mapping pdx-name="name" pdx-type="STRING" 
jdbc-name="_name" jdbc-type="VARCHAR" jdbc-nullable="true"/>
+            <jdbc:field-mapping pdx-name="age" pdx-type="INT" jdbc-name="_age" 
jdbc-type="INTEGER" jdbc-nullable="true"/>
+        </jdbc:mapping>
+    </region>
+
+    <jndi-bindings>
+        <jndi-binding type="SimpleDataSource"
+                      jndi-name="TestDataSource"
+                      jdbc-driver-class="com.mysql.jdbc.Driver"
+                      connection-url="${TestDataSourceUrl}">
+        </jndi-binding>
+    </jndi-bindings>
+</cache>
diff --git 
a/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.NoFieldMappings.cache.xml
 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.NoFieldMappings.cache.xml
new file mode 100644
index 0000000..d7ac197
--- /dev/null
+++ 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.NoFieldMappings.cache.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<cache
+        xmlns="http://geode.apache.org/schema/cache"
+        xmlns:jdbc="http://geode.apache.org/schema/jdbc"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://geode.apache.org/schema/cache
+        http://geode.apache.org/schema/cache/cache-1.0.xsd
+        http://geode.apache.org/schema/jdbc
+        http://geode.apache.org/schema/jdbc/jdbc-1.0.xsd"
+        version="1.0">
+
+    <region name="Region1" refid="REPLICATE">
+        <jdbc:mapping
+                data-source="TestDataSource"
+                table="employees"
+                pdx-name="org.apache.geode.connectors.jdbc.Employee"
+                ids="id"
+        />
+    </region>
+
+    <jndi-bindings>
+        <jndi-binding type="SimpleDataSource"
+                      jndi-name="TestDataSource"
+                      jdbc-driver-class="com.mysql.jdbc.Driver"
+                      connection-url="${TestDataSourceUrl}">
+        </jndi-binding>
+    </jndi-bindings>
+</cache>
diff --git 
a/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.NoFieldMappingsWithNonSerializedClass.cache.xml
 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.NoFieldMappingsWithNonSerializedClass.cache.xml
new file mode 100644
index 0000000..2a41f0a
--- /dev/null
+++ 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.NoFieldMappingsWithNonSerializedClass.cache.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<cache
+        xmlns="http://geode.apache.org/schema/cache"
+        xmlns:jdbc="http://geode.apache.org/schema/jdbc"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://geode.apache.org/schema/cache
+        http://geode.apache.org/schema/cache/cache-1.0.xsd
+        http://geode.apache.org/schema/jdbc
+        http://geode.apache.org/schema/jdbc/jdbc-1.0.xsd"
+        version="1.0">
+
+    <region name="Region1" refid="REPLICATE">
+        <jdbc:mapping
+                data-source="TestDataSource"
+                table="employees"
+                
pdx-name="org.apache.geode.connectors.jdbc.NonSerializedEmployee"
+                ids="id"
+        />
+    </region>
+
+    <jndi-bindings>
+        <jndi-binding type="SimpleDataSource"
+                      jndi-name="TestDataSource"
+                      jdbc-driver-class="com.mysql.jdbc.Driver"
+                      connection-url="${TestDataSourceUrl}">
+        </jndi-binding>
+    </jndi-bindings>
+</cache>
diff --git 
a/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.WrongPdxName.cache.xml
 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.WrongPdxName.cache.xml
new file mode 100644
index 0000000..9e02f07
--- /dev/null
+++ 
b/geode-connectors/src/acceptanceTest/resources/org/apache/geode/connectors/jdbc/CacheXmlJdbcMappingIntegrationTest.WrongPdxName.cache.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<cache
+        xmlns="http://geode.apache.org/schema/cache"
+        xmlns:jdbc="http://geode.apache.org/schema/jdbc"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://geode.apache.org/schema/cache
+        http://geode.apache.org/schema/cache/cache-1.0.xsd
+        http://geode.apache.org/schema/jdbc
+        http://geode.apache.org/schema/jdbc/jdbc-1.0.xsd"
+        version="1.0">
+
+    <region name="Region1" refid="REPLICATE">
+        <jdbc:mapping
+                data-source="TestDataSource"
+                table="employees"
+                pdx-name="org.apache.geode.connectors.jdbc.NoPdx"
+                ids="id"
+        />
+    </region>
+
+    <jndi-bindings>
+        <jndi-binding type="SimpleDataSource"
+                      jndi-name="TestDataSource"
+                      jdbc-driver-class="com.mysql.jdbc.Driver"
+                      connection-url="${TestDataSourceUrl}">
+        </jndi-binding>
+    </jndi-bindings>
+</cache>
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorService.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorService.java
index 4ae915a..c23c598 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorService.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorService.java
@@ -14,14 +14,17 @@
  */
 package org.apache.geode.connectors.jdbc.internal;
 
+import java.util.List;
 import java.util.Set;
 
 import javax.sql.DataSource;
 
 import org.apache.geode.annotations.Experimental;
 import org.apache.geode.cache.Cache;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
 import org.apache.geode.internal.cache.CacheService;
+import org.apache.geode.pdx.internal.PdxType;
 
 @Experimental
 public interface JdbcConnectorService extends CacheService {
@@ -44,4 +47,7 @@ public interface JdbcConnectorService extends CacheService {
   void validateMapping(RegionMapping regionMapping, DataSource dataSource);
 
   void validateMapping(RegionMapping regionMapping);
+
+  List<FieldMapping> createFieldMappingUsingPdx(PdxType pdxType,
+      TableMetaDataView tableMetaDataView);
 }
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorServiceImpl.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorServiceImpl.java
index cd28c72..aa55956 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorServiceImpl.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorServiceImpl.java
@@ -15,8 +15,11 @@
 package org.apache.geode.connectors.jdbc.internal;
 
 import java.sql.Connection;
+import java.sql.JDBCType;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -36,6 +39,8 @@ import org.apache.geode.internal.cache.CacheService;
 import org.apache.geode.internal.jndi.JNDIInvoker;
 import org.apache.geode.logging.internal.log4j.api.LogService;
 import org.apache.geode.management.internal.beans.CacheServiceMBeanBase;
+import org.apache.geode.pdx.internal.PdxField;
+import org.apache.geode.pdx.internal.PdxType;
 
 @Experimental
 public class JdbcConnectorServiceImpl implements JdbcConnectorService {
@@ -210,4 +215,61 @@ public class JdbcConnectorServiceImpl implements 
JdbcConnectorService {
               + regionMapping.getDataSourceName() + "\": ", ex);
     }
   }
+
+  @Override
+  public List<FieldMapping> createFieldMappingUsingPdx(PdxType pdxType,
+      TableMetaDataView tableMetaDataView) {
+
+    // TODO the table name returned in tableMetaData may be different than
+    // the table name specified on the command line at this point.
+    // Do we want to update the region mapping to hold the "real" table name
+    List<FieldMapping> fieldMappings = new ArrayList<>();
+    Set<String> columnNames = tableMetaDataView.getColumnNames();
+    if (columnNames.size() != pdxType.getFieldCount()) {
+      throw new JdbcConnectorException(
+          "The table and pdx class must have the same number of 
columns/fields. But the table has "
+              + columnNames.size()
+              + " columns and the pdx class has " + pdxType.getFieldCount() + 
" fields.");
+    }
+    List<PdxField> pdxFields = pdxType.getFields();
+    for (String jdbcName : columnNames) {
+      boolean isNullable = tableMetaDataView.isColumnNullable(jdbcName);
+      JDBCType jdbcType = tableMetaDataView.getColumnDataType(jdbcName);
+      FieldMapping fieldMapping =
+          createFieldMapping(jdbcName, jdbcType.getName(), isNullable, 
pdxFields);
+      fieldMappings.add(fieldMapping);
+    }
+    return fieldMappings;
+  }
+
+  private FieldMapping createFieldMapping(String jdbcName, String jdbcType, 
boolean jdbcNullable,
+      List<PdxField> pdxFields) {
+    String pdxName = null;
+    String pdxType = null;
+    for (PdxField pdxField : pdxFields) {
+      if (pdxField.getFieldName().equals(jdbcName)) {
+        pdxName = pdxField.getFieldName();
+        pdxType = pdxField.getFieldType().name();
+        break;
+      }
+    }
+    if (pdxName == null) {
+      // look for one inexact match
+      for (PdxField pdxField : pdxFields) {
+        if (pdxField.getFieldName().equalsIgnoreCase(jdbcName)) {
+          if (pdxName != null) {
+            throw new JdbcConnectorException(
+                "More than one PDX field name matched the column name \"" + 
jdbcName + "\"");
+          }
+          pdxName = pdxField.getFieldName();
+          pdxType = pdxField.getFieldType().name();
+        }
+      }
+    }
+    if (pdxName == null) {
+      throw new JdbcConnectorException(
+          "No PDX field name matched the column name \"" + jdbcName + "\"");
+    }
+    return new FieldMapping(pdxName, pdxType, jdbcName, jdbcType, 
jdbcNullable);
+  }
 }
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
index e456bf8..c7228f7 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunction.java
@@ -28,11 +28,8 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.sql.Connection;
-import java.sql.JDBCType;
 import java.sql.SQLException;
-import java.util.ArrayList;
 import java.util.List;
-import java.util.Set;
 
 import javax.sql.DataSource;
 
@@ -46,6 +43,7 @@ import org.apache.geode.SerializationException;
 import org.apache.geode.annotations.Experimental;
 import org.apache.geode.cache.execute.FunctionContext;
 import org.apache.geode.connectors.jdbc.JdbcConnectorException;
+import org.apache.geode.connectors.jdbc.internal.JdbcConnectorService;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
 import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
@@ -57,7 +55,6 @@ import org.apache.geode.management.cli.CliFunction;
 import org.apache.geode.management.internal.functions.CliFunctionResult;
 import org.apache.geode.pdx.PdxWriter;
 import org.apache.geode.pdx.ReflectionBasedAutoSerializer;
-import org.apache.geode.pdx.internal.PdxField;
 import org.apache.geode.pdx.internal.PdxOutputStream;
 import org.apache.geode.pdx.internal.PdxType;
 import org.apache.geode.pdx.internal.PdxWriterImpl;
@@ -86,27 +83,11 @@ public class CreateMappingPreconditionCheckFunction extends 
CliFunction<Object[]
     try (Connection connection = dataSource.getConnection()) {
       TableMetaDataView tableMetaData =
           getTableMetaDataManager().getTableMetaDataView(connection, 
regionMapping);
-      // TODO the table name returned in tableMetaData may be different than
-      // the table name specified on the command line at this point.
-      // Do we want to update the region mapping to hold the "real" table name
+      JdbcConnectorService service = 
cache.getService(JdbcConnectorService.class);
+      List<FieldMapping> fieldMappings = 
service.createFieldMappingUsingPdx(pdxType, tableMetaData);
+
       Object[] output = new Object[2];
-      ArrayList<FieldMapping> fieldMappings = new ArrayList<>();
       output[1] = fieldMappings;
-      Set<String> columnNames = tableMetaData.getColumnNames();
-      if (columnNames.size() != pdxType.getFieldCount()) {
-        throw new JdbcConnectorException(
-            "The table and pdx class must have the same number of 
columns/fields. But the table has "
-                + columnNames.size()
-                + " columns and the pdx class has " + pdxType.getFieldCount() 
+ " fields.");
-      }
-      List<PdxField> pdxFields = pdxType.getFields();
-      for (String jdbcName : columnNames) {
-        boolean isNullable = tableMetaData.isColumnNullable(jdbcName);
-        JDBCType jdbcType = tableMetaData.getColumnDataType(jdbcName);
-        FieldMapping fieldMapping =
-            createFieldMapping(jdbcName, jdbcType.getName(), isNullable, 
pdxFields);
-        fieldMappings.add(fieldMapping);
-      }
       if (regionMapping.getIds() == null || regionMapping.getIds().isEmpty()) {
         List<String> keyColumnNames = tableMetaData.getKeyColumnNames();
         output[0] = String.join(",", keyColumnNames);
@@ -118,37 +99,6 @@ public class CreateMappingPreconditionCheckFunction extends 
CliFunction<Object[]
     }
   }
 
-  private FieldMapping createFieldMapping(String jdbcName, String jdbcType, 
boolean jdbcNullable,
-      List<PdxField> pdxFields) {
-    String pdxName = null;
-    String pdxType = null;
-    for (PdxField pdxField : pdxFields) {
-      if (pdxField.getFieldName().equals(jdbcName)) {
-        pdxName = pdxField.getFieldName();
-        pdxType = pdxField.getFieldType().name();
-        break;
-      }
-    }
-    if (pdxName == null) {
-      // look for one inexact match
-      for (PdxField pdxField : pdxFields) {
-        if (pdxField.getFieldName().equalsIgnoreCase(jdbcName)) {
-          if (pdxName != null) {
-            throw new JdbcConnectorException(
-                "More than one PDX field name matched the column name \"" + 
jdbcName + "\"");
-          }
-          pdxName = pdxField.getFieldName();
-          pdxType = pdxField.getFieldType().name();
-        }
-      }
-    }
-    if (pdxName == null) {
-      throw new JdbcConnectorException(
-          "No PDX field name matched the column name \"" + jdbcName + "\"");
-    }
-    return new FieldMapping(pdxName, pdxType, jdbcName, jdbcType, 
jdbcNullable);
-  }
-
   private PdxType getPdxTypeForClass(InternalCache cache, TypeRegistry 
typeRegistry,
       String className, String remoteInputStreamName, RemoteInputStream 
remoteInputStream) {
     Class<?> clazz = loadPdxClass(className, remoteInputStreamName, 
remoteInputStream);
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/xml/RegionMappingConfiguration.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/xml/RegionMappingConfiguration.java
index 475a55d..8398397 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/xml/RegionMappingConfiguration.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/xml/RegionMappingConfiguration.java
@@ -14,17 +14,38 @@
  */
 package org.apache.geode.connectors.jdbc.internal.xml;
 
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.List;
+
+import javax.sql.DataSource;
+
 import org.apache.geode.InternalGemFireException;
+import org.apache.geode.SerializationException;
 import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.Region;
+import org.apache.geode.connectors.jdbc.JdbcConnectorException;
 import org.apache.geode.connectors.jdbc.internal.JdbcConnectorService;
 import org.apache.geode.connectors.jdbc.internal.RegionMappingExistsException;
+import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
+import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.extension.Extensible;
 import org.apache.geode.internal.cache.extension.Extension;
 import org.apache.geode.internal.cache.extension.ExtensionPoint;
 import org.apache.geode.internal.cache.xmlcache.XmlGenerator;
+import org.apache.geode.internal.classloader.ClassPathLoader;
+import org.apache.geode.internal.jndi.JNDIInvoker;
+import org.apache.geode.pdx.PdxWriter;
+import org.apache.geode.pdx.ReflectionBasedAutoSerializer;
+import org.apache.geode.pdx.internal.PdxOutputStream;
+import org.apache.geode.pdx.internal.PdxType;
+import org.apache.geode.pdx.internal.PdxWriterImpl;
+import org.apache.geode.pdx.internal.TypeRegistry;
 
 public class RegionMappingConfiguration implements Extension<Region<?, ?>> {
 
@@ -50,6 +71,15 @@ public class RegionMappingConfiguration implements 
Extension<Region<?, ?>> {
     final Region<?, ?> region = extensionPoint.getTarget();
     InternalCache internalCache = (InternalCache) region.getRegionService();
     JdbcConnectorService service = 
internalCache.getService(JdbcConnectorService.class);
+    if (mapping.getFieldMappings().isEmpty()) {
+      Class<?> pdxClazz = loadPdxClass(mapping.getPdxName());
+      PdxType pdxType = getPdxTypeForClass(internalCache, pdxClazz);
+
+      List<FieldMapping> fieldMappings = createDefaultFieldMapping(service, 
pdxType);
+      fieldMappings.forEach(fieldMapping -> {
+        mapping.addFieldMapping(fieldMapping);
+      });
+    }
     service.validateMapping(mapping);
     createRegionMapping(service, mapping);
   }
@@ -62,4 +92,103 @@ public class RegionMappingConfiguration implements 
Extension<Region<?, ?>> {
       throw new InternalGemFireException(e);
     }
   }
+
+  List<FieldMapping> createDefaultFieldMapping(JdbcConnectorService service,
+      PdxType pdxType) {
+    DataSource dataSource = getDataSource(mapping.getDataSourceName());
+    if (dataSource == null) {
+      throw new JdbcConnectorException("No datasource \"" + 
mapping.getDataSourceName()
+          + "\" found when creating default field mapping");
+    }
+    TableMetaDataManager manager = getTableMetaDataManager();
+    try (Connection connection = dataSource.getConnection()) {
+      TableMetaDataView tableMetaData = 
manager.getTableMetaDataView(connection, mapping);
+      return service.createFieldMappingUsingPdx(pdxType, tableMetaData);
+    } catch (SQLException e) {
+      throw JdbcConnectorException.createException(e);
+    }
+  }
+
+  protected PdxType getPdxTypeForClass(Cache cache, Class<?> clazz) {
+    InternalCache internalCache = (InternalCache) cache;
+    TypeRegistry typeRegistry = internalCache.getPdxRegistry();
+
+    PdxType result = typeRegistry.getExistingTypeForClass(clazz);
+    if (result != null) {
+      return result;
+    }
+    return generatePdxTypeForClass(internalCache, typeRegistry, clazz);
+  }
+
+  /**
+   * Generates and returns a PdxType for the given class.
+   * The generated PdxType is also stored in the TypeRegistry.
+   *
+   * @param cache used to generate pdx type
+   * @param clazz the class to generate a PdxType for
+   * @return the generated PdxType
+   * @throws JdbcConnectorException if a PdxType can not be generated
+   */
+  PdxType generatePdxTypeForClass(InternalCache cache, TypeRegistry 
typeRegistry,
+      Class<?> clazz) {
+    Object object = createInstance(clazz);
+    try {
+      cache.registerPdxMetaData(object);
+    } catch (SerializationException ex) {
+      String className = clazz.getName();
+      ReflectionBasedAutoSerializer serializer =
+          getReflectionBasedAutoSerializer("\\Q" + className + "\\E");
+      PdxWriter writer = createPdxWriter(typeRegistry, object);
+      boolean result = serializer.toData(object, writer);
+      if (!result) {
+        throw new JdbcConnectorException(
+            "Could not generate a PdxType using the 
ReflectionBasedAutoSerializer for the class  "
+                + clazz.getName() + " after failing to register pdx metadata 
due to "
+                + ex.getMessage() + ". Check the server log for details.");
+      }
+    }
+    // serialization will leave the type in the registry
+    return typeRegistry.getExistingTypeForClass(clazz);
+  }
+
+  private Object createInstance(Class<?> clazz) {
+    try {
+      Constructor<?> ctor = clazz.getConstructor();
+      return ctor.newInstance(new Object[] {});
+    } catch (NoSuchMethodException | SecurityException | InstantiationException
+        | IllegalAccessException | IllegalArgumentException | 
InvocationTargetException ex) {
+      throw new JdbcConnectorException(
+          "Could not generate a PdxType for the class " + clazz.getName()
+              + " because it did not have a public zero arg constructor. 
Details: " + ex);
+    }
+  }
+
+  private Class<?> loadPdxClass(String className) {
+    try {
+      return ClassPathLoader.getLatest().forName(className);
+    } catch (ClassNotFoundException ex) {
+      throw new JdbcConnectorException(
+          "The pdx class \"" + className + "\" could not be loaded because: " 
+ ex);
+    }
+  }
+
+  // unit test mocks this method
+  DataSource getDataSource(String dataSourceName) {
+    return JNDIInvoker.getDataSource(dataSourceName);
+  }
+
+  // unit test mocks this method
+  ReflectionBasedAutoSerializer getReflectionBasedAutoSerializer(String 
className) {
+    return new ReflectionBasedAutoSerializer(className);
+  }
+
+  // unit test mocks this method
+  PdxWriter createPdxWriter(TypeRegistry typeRegistry, Object object) {
+    return new PdxWriterImpl(typeRegistry, object, new PdxOutputStream());
+  }
+
+  // unit test mocks this method
+  TableMetaDataManager getTableMetaDataManager() {
+    return new TableMetaDataManager();
+  }
 }
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorServiceTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorServiceTest.java
index baf4b58..dae384b 100644
--- 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorServiceTest.java
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/JdbcConnectorServiceTest.java
@@ -28,6 +28,7 @@ import java.sql.Connection;
 import java.sql.JDBCType;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -42,6 +43,9 @@ import 
org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
 import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
 import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.extension.ExtensionPoint;
+import org.apache.geode.pdx.FieldType;
+import org.apache.geode.pdx.internal.PdxField;
+import org.apache.geode.pdx.internal.PdxType;
 
 public class JdbcConnectorServiceTest {
 
@@ -63,6 +67,7 @@ public class JdbcConnectorServiceTest {
 
   TableMetaDataView view = mock(TableMetaDataView.class);
   TableMetaDataManager manager = mock(TableMetaDataManager.class);
+  PdxType pdxType = mock(PdxType.class);
   InternalCache cache = mock(InternalCache.class);
   DataSource dataSource = mock(DataSource.class);
   Connection connection = mock(Connection.class);
@@ -79,6 +84,16 @@ public class JdbcConnectorServiceTest {
     when(mapping.getIds()).thenReturn(KEY_COLUMN_NAME);
     when(mapping.getSpecifiedIds()).thenReturn(true);
 
+    when(pdxType.getFieldCount()).thenReturn(2);
+    PdxField field1 = mock(PdxField.class);
+    when(field1.getFieldName()).thenReturn("id");
+    when(field1.getFieldType()).thenReturn(FieldType.INT);
+    PdxField field2 = mock(PdxField.class);
+    when(field2.getFieldName()).thenReturn("name");
+    when(field2.getFieldType()).thenReturn(FieldType.STRING);
+    List<PdxField> pdxFields = Arrays.asList(field1, field2);
+    when(pdxType.getFields()).thenReturn(pdxFields);
+
     when(dataSource.getConnection()).thenReturn(connection);
     when(manager.getTableMetaDataView(connection, mapping)).thenReturn(view);
     when(view.getKeyColumnNames()).thenReturn(keyColumns);
@@ -216,4 +231,87 @@ public class JdbcConnectorServiceTest {
     when(mapping.getIds()).thenReturn(KEY_COLUMN_NAME + "," + 
COMPOSITE_KEY_COLUMN_NAME);
     service.validateMapping(mapping);
   }
+
+  @Test
+  public void createFieldMappingUsingPdxSucceedsWithExactMatchPdxFields() {
+    List<FieldMapping> fieldsMappings = 
service.createFieldMappingUsingPdx(pdxType, view);
+
+    assertThat(fieldsMappings).hasSize(2);
+    assertThat(fieldsMappings).contains(
+        new FieldMapping("id", FieldType.INT.name(), "id", 
JDBCType.INTEGER.name(), false));
+    assertThat(fieldsMappings).contains(
+        new FieldMapping("name", FieldType.STRING.name(), "name", 
JDBCType.VARCHAR.name(), true));
+  }
+
+  @Test
+  public void createFieldMappingUsingPdxSucceedsWithIgnoreCaseMatchPdxFields() 
{
+    when(pdxType.getFieldCount()).thenReturn(2);
+    PdxField field1 = mock(PdxField.class);
+    when(field1.getFieldName()).thenReturn("ID");
+    when(field1.getFieldType()).thenReturn(FieldType.INT);
+    PdxField field2 = mock(PdxField.class);
+    when(field2.getFieldName()).thenReturn("NAME");
+    when(field2.getFieldType()).thenReturn(FieldType.STRING);
+    List<PdxField> pdxFields = Arrays.asList(field1, field2);
+    when(pdxType.getFields()).thenReturn(pdxFields);
+
+    List<FieldMapping> fieldsMappings = 
service.createFieldMappingUsingPdx(pdxType, view);
+
+    assertThat(fieldsMappings).hasSize(2);
+    assertThat(fieldsMappings).contains(
+        new FieldMapping("ID", FieldType.INT.name(), "id", 
JDBCType.INTEGER.name(), false));
+    assertThat(fieldsMappings).contains(
+        new FieldMapping("NAME", FieldType.STRING.name(), "name", 
JDBCType.VARCHAR.name(), true));
+  }
+
+  @Test
+  public void 
createFieldMappingUsingPdxThrowsExceptionWhenGivenUnMatchPdxFieldName() {
+    when(pdxType.getFieldCount()).thenReturn(2);
+    PdxField field1 = mock(PdxField.class);
+    when(field1.getFieldName()).thenReturn("id");
+    when(field1.getFieldType()).thenReturn(FieldType.INT);
+    PdxField field2 = mock(PdxField.class);
+    when(field2.getFieldName()).thenReturn("nameString");
+    when(field2.getFieldType()).thenReturn(FieldType.STRING);
+    List<PdxField> pdxFields = Arrays.asList(field1, field2);
+    when(pdxType.getFields()).thenReturn(pdxFields);
+
+    Throwable throwable = catchThrowable(() -> 
service.createFieldMappingUsingPdx(pdxType, view));
+
+    
assertThat(throwable).isInstanceOf(JdbcConnectorException.class).hasMessageContaining(
+        String.format("No PDX field name matched the column name \"%s\"",
+            VALUE_COLUMN_NAME));
+  }
+
+  @Test
+  public void 
createFieldMappingUsingPdxThrowsExceptionWhenGivenDuplicatePdxFieldName() {
+    when(pdxType.getFieldCount()).thenReturn(2);
+    PdxField field1 = mock(PdxField.class);
+    when(field1.getFieldName()).thenReturn("id");
+    when(field1.getFieldType()).thenReturn(FieldType.INT);
+    PdxField field2 = mock(PdxField.class);
+    when(field2.getFieldName()).thenReturn("NAME");
+    when(field2.getFieldType()).thenReturn(FieldType.STRING);
+    PdxField field3 = mock(PdxField.class);
+    when(field3.getFieldName()).thenReturn("Name");
+    when(field3.getFieldType()).thenReturn(FieldType.STRING);
+    List<PdxField> pdxFields = Arrays.asList(field2, field3, field1);
+    when(pdxType.getFields()).thenReturn(pdxFields);
+
+    Throwable throwable = catchThrowable(() -> 
service.createFieldMappingUsingPdx(pdxType, view));
+
+    
assertThat(throwable).isInstanceOf(JdbcConnectorException.class).hasMessageContaining(
+        String.format("More than one PDX field name matched the column name 
\"%s\"",
+            VALUE_COLUMN_NAME));
+  }
+
+  @Test
+  public void 
createFieldMappingUsingPdxThrowsExceptionWhenGivenExistingPdxTypeWithWrongNumberOfFields()
 {
+    doReturn(3).when(pdxType).getFieldCount();
+    Throwable throwable = catchThrowable(() -> 
service.createFieldMappingUsingPdx(pdxType, view));
+    
assertThat(throwable).isInstanceOf(JdbcConnectorException.class).hasMessageContaining(
+        String.format(
+            "The table and pdx class must have the same number of 
columns/fields. But the table has %d columns and the pdx class has %d fields.",
+            view.getColumnNames().size(), pdxType.getFieldCount()));
+  }
 }
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
index d7390e8..4888fe2 100644
--- 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/cli/CreateMappingPreconditionCheckFunctionTest.java
@@ -51,6 +51,8 @@ import org.apache.geode.SerializationException;
 import org.apache.geode.cache.execute.FunctionContext;
 import org.apache.geode.cache.execute.ResultSender;
 import org.apache.geode.connectors.jdbc.JdbcConnectorException;
+import org.apache.geode.connectors.jdbc.internal.JdbcConnectorService;
+import org.apache.geode.connectors.jdbc.internal.JdbcConnectorServiceImpl;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
 import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
 import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
@@ -71,6 +73,7 @@ public class CreateMappingPreconditionCheckFunctionTest {
   private static final String DATA_SOURCE_NAME = "testDataSourceName";
   private static final String MEMBER_NAME = "testMemberName";
 
+  private JdbcConnectorService service;
   private RegionMapping regionMapping;
   private FunctionContext<Object[]> context;
   private InternalCache cache;
@@ -105,6 +108,9 @@ public class CreateMappingPreconditionCheckFunctionTest {
     remoteInputStream = null;
     setupInputArgs();
 
+    service = spy(JdbcConnectorServiceImpl.class);
+    when(cache.getService(JdbcConnectorService.class)).thenReturn(service);
+
     when(regionMapping.getRegionName()).thenReturn(REGION_NAME);
     when(regionMapping.getPdxName()).thenReturn(PDX_CLASS_NAME);
     when(regionMapping.getDataSourceName()).thenReturn(DATA_SOURCE_NAME);
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/xml/RegionMappingConfigurationTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/xml/RegionMappingConfigurationTest.java
new file mode 100644
index 0000000..59b4546
--- /dev/null
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/xml/RegionMappingConfigurationTest.java
@@ -0,0 +1,325 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional 
information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache 
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the 
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software 
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express
+ * or implied. See the License for the specific language governing permissions 
and limitations under
+ * the License.
+ */
+package org.apache.geode.connectors.jdbc.internal.xml;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.catchThrowable;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.same;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.sql.Connection;
+import java.sql.JDBCType;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import javax.sql.DataSource;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.geode.SerializationException;
+import org.apache.geode.connectors.jdbc.JdbcConnectorException;
+import org.apache.geode.connectors.jdbc.internal.JdbcConnectorService;
+import org.apache.geode.connectors.jdbc.internal.JdbcConnectorServiceImpl;
+import org.apache.geode.connectors.jdbc.internal.TableMetaDataManager;
+import org.apache.geode.connectors.jdbc.internal.TableMetaDataView;
+import org.apache.geode.connectors.jdbc.internal.configuration.FieldMapping;
+import org.apache.geode.connectors.jdbc.internal.configuration.RegionMapping;
+import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.internal.cache.extension.ExtensionPoint;
+import org.apache.geode.pdx.FieldType;
+import org.apache.geode.pdx.PdxWriter;
+import org.apache.geode.pdx.ReflectionBasedAutoSerializer;
+import org.apache.geode.pdx.internal.PdxField;
+import org.apache.geode.pdx.internal.PdxType;
+import org.apache.geode.pdx.internal.TypeRegistry;
+
+public class RegionMappingConfigurationTest {
+
+  private static final String TEST_REGION_NAME = "testRegion";
+  private static final String DATA_SOURCE_NAME = "dataSource";
+
+  private static final String KEY_COLUMN_NAME = "id";
+  private static final String VALUE_COLUMN_NAME = "name";
+
+  private final List<String> keyColumns = new ArrayList<>();
+  private final Set<String> allColumns = new HashSet<>();
+  private final List<FieldMapping> fieldMappings = new ArrayList<>();
+
+  private RegionMapping mapping;
+
+  private RegionMappingConfiguration config;
+
+  private JdbcConnectorService service;
+  private TableMetaDataView view = mock(TableMetaDataView.class);
+  private TableMetaDataManager manager = mock(TableMetaDataManager.class);
+  private InternalCache cache = mock(InternalCache.class);
+  private DataSource dataSource = mock(DataSource.class);
+  private Connection connection = mock(Connection.class);
+  private PdxType pdxType = mock(PdxType.class);
+  private TypeRegistry typeRegistry = mock(TypeRegistry.class);
+
+  public static class PdxClassDummy {
+  }
+
+  public static class PdxClassDummyNoZeroArg {
+    public PdxClassDummyNoZeroArg(@SuppressWarnings("unused") int arg) {}
+  }
+
+  @SuppressWarnings("unchecked")
+  @Before
+  public void setUp() throws Exception {
+    mapping = mock(RegionMapping.class);
+
+    service = spy(JdbcConnectorServiceImpl.class);
+    when(cache.getService(JdbcConnectorService.class)).thenReturn(service);
+
+    when(cache.getExtensionPoint()).thenReturn(mock(ExtensionPoint.class));
+    when(mapping.getRegionName()).thenReturn(TEST_REGION_NAME);
+    when(mapping.getDataSourceName()).thenReturn(DATA_SOURCE_NAME);
+    when(mapping.getFieldMappings()).thenReturn(fieldMappings);
+    when(mapping.getIds()).thenReturn(KEY_COLUMN_NAME);
+    when(mapping.getSpecifiedIds()).thenReturn(true);
+
+    when(dataSource.getConnection()).thenReturn(connection);
+    when(manager.getTableMetaDataView(connection, mapping)).thenReturn(view);
+    when(view.getKeyColumnNames()).thenReturn(keyColumns);
+    when(view.getColumnNames()).thenReturn(allColumns);
+    when(view.getColumnDataType(KEY_COLUMN_NAME)).thenReturn(JDBCType.INTEGER);
+    
when(view.getColumnDataType(VALUE_COLUMN_NAME)).thenReturn(JDBCType.VARCHAR);
+    when(view.isColumnNullable(KEY_COLUMN_NAME)).thenReturn(false);
+    when(view.isColumnNullable(VALUE_COLUMN_NAME)).thenReturn(true);
+
+    config = spy(new RegionMappingConfiguration(mapping));
+
+    keyColumns.add(KEY_COLUMN_NAME);
+    allColumns.add(KEY_COLUMN_NAME);
+    allColumns.add(VALUE_COLUMN_NAME);
+
+    fieldMappings
+        .add(new FieldMapping("id", "integer", KEY_COLUMN_NAME, 
JDBCType.INTEGER.getName(), false));
+    fieldMappings.add(
+        new FieldMapping("name", "string", VALUE_COLUMN_NAME, 
JDBCType.VARCHAR.getName(), true));
+
+    when(pdxType.getFieldCount()).thenReturn(2);
+    PdxField field1 = mock(PdxField.class);
+    when(field1.getFieldName()).thenReturn("id");
+    when(field1.getFieldType()).thenReturn(FieldType.INT);
+    PdxField field2 = mock(PdxField.class);
+    when(field2.getFieldName()).thenReturn("name");
+    when(field2.getFieldType()).thenReturn(FieldType.STRING);
+    List<PdxField> pdxFields = Arrays.asList(field1, field2);
+    when(pdxType.getFields()).thenReturn(pdxFields);
+
+    when(cache.getPdxRegistry()).thenReturn(typeRegistry);
+
+    doReturn(dataSource).when(config).getDataSource(DATA_SOURCE_NAME);
+    doReturn(manager).when(config).getTableMetaDataManager();
+  }
+
+  @Test
+  public void createDefaultFieldMappingSucceedsWithExactMatchPdxFields() {
+    List<FieldMapping> fieldsMappings = 
config.createDefaultFieldMapping(service, pdxType);
+
+    assertThat(fieldsMappings).hasSize(2);
+    assertThat(fieldsMappings).contains(
+        new FieldMapping("id", FieldType.INT.name(), "id", 
JDBCType.INTEGER.name(), false));
+    assertThat(fieldsMappings).contains(
+        new FieldMapping("name", FieldType.STRING.name(), "name", 
JDBCType.VARCHAR.name(), true));
+  }
+
+  @Test
+  public void createDefaultFieldMappingSucceedsWithIgnoreCaseMatchPdxFields() {
+    when(pdxType.getFieldCount()).thenReturn(2);
+    PdxField field1 = mock(PdxField.class);
+    when(field1.getFieldName()).thenReturn("ID");
+    when(field1.getFieldType()).thenReturn(FieldType.INT);
+    PdxField field2 = mock(PdxField.class);
+    when(field2.getFieldName()).thenReturn("NAME");
+    when(field2.getFieldType()).thenReturn(FieldType.STRING);
+    List<PdxField> pdxFields = Arrays.asList(field1, field2);
+    when(pdxType.getFields()).thenReturn(pdxFields);
+
+    List<FieldMapping> fieldsMappings = 
config.createDefaultFieldMapping(service, pdxType);
+
+    assertThat(fieldsMappings).hasSize(2);
+    assertThat(fieldsMappings).contains(
+        new FieldMapping("ID", FieldType.INT.name(), "id", 
JDBCType.INTEGER.name(), false));
+    assertThat(fieldsMappings).contains(
+        new FieldMapping("NAME", FieldType.STRING.name(), "name", 
JDBCType.VARCHAR.name(), true));
+  }
+
+  @Test
+  public void 
createDefaultFieldMappingThrowsExceptionWhenGivenUnMatchPdxFieldName() {
+    when(pdxType.getFieldCount()).thenReturn(2);
+    PdxField field1 = mock(PdxField.class);
+    when(field1.getFieldName()).thenReturn("id");
+    when(field1.getFieldType()).thenReturn(FieldType.INT);
+    PdxField field2 = mock(PdxField.class);
+    when(field2.getFieldName()).thenReturn("nameString");
+    when(field2.getFieldType()).thenReturn(FieldType.STRING);
+    List<PdxField> pdxFields = Arrays.asList(field1, field2);
+    when(pdxType.getFields()).thenReturn(pdxFields);
+
+    Throwable throwable = catchThrowable(() -> 
config.createDefaultFieldMapping(service, pdxType));
+
+    
assertThat(throwable).isInstanceOf(JdbcConnectorException.class).hasMessageContaining(
+        String.format("No PDX field name matched the column name \"%s\"",
+            VALUE_COLUMN_NAME));
+  }
+
+  @Test
+  public void 
createDefaultFieldMappingThrowsExceptionWhenGivenDuplicatePdxFieldName() {
+    when(pdxType.getFieldCount()).thenReturn(2);
+    PdxField field1 = mock(PdxField.class);
+    when(field1.getFieldName()).thenReturn("id");
+    when(field1.getFieldType()).thenReturn(FieldType.INT);
+    PdxField field2 = mock(PdxField.class);
+    when(field2.getFieldName()).thenReturn("NAME");
+    when(field2.getFieldType()).thenReturn(FieldType.STRING);
+    PdxField field3 = mock(PdxField.class);
+    when(field3.getFieldName()).thenReturn("Name");
+    when(field3.getFieldType()).thenReturn(FieldType.STRING);
+    List<PdxField> pdxFields = Arrays.asList(field2, field3, field1);
+    when(pdxType.getFields()).thenReturn(pdxFields);
+
+    Throwable throwable = catchThrowable(() -> 
config.createDefaultFieldMapping(service, pdxType));
+
+    
assertThat(throwable).isInstanceOf(JdbcConnectorException.class).hasMessageContaining(
+        String.format("More than one PDX field name matched the column name 
\"%s\"",
+            VALUE_COLUMN_NAME));
+  }
+
+  @Test
+  public void 
createDefaultFieldMappingThrowsExceptionWhenDataSourceDoesNotExist() {
+    doReturn(null).when(config).getDataSource(DATA_SOURCE_NAME);
+    Throwable throwable = catchThrowable(() -> 
config.createDefaultFieldMapping(service, pdxType));
+    
assertThat(throwable).isInstanceOf(JdbcConnectorException.class).hasMessageContaining(
+        String.format("No datasource \"%s\" found when creating default field 
mapping",
+            mapping.getDataSourceName()));
+  }
+
+  @Test
+  public void 
createDefaultFieldMappingThrowsExceptionWhenGetConnectionHasSqlException()
+      throws SQLException {
+    when(dataSource.getConnection()).thenThrow(SQLException.class);
+    Throwable throwable = catchThrowable(() -> 
config.createDefaultFieldMapping(service, pdxType));
+    assertThat(throwable).isInstanceOf(JdbcConnectorException.class);
+    verify(connection, never()).close();
+  }
+
+  @Test
+  public void 
createDefaultFieldMappingThrowsExceptionWhenGivenExistingPdxTypeWithWrongNumberOfFields()
 {
+    doReturn(3).when(pdxType).getFieldCount();
+    Throwable throwable = catchThrowable(() -> 
config.createDefaultFieldMapping(service, pdxType));
+    
assertThat(throwable).isInstanceOf(JdbcConnectorException.class).hasMessageContaining(
+        String.format(
+            "The table and pdx class must have the same number of 
columns/fields. But the table has %d columns and the pdx class has %d fields.",
+            view.getColumnNames().size(), pdxType.getFieldCount()));
+  }
+
+  @Test
+  public void getPdxTypeForClassSucceedsWithExistingPdxType() {
+    
when(typeRegistry.getExistingTypeForClass(PdxClassDummy.class)).thenReturn(pdxType);
+
+    PdxType result = config.getPdxTypeForClass(cache, PdxClassDummy.class);
+    verify(config, never()).generatePdxTypeForClass(cache, typeRegistry, 
PdxClassDummy.class);
+    assertThat(result).isEqualTo(pdxType);
+  }
+
+  @Test
+  public void getPdxTypeForClassSucceedsWithGeneratingPdxType() {
+    
when(typeRegistry.getExistingTypeForClass(PdxClassDummy.class)).thenReturn(null)
+        .thenReturn(pdxType);
+
+    PdxType result = config.getPdxTypeForClass(cache, PdxClassDummy.class);
+    verify(config, times(1)).generatePdxTypeForClass(cache, typeRegistry, 
PdxClassDummy.class);
+    verify(cache, times(1)).registerPdxMetaData(any());
+    assertThat(result).isEqualTo(pdxType);
+  }
+
+  @Test
+  public void 
getPdxTypeForClassSucceedsWithGivenNonPdxUsesReflectionBasedAutoSerializer() {
+    
when(typeRegistry.getExistingTypeForClass(PdxClassDummy.class)).thenReturn(null)
+        .thenReturn(pdxType);
+
+    SerializationException ex = new SerializationException("test");
+    doThrow(ex).when(cache).registerPdxMetaData(any());
+
+    ReflectionBasedAutoSerializer serializer = 
mock(ReflectionBasedAutoSerializer.class);
+    PdxWriter pdxWriter = mock(PdxWriter.class);
+    String domainClassNameInAutoSerializer = "\\Q" + 
PdxClassDummy.class.getName() + "\\E";
+    doReturn(serializer).when(config)
+        .getReflectionBasedAutoSerializer(domainClassNameInAutoSerializer);
+    doReturn(pdxWriter).when(config).createPdxWriter(same(typeRegistry), 
any());
+    when(serializer.toData(any(), same(pdxWriter))).thenReturn(true);
+
+    PdxType result = config.getPdxTypeForClass(cache, PdxClassDummy.class);
+    verify(config, times(1)).generatePdxTypeForClass(cache, typeRegistry, 
PdxClassDummy.class);
+    verify(cache, times(1)).registerPdxMetaData(any());
+    verify(config, 
times(1)).getReflectionBasedAutoSerializer(domainClassNameInAutoSerializer);
+    assertThat(result).isEqualTo(pdxType);
+  }
+
+  @Test
+  public void 
getPdxTypeForClassThrowsExceptionWhenGivenPdxRegistrationFailsAndReflectionBasedAutoSerializer()
 {
+    
when(typeRegistry.getExistingTypeForClass(PdxClassDummy.class)).thenReturn(null);
+
+    SerializationException ex = new SerializationException("test");
+    doThrow(ex).when(cache).registerPdxMetaData(any());
+
+    ReflectionBasedAutoSerializer serializer = 
mock(ReflectionBasedAutoSerializer.class);
+    PdxWriter pdxWriter = mock(PdxWriter.class);
+    String domainClassNameInAutoSerializer = "\\Q" + 
PdxClassDummy.class.getName() + "\\E";
+    doReturn(serializer).when(config)
+        .getReflectionBasedAutoSerializer(domainClassNameInAutoSerializer);
+    doReturn(pdxWriter).when(config).createPdxWriter(same(typeRegistry), 
any());
+    when(serializer.toData(any(), same(pdxWriter))).thenReturn(false);
+
+    Throwable throwable =
+        catchThrowable(() -> config.getPdxTypeForClass(cache, 
PdxClassDummy.class));
+    verify(config, times(1)).generatePdxTypeForClass(cache, typeRegistry, 
PdxClassDummy.class);
+    verify(cache, times(1)).registerPdxMetaData(any());
+    verify(config, 
times(1)).getReflectionBasedAutoSerializer(domainClassNameInAutoSerializer);
+    
assertThat(throwable).isInstanceOf(JdbcConnectorException.class).hasMessageContaining(
+        String.format(
+            "Could not generate a PdxType using the 
ReflectionBasedAutoSerializer for the class  %s after failing to register pdx 
metadata due to %s. Check the server log for details.",
+            PdxClassDummy.class.getName(), ex.getMessage()));
+  }
+
+  @Test
+  public void 
getPdxTypeForClassThrowsExceptionWhenGivenPdxSerializableWithNoZeroArgConstructor()
 {
+    Throwable throwable =
+        catchThrowable(() -> config.getPdxTypeForClass(cache, 
PdxClassDummyNoZeroArg.class));
+    verify(config, times(1)).generatePdxTypeForClass(cache, typeRegistry,
+        PdxClassDummyNoZeroArg.class);
+    
assertThat(throwable).isInstanceOf(JdbcConnectorException.class).hasMessageContaining(
+        String.format(
+            "Could not generate a PdxType for the class %s because it did not 
have a public zero arg constructor.",
+            PdxClassDummyNoZeroArg.class.getName()));
+  }
+}

Reply via email to