This is an automated email from the ASF dual-hosted git repository.

udo pushed a commit to branch feature/GEODE-4685
in repository https://gitbox.apache.org/repos/asf/geode.git

commit 9b0e3fde893bcdae5c93d4db1f0bcf555d73b747
Author: Udo <ukohlme...@pivotal.io>
AuthorDate: Mon Feb 26 16:37:10 2018 -0800

    GEODE-4685: Moved the static DefaultQuery.setPdxReadSerialized to 
TypeRegistry.
    Cleaned up the overriding of readSerialized to reset to previous value.
    Added cache to AbstractJdbcCallback.java so that child classes can 
access it.
    Replaced AtomicLong with LongAdder.
---
 .../geode/connectors/jdbc/JdbcAsyncWriter.java     |  32 +--
 .../apache/geode/connectors/jdbc/JdbcLoader.java   |   4 +-
 .../apache/geode/connectors/jdbc/JdbcWriter.java   |  12 +-
 .../jdbc/internal/AbstractJdbcCallback.java        |   5 +-
 .../jdbc/JdbcAsyncWriterIntegrationTest.java       |   7 +-
 .../geode/connectors/jdbc/JdbcAsyncWriterTest.java |   9 +-
 .../connectors/jdbc/JdbcLoaderIntegrationTest.java |   7 +-
 .../geode/connectors/jdbc/JdbcLoaderTest.java      |   7 +-
 .../connectors/jdbc/JdbcWriterIntegrationTest.java |   7 +-
 .../geode/connectors/jdbc/JdbcWriterTest.java      |   8 +-
 .../jdbc/internal/AbstractJdbcCallbackTest.java    |  11 +-
 .../geode/cache/query/internal/DefaultQuery.java   | 243 +++++++++------------
 .../cache/query/internal/index/IndexManager.java   |  30 +--
 .../internal/streaming/StreamingOperation.java     |  92 +++++---
 .../geode/internal/cache/GemFireCacheImpl.java     |  11 +-
 .../internal/cache/partitioned/QueryMessage.java   |   6 +-
 .../apache/geode/pdx/internal/TypeRegistry.java    |  12 +-
 .../BaseLineAndCompareQueryPerfJUnitTest.java      |   3 +-
 .../geode/cache/query/PdxStringQueryJUnitTest.java | 179 +++++++--------
 .../cache/query/dunit/PdxStringQueryDUnitTest.java |  48 ++--
 .../PRQueryRemoteNodeExceptionDUnitTest.java       |   2 +-
 .../AnalyzeSerializablesJUnitTest.java             |  45 ++--
 .../geode/internal/PdxDeleteFieldJUnitTest.java    |  12 +-
 .../apache/geode/pdx/PdxClientServerDUnitTest.java |   7 +-
 .../apache/geode/pdx/PdxSerializableJUnitTest.java | 135 ++++++------
 .../java/org/apache/geode/test/fake/Fakes.java     |   3 +
 .../apache/geode/codeAnalysis/excludedClasses.txt  |   1 +
 .../codeAnalysis/sanctionedDataSerializables.txt   |   4 +-
 .../cache/query/internal/cq/CqServiceImpl.java     |   5 +-
 .../lucene/internal/IndexRepositoryFactory.java    |  13 +-
 .../cache/lucene/internal/LuceneEventListener.java |  26 ++-
 .../cache/lucene/internal/LuceneIndexImpl.java     |   2 +-
 .../internal/LuceneEventListenerJUnitTest.java     |  81 +++++--
 .../PartitionedRepositoryManagerJUnitTest.java     |   2 +
 34 files changed, 583 insertions(+), 488 deletions(-)

diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriter.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriter.java
index 9720882..e4af171 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriter.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriter.java
@@ -16,7 +16,7 @@ package org.apache.geode.connectors.jdbc;
 
 import java.sql.SQLException;
 import java.util.List;
-import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.LongAdder;
 
 import org.apache.logging.log4j.Logger;
 
@@ -24,7 +24,6 @@ import org.apache.geode.CopyHelper;
 import org.apache.geode.annotations.Experimental;
 import org.apache.geode.cache.asyncqueue.AsyncEvent;
 import org.apache.geode.cache.asyncqueue.AsyncEventListener;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.connectors.jdbc.internal.AbstractJdbcCallback;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
 import org.apache.geode.internal.cache.InternalCache;
@@ -40,9 +39,9 @@ import org.apache.geode.pdx.PdxInstance;
 public class JdbcAsyncWriter extends AbstractJdbcCallback implements 
AsyncEventListener {
   private static final Logger logger = LogService.getLogger();
 
-  private AtomicLong totalEvents = new AtomicLong();
-  private AtomicLong successfulEvents = new AtomicLong();
-  private AtomicLong failedEvents = new AtomicLong();
+  private LongAdder totalEvents = new LongAdder();
+  private LongAdder successfulEvents = new LongAdder();
+  private LongAdder failedEvents = new LongAdder();
 
   @SuppressWarnings("unused")
   public JdbcAsyncWriter() {
@@ -50,8 +49,8 @@ public class JdbcAsyncWriter extends AbstractJdbcCallback 
implements AsyncEventL
   }
 
   // Constructor for test purposes only
-  JdbcAsyncWriter(SqlHandler sqlHandler) {
-    super(sqlHandler);
+  JdbcAsyncWriter(SqlHandler sqlHandler, InternalCache cache) {
+    super(sqlHandler, cache);
   }
 
   @Override
@@ -62,9 +61,11 @@ public class JdbcAsyncWriter extends AbstractJdbcCallback 
implements AsyncEventL
       checkInitialized((InternalCache) 
events.get(0).getRegion().getRegionService());
     }
 
-    DefaultQuery.setPdxReadSerialized(true);
+    Boolean initialPdxReadSerialized = 
cache.getPdxRegistry().getPdxReadSerializedOverride();
+    cache.getPdxRegistry().setPdxReadSerializedOverride(true);
     try {
       for (AsyncEvent event : events) {
+
         try {
           getSqlHandler().write(event.getRegion(), event.getOperation(), 
event.getKey(),
               getPdxInstance(event));
@@ -75,34 +76,33 @@ public class JdbcAsyncWriter extends AbstractJdbcCallback 
implements AsyncEventL
         }
       }
     } finally {
-      DefaultQuery.setPdxReadSerialized(false);
+      
cache.getPdxRegistry().setPdxReadSerializedOverride(initialPdxReadSerialized);
     }
-
     return true;
   }
 
   long getTotalEvents() {
-    return totalEvents.get();
+    return totalEvents.longValue();
   }
 
   long getSuccessfulEvents() {
-    return successfulEvents.get();
+    return successfulEvents.longValue();
   }
 
   long getFailedEvents() {
-    return failedEvents.get();
+    return failedEvents.longValue();
   }
 
   private void changeSuccessfulEvents(long delta) {
-    successfulEvents.addAndGet(delta);
+    successfulEvents.add(delta);
   }
 
   private void changeFailedEvents(long delta) {
-    failedEvents.addAndGet(delta);
+    failedEvents.add(delta);
   }
 
   private void changeTotalEvents(long delta) {
-    totalEvents.addAndGet(delta);
+    totalEvents.add(delta);
   }
 
   /**
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcLoader.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcLoader.java
index bc380dd..5196bce 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcLoader.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcLoader.java
@@ -38,8 +38,8 @@ public class JdbcLoader<K, V> extends AbstractJdbcCallback 
implements CacheLoade
   }
 
   // Constructor for test purposes only
-  JdbcLoader(SqlHandler sqlHandler) {
-    super(sqlHandler);
+  JdbcLoader(SqlHandler sqlHandler, InternalCache cache) {
+    super(sqlHandler, cache);
   }
 
   /**
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcWriter.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcWriter.java
index 72ba298..81b4709 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcWriter.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/JdbcWriter.java
@@ -23,7 +23,6 @@ import org.apache.geode.cache.CacheWriterException;
 import org.apache.geode.cache.EntryEvent;
 import org.apache.geode.cache.RegionEvent;
 import org.apache.geode.cache.SerializedCacheValue;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.connectors.jdbc.internal.AbstractJdbcCallback;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
 import org.apache.geode.internal.cache.InternalCache;
@@ -43,8 +42,8 @@ public class JdbcWriter<K, V> extends AbstractJdbcCallback 
implements CacheWrite
   }
 
   // Constructor for test purposes only
-  JdbcWriter(SqlHandler sqlHandler) {
-    super(sqlHandler);
+  JdbcWriter(SqlHandler sqlHandler, InternalCache cache) {
+    super(sqlHandler, cache);
   }
 
 
@@ -84,7 +83,8 @@ public class JdbcWriter<K, V> extends AbstractJdbcCallback 
implements CacheWrite
   }
 
   private PdxInstance getPdxNewValue(EntryEvent<K, V> event) {
-    DefaultQuery.setPdxReadSerialized(true);
+    Boolean initialPdxReadSerialized = 
cache.getPdxRegistry().getPdxReadSerializedOverride();
+    cache.getPdxRegistry().setPdxReadSerializedOverride(true);
     try {
       Object newValue = event.getNewValue();
       if (!(newValue instanceof PdxInstance)) {
@@ -95,14 +95,14 @@ public class JdbcWriter<K, V> extends AbstractJdbcCallback 
implements CacheWrite
           newValue = CopyHelper.copy(newValue);
         }
         if (newValue != null && !(newValue instanceof PdxInstance)) {
-          String valueClassName = newValue == null ? "null" : 
newValue.getClass().getName();
+          String valueClassName = newValue.getClass().getName();
           throw new IllegalArgumentException(getClass().getSimpleName()
               + " only supports PDX values; newValue is " + valueClassName);
         }
       }
       return (PdxInstance) newValue;
     } finally {
-      DefaultQuery.setPdxReadSerialized(false);
+      
cache.getPdxRegistry().setPdxReadSerializedOverride(initialPdxReadSerialized);
     }
   }
 }
diff --git 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/AbstractJdbcCallback.java
 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/AbstractJdbcCallback.java
index 8f28d67..9074e68 100644
--- 
a/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/AbstractJdbcCallback.java
+++ 
b/geode-connectors/src/main/java/org/apache/geode/connectors/jdbc/internal/AbstractJdbcCallback.java
@@ -24,13 +24,15 @@ import org.apache.geode.internal.cache.InternalCache;
 public abstract class AbstractJdbcCallback implements CacheCallback {
 
   private volatile SqlHandler sqlHandler;
+  protected volatile InternalCache cache;
 
   protected AbstractJdbcCallback() {
     // nothing
   }
 
-  protected AbstractJdbcCallback(SqlHandler sqlHandler) {
+  protected AbstractJdbcCallback(SqlHandler sqlHandler, InternalCache cache) {
     this.sqlHandler = sqlHandler;
+    this.cache = cache;
   }
 
   @Override
@@ -57,6 +59,7 @@ public abstract class AbstractJdbcCallback implements 
CacheCallback {
 
   private synchronized void initialize(InternalCache cache) {
     if (sqlHandler == null) {
+      this.cache = cache;
       JdbcConnectorService service = 
cache.getService(JdbcConnectorService.class);
       DataSourceManager manager = new DataSourceManager(new 
HikariJdbcDataSourceFactory());
       sqlHandler = new SqlHandler(manager, service);
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
index e8c7e63..28188d2 100644
--- 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterIntegrationTest.java
@@ -39,6 +39,7 @@ import 
org.apache.geode.connectors.jdbc.internal.RegionMappingExistsException;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
 import org.apache.geode.connectors.jdbc.internal.TestConfigService;
 import org.apache.geode.connectors.jdbc.internal.TestableConnectionManager;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.test.junit.categories.IntegrationTest;
 
@@ -49,7 +50,7 @@ public class JdbcAsyncWriterIntegrationTest {
   private static final String REGION_TABLE_NAME = "employees";
   private static final String CONNECTION_URL = "jdbc:derby:memory:" + DB_NAME 
+ ";create=true";
 
-  private Cache cache;
+  private InternalCache cache;
   private Region<String, PdxInstance> employees;
   private Connection connection;
   private Statement statement;
@@ -61,7 +62,7 @@ public class JdbcAsyncWriterIntegrationTest {
 
   @Before
   public void setup() throws Exception {
-    cache = new CacheFactory().set("locators", "").set("mcast-port", "0")
+    cache = (InternalCache) new CacheFactory().set("locators", 
"").set("mcast-port", "0")
         .setPdxReadSerialized(false).create();
     employees = createRegionWithJDBCAsyncWriter(REGION_TABLE_NAME);
     connection = DriverManager.getConnection(CONNECTION_URL);
@@ -235,7 +236,7 @@ public class JdbcAsyncWriterIntegrationTest {
 
   private Region<String, PdxInstance> createRegionWithJDBCAsyncWriter(String 
regionName)
       throws ConnectionConfigExistsException, RegionMappingExistsException {
-    jdbcWriter = new JdbcAsyncWriter(createSqlHandler());
+    jdbcWriter = new JdbcAsyncWriter(createSqlHandler(), cache);
     
cache.createAsyncEventQueueFactory().setBatchSize(1).setBatchTimeInterval(1)
         .create("jdbcAsyncQueue", jdbcWriter);
 
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterTest.java
index 578b5c4..f3f445f 100644
--- 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterTest.java
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcAsyncWriterTest.java
@@ -31,9 +31,12 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.asyncqueue.AsyncEvent;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.InternalRegion;
+import org.apache.geode.test.fake.Fakes;
 import org.apache.geode.test.junit.categories.UnitTest;
 
 @Category(UnitTest.class)
@@ -43,13 +46,17 @@ public class JdbcAsyncWriterTest {
   private InternalRegion region;
 
   private JdbcAsyncWriter writer;
+  private InternalCache cache;
 
   @Before
   public void setup() {
     sqlHandler = mock(SqlHandler.class);
     region = mock(InternalRegion.class);
+    cache = Fakes.cache();
 
-    writer = new JdbcAsyncWriter(sqlHandler);
+    writer = new JdbcAsyncWriter(sqlHandler, cache);
+
+    when(region.getRegionService()).thenReturn(cache);
   }
 
   @Test
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
index f162c3d..f37087f 100644
--- 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcLoaderIntegrationTest.java
@@ -35,6 +35,7 @@ import 
org.apache.geode.connectors.jdbc.internal.RegionMappingExistsException;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
 import org.apache.geode.connectors.jdbc.internal.TestConfigService;
 import org.apache.geode.connectors.jdbc.internal.TestableConnectionManager;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.test.junit.categories.IntegrationTest;
 
@@ -45,13 +46,13 @@ public class JdbcLoaderIntegrationTest {
   private static final String REGION_TABLE_NAME = "employees";
   private static final String CONNECTION_URL = "jdbc:derby:memory:" + DB_NAME 
+ ";create=true";
 
-  private Cache cache;
+  private InternalCache cache;
   private Connection connection;
   private Statement statement;
 
   @Before
   public void setUp() throws Exception {
-    cache = new CacheFactory().set("locators", "").set("mcast-port", "0")
+    cache = (InternalCache) new CacheFactory().set("locators", 
"").set("mcast-port", "0")
         .setPdxReadSerialized(false).create();
     connection = DriverManager.getConnection(CONNECTION_URL);
     statement = connection.createStatement();
@@ -102,7 +103,7 @@ public class JdbcLoaderIntegrationTest {
 
   private Region<String, PdxInstance> createRegionWithJDBCLoader(String 
regionName)
       throws ConnectionConfigExistsException, RegionMappingExistsException {
-    JdbcLoader<String, PdxInstance> jdbcLoader = new 
JdbcLoader<>(createSqlHandler());
+    JdbcLoader<String, PdxInstance> jdbcLoader = new 
JdbcLoader<>(createSqlHandler(), cache);
     RegionFactory<String, PdxInstance> regionFactory = 
cache.createRegionFactory(REPLICATE);
     regionFactory.setCacheLoader(jdbcLoader);
     return regionFactory.create(regionName);
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcLoaderTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcLoaderTest.java
index 8d4d062..76db6c4 100644
--- 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcLoaderTest.java
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcLoaderTest.java
@@ -26,7 +26,9 @@ import org.junit.experimental.categories.Category;
 
 import org.apache.geode.cache.LoaderHelper;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.InternalRegion;
+import org.apache.geode.test.fake.Fakes;
 import org.apache.geode.test.junit.categories.UnitTest;
 
 @Category(UnitTest.class)
@@ -37,14 +39,17 @@ public class JdbcLoaderTest {
 
   private JdbcLoader<Object, Object> loader;
 
+  private InternalCache cache;
+
   @Before
   public void setUp() throws Exception {
+    cache = Fakes.cache();
     sqlHandler = mock(SqlHandler.class);
     loaderHelper = mock(LoaderHelper.class);
 
     when(loaderHelper.getRegion()).thenReturn(mock(InternalRegion.class));
 
-    loader = new JdbcLoader<>(sqlHandler);
+    loader = new JdbcLoader<>(sqlHandler, cache);
   }
 
   @Test
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
index 8401c42..8945838 100644
--- 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcWriterIntegrationTest.java
@@ -42,6 +42,7 @@ import 
org.apache.geode.connectors.jdbc.internal.RegionMappingExistsException;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
 import org.apache.geode.connectors.jdbc.internal.TestConfigService;
 import org.apache.geode.connectors.jdbc.internal.TestableConnectionManager;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.test.junit.categories.IntegrationTest;
 
@@ -52,7 +53,7 @@ public class JdbcWriterIntegrationTest {
   private static final String REGION_TABLE_NAME = "employees";
   private static final String CONNECTION_URL = "jdbc:derby:memory:" + DB_NAME 
+ ";create=true";
 
-  private Cache cache;
+  private InternalCache cache;
   private Region<String, PdxInstance> employees;
   private Connection connection;
   private Statement statement;
@@ -64,7 +65,7 @@ public class JdbcWriterIntegrationTest {
 
   @Before
   public void setup() throws Exception {
-    cache = new CacheFactory().set("locators", "").set("mcast-port", "0")
+    cache = (InternalCache) new CacheFactory().set("locators", 
"").set("mcast-port", "0")
         .setPdxReadSerialized(false).create();
     employees = createRegionWithJDBCSynchronousWriter(REGION_TABLE_NAME);
 
@@ -209,7 +210,7 @@ public class JdbcWriterIntegrationTest {
 
   private Region<String, PdxInstance> 
createRegionWithJDBCSynchronousWriter(String regionName)
       throws ConnectionConfigExistsException, RegionMappingExistsException {
-    jdbcWriter = new JdbcWriter(createSqlHandler());
+    jdbcWriter = new JdbcWriter(createSqlHandler(), cache);
 
     RegionFactory<String, PdxInstance> regionFactory =
         cache.createRegionFactory(RegionShortcut.REPLICATE);
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcWriterTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcWriterTest.java
index f8a4dc8..eeb6bca 100644
--- 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcWriterTest.java
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/JdbcWriterTest.java
@@ -27,12 +27,15 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.EntryEvent;
 import org.apache.geode.cache.RegionEvent;
 import org.apache.geode.cache.SerializedCacheValue;
 import org.apache.geode.connectors.jdbc.internal.SqlHandler;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.InternalRegion;
 import org.apache.geode.pdx.PdxInstance;
+import org.apache.geode.test.fake.Fakes;
 import org.apache.geode.test.junit.categories.UnitTest;
 
 @Category(UnitTest.class)
@@ -43,6 +46,7 @@ public class JdbcWriterTest {
   private SqlHandler sqlHandler;
   private SerializedCacheValue<Object> serializedNewValue;
   private RegionEvent<Object, Object> regionEvent;
+  private InternalCache cache;
 
   private JdbcWriter<Object, Object> writer;
 
@@ -53,12 +57,14 @@ public class JdbcWriterTest {
     sqlHandler = mock(SqlHandler.class);
     serializedNewValue = mock(SerializedCacheValue.class);
     regionEvent = mock(RegionEvent.class);
+    cache = Fakes.cache();
 
     when(entryEvent.getRegion()).thenReturn(mock(InternalRegion.class));
+    when(entryEvent.getRegion().getRegionService()).thenReturn(cache);
     when(entryEvent.getSerializedNewValue()).thenReturn(serializedNewValue);
     when(serializedNewValue.getDeserializedValue()).thenReturn(pdxInstance);
 
-    writer = new JdbcWriter<>(sqlHandler);
+    writer = new JdbcWriter<>(sqlHandler, cache);
   }
 
   @Test
diff --git 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/AbstractJdbcCallbackTest.java
 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/AbstractJdbcCallbackTest.java
index 95fcdf0..c089b76 100644
--- 
a/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/AbstractJdbcCallbackTest.java
+++ 
b/geode-connectors/src/test/java/org/apache/geode/connectors/jdbc/internal/AbstractJdbcCallbackTest.java
@@ -27,6 +27,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.test.fake.Fakes;
 import org.apache.geode.test.junit.categories.UnitTest;
 
 @Category(UnitTest.class)
@@ -34,21 +35,23 @@ public class AbstractJdbcCallbackTest {
 
   private AbstractJdbcCallback jdbcCallback;
   private SqlHandler sqlHandler;
+  private InternalCache cache;
 
   @Before
-  public void setUp() throws Exception {
+  public void setUp() {
+    cache = Fakes.cache();
     sqlHandler = mock(SqlHandler.class);
-    jdbcCallback = new AbstractJdbcCallback(sqlHandler) {};
+    jdbcCallback = new AbstractJdbcCallback(sqlHandler, cache) {};
   }
 
   @Test
-  public void closesSqlHandler() throws Exception {
+  public void closesSqlHandler() {
     jdbcCallback.close();
     verify(sqlHandler, times(1)).close();
   }
 
   @Test
-  public void returnsCorrectSqlHander() throws Exception {
+  public void returnsCorrectSqlHander() {
     assertThat(jdbcCallback.getSqlHandler()).isSameAs(sqlHandler);
   }
 
diff --git 
a/geode-core/src/main/java/org/apache/geode/cache/query/internal/DefaultQuery.java
 
b/geode-core/src/main/java/org/apache/geode/cache/query/internal/DefaultQuery.java
index 4df0bbf..74e6686 100644
--- 
a/geode-core/src/main/java/org/apache/geode/cache/query/internal/DefaultQuery.java
+++ 
b/geode-core/src/main/java/org/apache/geode/cache/query/internal/DefaultQuery.java
@@ -23,10 +23,8 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.LongAdder;
 
-import org.apache.geode.cache.Cache;
-import org.apache.geode.cache.CacheClosedException;
 import org.apache.geode.cache.CacheRuntimeException;
 import org.apache.geode.cache.Region;
 import org.apache.geode.cache.client.internal.ProxyCache;
@@ -73,9 +71,9 @@ public class DefaultQuery implements Query {
 
   private ServerProxy serverProxy;
 
-  protected AtomicLong numExecutions = new AtomicLong(0);
+  private final LongAdder numExecutions = new LongAdder();
 
-  private final AtomicLong totalExecutionTime = new AtomicLong(0);
+  private final LongAdder totalExecutionTime = new LongAdder();
 
   private final QueryStatistics stats;
 
@@ -128,110 +126,106 @@ public class DefaultQuery implements Query {
 
   public static TestHook testHook;
 
-  private static final ThreadLocal<Boolean> pdxReadSerialized =
-      ThreadLocal.withInitial(() -> Boolean.FALSE);
-
-  /** indicates query executed remotely */
+  /**
+   * indicates query executed remotely
+   */
   private boolean isRemoteQuery = false;
 
   // to prevent objects from getting deserialized
   private boolean keepSerialized = false;
 
-  public static final Set<String> reservedKeywords = new HashSet<>();
-  static {
-    reservedKeywords.add("hint");
-    reservedKeywords.add("all");
-    reservedKeywords.add("map");
-    reservedKeywords.add("count");
-    reservedKeywords.add("sum");
-    reservedKeywords.add("nvl");
-    reservedKeywords.add("unique");
-    reservedKeywords.add("except");
-    reservedKeywords.add("declare");
-    reservedKeywords.add("for");
-    reservedKeywords.add("list");
-    reservedKeywords.add("min");
-    reservedKeywords.add("element");
-    reservedKeywords.add("false");
-    reservedKeywords.add("abs");
-    reservedKeywords.add("true");
-    reservedKeywords.add("bag");
-    reservedKeywords.add("time");
-    reservedKeywords.add("define");
-    reservedKeywords.add("and");
-    reservedKeywords.add("asc");
-    reservedKeywords.add("desc");
-    reservedKeywords.add("select");
-    reservedKeywords.add("intersect");
-    reservedKeywords.add("flatten");
-    reservedKeywords.add("float");
-    reservedKeywords.add("import");
-    reservedKeywords.add("exists");
-    reservedKeywords.add("distinct");
-    reservedKeywords.add("boolean");
-    reservedKeywords.add("string");
-    reservedKeywords.add("group");
-    reservedKeywords.add("interval");
-    reservedKeywords.add("orelse");
-    reservedKeywords.add("where");
-    reservedKeywords.add("trace");
-    reservedKeywords.add("first");
-    reservedKeywords.add("set");
-    reservedKeywords.add("octet");
-    reservedKeywords.add("nil");
-    reservedKeywords.add("avg");
-    reservedKeywords.add("order");
-    reservedKeywords.add("long");
-    reservedKeywords.add("limit");
-    reservedKeywords.add("mod");
-    reservedKeywords.add("type");
-    reservedKeywords.add("undefine");
-    reservedKeywords.add("in");
-    reservedKeywords.add("null");
-    reservedKeywords.add("some");
-    reservedKeywords.add("to_date");
-    reservedKeywords.add("short");
-    reservedKeywords.add("enum");
-    reservedKeywords.add("timestamp");
-    reservedKeywords.add("having");
-    reservedKeywords.add("dictionary");
-    reservedKeywords.add("char");
-    reservedKeywords.add("listtoset");
-    reservedKeywords.add("array");
-    reservedKeywords.add("union");
-    reservedKeywords.add("or");
-    reservedKeywords.add("max");
-    reservedKeywords.add("from");
-    reservedKeywords.add("query");
-    reservedKeywords.add("collection");
-    reservedKeywords.add("like");
-    reservedKeywords.add("date");
-    reservedKeywords.add("byte");
-    reservedKeywords.add("any");
-    reservedKeywords.add("is_undefined");
-    reservedKeywords.add("double");
-    reservedKeywords.add("int");
-    reservedKeywords.add("andthen");
-    reservedKeywords.add("last");
-    reservedKeywords.add("struct");
-    reservedKeywords.add("undefined");
-    reservedKeywords.add("is_defined");
-    reservedKeywords.add("not");
-    reservedKeywords.add("by");
-    reservedKeywords.add("as");
-  }
+  public static final Set<String> reservedKeywords = new HashSet<String>() {
+    {
+      add("hint");
+      add("all");
+      add("map");
+      add("count");
+      add("sum");
+      add("nvl");
+      add("unique");
+      add("except");
+      add("declare");
+      add("for");
+      add("list");
+      add("min");
+      add("element");
+      add("false");
+      add("abs");
+      add("true");
+      add("bag");
+      add("time");
+      add("define");
+      add("and");
+      add("asc");
+      add("desc");
+      add("select");
+      add("intersect");
+      add("flatten");
+      add("float");
+      add("import");
+      add("exists");
+      add("distinct");
+      add("boolean");
+      add("string");
+      add("group");
+      add("interval");
+      add("orelse");
+      add("where");
+      add("trace");
+      add("first");
+      add("set");
+      add("octet");
+      add("nil");
+      add("avg");
+      add("order");
+      add("long");
+      add("limit");
+      add("mod");
+      add("type");
+      add("undefine");
+      add("in");
+      add("null");
+      add("some");
+      add("to_date");
+      add("short");
+      add("enum");
+      add("timestamp");
+      add("having");
+      add("dictionary");
+      add("char");
+      add("listtoset");
+      add("array");
+      add("union");
+      add("or");
+      add("max");
+      add("from");
+      add("query");
+      add("collection");
+      add("like");
+      add("date");
+      add("byte");
+      add("any");
+      add("is_undefined");
+      add("double");
+      add("int");
+      add("andthen");
+      add("last");
+      add("struct");
+      add("undefined");
+      add("is_defined");
+      add("not");
+      add("by");
+      add("as");
+    }
+  };
 
-  /**
-   * Caches the fields not found in any Pdx version. This threadlocal will be 
cleaned up after query
-   * execution completes in {@linkplain #executeUsingContext(ExecutionContext)}
-   */
+  // /**
+  // * Caches the fields not found in any Pdx version. This threadlocal will 
be cleaned up after
+  // query
+  // * execution completes in {@linkplain 
#executeUsingContext(ExecutionContext)}
+  // */
   private static final ThreadLocal<Map<String, Set<String>>> 
pdxClassToFieldsMap =
-      new ThreadLocal() {
-        @Override
-        protected Map<String, Set<String>> initialValue() {
-          return new HashMap<>();
-        }
-      };
+      ThreadLocal.withInitial(HashMap::new);
 
   public static Map<String, Set<String>> getPdxClasstofieldsmap() {
     return pdxClassToFieldsMap.get();
@@ -243,12 +237,7 @@ public class DefaultQuery implements Query {
    * query execution completes in {@linkplain 
#executeUsingContext(ExecutionContext)}
    */
   private static final ThreadLocal<Map<String, Set<String>>> 
pdxClassToMethodsMap =
-      new ThreadLocal() {
-        @Override
-        protected Map<String, Set<String>> initialValue() {
-          return new HashMap<String, Set<String>>();
-        }
-      };
+      ThreadLocal.withInitial(HashMap::new);
 
   public static void setPdxClasstoMethodsmap(Map<String, Set<String>> map) {
     pdxClassToMethodsMap.set(map);
@@ -282,23 +271,6 @@ public class DefaultQuery implements Query {
     this.stats = new DefaultQueryStatistics();
   }
 
-  public static boolean getPdxReadSerialized() {
-    return pdxReadSerialized.get();
-  }
-
-  public static void setPdxReadSerialized(boolean readSerialized) {
-    pdxReadSerialized.set(readSerialized);
-  }
-
-  /**
-   * helper method for setPdxReadSerialized
-   */
-  public static void setPdxReadSerialized(Cache cache, boolean readSerialized) 
{
-    if (cache != null && !cache.getPdxReadSerialized()) {
-      setPdxReadSerialized(readSerialized);
-    }
-  }
-
   /**
    * Get statistics information for this query.
    */
@@ -347,9 +319,10 @@ public class DefaultQuery implements Query {
     QueryExecutor qe = checkQueryOnPR(params);
 
     Object result = null;
+    Boolean initialPdxReadSerialized = 
this.cache.getPdxRegistry().getPdxReadSerializedOverride();
     try {
       // Setting the readSerialized flag for local queries
-      setPdxReadSerialized(this.cache, true);
+      this.cache.getPdxRegistry().setPdxReadSerializedOverride(true);
       ExecutionContext context = new QueryExecutionContext(params, this.cache, 
this);
       indexObserver = this.startTrace();
       if (qe != null) {
@@ -426,7 +399,7 @@ public class DefaultQuery implements Query {
             "Query was canceled. It may be due to low memory or the query was 
running longer than the MAX_QUERY_EXECUTION_TIME.");
       }
     } finally {
-      setPdxReadSerialized(this.cache, false);
+      
this.cache.getPdxRegistry().setPdxReadSerializedOverride(initialPdxReadSerialized);
       if (queryMonitor != null) {
         queryMonitor.stopMonitoringQueryThread(Thread.currentThread(), this);
       }
@@ -611,7 +584,7 @@ public class DefaultQuery implements Query {
       if (!this.isQueryWithFunctionContext()) {
         throw new UnsupportedOperationException(
             
LocalizedStrings.DefaultQuery_A_QUERY_ON_A_PARTITIONED_REGION_0_MAY_NOT_REFERENCE_ANY_OTHER_REGION_1
-                .toLocalizedString(new Object[] {prs.get(0).getName(), 
prs.get(1).getName()}));
+                .toLocalizedString(prs.get(0).getName(), 
prs.get(1).getName()));
       }
 
       // If there are more than one PRs they have to be co-located.
@@ -634,7 +607,7 @@ public class DefaultQuery implements Query {
         if (!colocated) {
           throw new UnsupportedOperationException(
               
LocalizedStrings.DefaultQuery_A_QUERY_ON_A_PARTITIONED_REGION_0_MAY_NOT_REFERENCE_ANY_OTHER_NON_COLOCATED_PARTITIONED_REGION_1
-                  .toLocalizedString(new Object[] {eachPR.getName(), 
other.getName()}));
+                  .toLocalizedString(eachPR.getName(), other.getName()));
         }
 
       } // eachPR
@@ -723,14 +696,14 @@ public class DefaultQuery implements Query {
   }
 
   private void updateStatistics(long executionTime) {
-    this.numExecutions.incrementAndGet();
-    this.totalExecutionTime.addAndGet(executionTime);
+    this.numExecutions.increment();
+    this.totalExecutionTime.add(executionTime);
     this.cache.getCachePerfStats().endQueryExecution(executionTime);
   }
 
   // TODO: Implement the function. Toggle the isCompiled flag accordingly
   @Override
-  public void compile() throws TypeMismatchException, NameResolutionException {
+  public void compile() {
     throw new UnsupportedOperationException(
         LocalizedStrings.DefaultQuery_NOT_YET_IMPLEMENTED.toLocalizedString());
   }
@@ -751,7 +724,7 @@ public class DefaultQuery implements Query {
      */
     @Override
     public long getTotalExecutionTime() {
-      return DefaultQuery.this.totalExecutionTime.get();
+      return DefaultQuery.this.totalExecutionTime.longValue();
     }
 
     /**
@@ -759,7 +732,7 @@ public class DefaultQuery implements Query {
      */
     @Override
     public long getNumExecutions() {
-      return DefaultQuery.this.numExecutions.get();
+      return DefaultQuery.this.numExecutions.longValue();
     }
   }
 
@@ -892,7 +865,7 @@ public class DefaultQuery implements Query {
     float time = (NanoTimer.getTime() - startTime) / 1.0e6f;
 
     String usedIndexesString = null;
-    if (observer != null && observer instanceof IndexTrackingQueryObserver) {
+    if (observer instanceof IndexTrackingQueryObserver) {
       IndexTrackingQueryObserver indexObserver = (IndexTrackingQueryObserver) 
observer;
       Map usedIndexes = indexObserver.getUsedIndexes();
       indexObserver.reset();
diff --git 
a/geode-core/src/main/java/org/apache/geode/cache/query/internal/index/IndexManager.java
 
b/geode-core/src/main/java/org/apache/geode/cache/query/internal/index/IndexManager.java
index 104bfcb..4340b7c 100644
--- 
a/geode-core/src/main/java/org/apache/geode/cache/query/internal/index/IndexManager.java
+++ 
b/geode-core/src/main/java/org/apache/geode/cache/query/internal/index/IndexManager.java
@@ -52,7 +52,6 @@ import org.apache.geode.cache.query.QueryException;
 import org.apache.geode.cache.query.TypeMismatchException;
 import org.apache.geode.cache.query.internal.CompiledPath;
 import org.apache.geode.cache.query.internal.CompiledValue;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.cache.query.internal.ExecutionContext;
 import org.apache.geode.cache.query.internal.MapIndexable;
 import org.apache.geode.cache.query.internal.NullToken;
@@ -274,12 +273,12 @@ public class IndexManager {
           
LocalizedStrings.IndexCreationMsg_CANCELED_DUE_TO_LOW_MEMORY.toLocalizedString());
     }
 
-    boolean oldReadSerialized = DefaultQuery.getPdxReadSerialized();
-    DefaultQuery.setPdxReadSerialized(this.region.getCache(), true);
+    boolean oldReadSerialized = 
this.cache.getPdxRegistry().getPdxReadSerializedOverride();
+    this.cache.getPdxRegistry().setPdxReadSerializedOverride(true);
 
     TXStateProxy tx = null;
-    if (!((InternalCache) this.region.getCache()).isClient()) {
-      tx = ((TXManagerImpl) 
this.region.getCache().getCacheTransactionManager()).pauseTransaction();
+    if (!((InternalCache) this.cache).isClient()) {
+      tx = ((TXManagerImpl) 
this.cache.getCacheTransactionManager()).pauseTransaction();
     }
 
     try {
@@ -430,8 +429,8 @@ public class IndexManager {
       return index;
 
     } finally {
-      DefaultQuery.setPdxReadSerialized(this.region.getCache(), 
oldReadSerialized);
-      ((TXManagerImpl) 
this.region.getCache().getCacheTransactionManager()).unpauseTransaction(tx);
+      
this.cache.getPdxRegistry().setPdxReadSerializedOverride(oldReadSerialized);
+      ((TXManagerImpl) 
this.cache.getCacheTransactionManager()).unpauseTransaction(tx);
 
     }
   }
@@ -905,8 +904,8 @@ public class IndexManager {
     }
     boolean throwException = false;
     HashMap<String, Exception> exceptionsMap = new HashMap<String, 
Exception>();
-    boolean oldReadSerialized = DefaultQuery.getPdxReadSerialized();
-    DefaultQuery.setPdxReadSerialized(true);
+    boolean oldReadSerialized = 
this.cache.getPdxRegistry().getPdxReadSerializedOverride();
+    this.cache.getPdxRegistry().setPdxReadSerializedOverride(true);
     try {
       Iterator entryIter = ((LocalRegion) region).getBestIterator(true);
       while (entryIter.hasNext()) {
@@ -947,7 +946,7 @@ public class IndexManager {
         throw new MultiIndexCreationException(exceptionsMap);
       }
     } finally {
-      DefaultQuery.setPdxReadSerialized(oldReadSerialized);
+      
this.cache.getPdxRegistry().setPdxReadSerializedOverride(oldReadSerialized);
       notifyAfterUpdate();
     }
   }
@@ -1002,10 +1001,11 @@ public class IndexManager {
    */
   private void processAction(RegionEntry entry, int action, int opCode) throws 
QueryException {
     final long startPA = getCachePerfStats().startIndexUpdate();
-    DefaultQuery.setPdxReadSerialized(this.region.getCache(), true);
+    Boolean initialPdxReadSerialized = 
this.cache.getPdxRegistry().getPdxReadSerializedOverride();
+    this.cache.getPdxRegistry().setPdxReadSerializedOverride(true);
     TXStateProxy tx = null;
-    if (!((InternalCache) this.region.getCache()).isClient()) {
-      tx = ((TXManagerImpl) 
this.region.getCache().getCacheTransactionManager()).pauseTransaction();
+    if (!this.cache.isClient()) {
+      tx = ((TXManagerImpl) 
this.cache.getCacheTransactionManager()).pauseTransaction();
     }
 
     try {
@@ -1147,8 +1147,8 @@ public class IndexManager {
         }
       }
     } finally {
-      DefaultQuery.setPdxReadSerialized(this.region.getCache(), false);
-      ((TXManagerImpl) 
this.region.getCache().getCacheTransactionManager()).unpauseTransaction(tx);
+      
this.cache.getPdxRegistry().setPdxReadSerializedOverride(initialPdxReadSerialized);
+      ((TXManagerImpl) 
this.cache.getCacheTransactionManager()).unpauseTransaction(tx);
 
       getCachePerfStats().endIndexUpdate(startPA);
     }
diff --git 
a/geode-core/src/main/java/org/apache/geode/distributed/internal/streaming/StreamingOperation.java
 
b/geode-core/src/main/java/org/apache/geode/distributed/internal/streaming/StreamingOperation.java
index 5aa9c8c..5c6675f 100644
--- 
a/geode-core/src/main/java/org/apache/geode/distributed/internal/streaming/StreamingOperation.java
+++ 
b/geode-core/src/main/java/org/apache/geode/distributed/internal/streaming/StreamingOperation.java
@@ -33,6 +33,9 @@ import org.apache.geode.GemFireRethrowable;
 import org.apache.geode.InternalGemFireError;
 import org.apache.geode.InternalGemFireException;
 import org.apache.geode.SystemFailure;
+import org.apache.geode.cache.Cache;
+import org.apache.geode.cache.CacheClosedException;
+import org.apache.geode.cache.CacheFactory;
 import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.cache.query.internal.PRQueryTraceInfo;
 import org.apache.geode.cache.query.internal.QueryMonitor;
@@ -52,17 +55,18 @@ import 
org.apache.geode.distributed.internal.membership.InternalDistributedMembe
 import org.apache.geode.internal.HeapDataOutputStream;
 import org.apache.geode.internal.InternalDataSerializer;
 import org.apache.geode.internal.Version;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.PartitionedRegionQueryEvaluator;
 import org.apache.geode.internal.cache.Token;
 import org.apache.geode.internal.i18n.LocalizedStrings;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.internal.util.BlobHelper;
+import org.apache.geode.pdx.internal.TypeRegistry;
 
 /**
  * StreamingOperation is an abstraction for sending messages to multiple (or 
single) recipient
  * requesting a potentially large amount of data and receiving the reply with 
data chunked into
  * several messages.
- *
  */
 public abstract class StreamingOperation {
   private static final Logger logger = LogService.getLogger();
@@ -75,7 +79,9 @@ public abstract class StreamingOperation {
 
   public final InternalDistributedSystem sys;
 
-  /** Creates a new instance of StreamingOperation */
+  /**
+   * Creates a new instance of StreamingOperation
+   */
   public StreamingOperation(InternalDistributedSystem sys) {
     this.sys = sys;
   }
@@ -87,10 +93,12 @@ public abstract class StreamingOperation {
    */
   public void getDataFromAll(Set recipients)
       throws org.apache.geode.cache.TimeoutException, InterruptedException {
-    if (Thread.interrupted())
+    if (Thread.interrupted()) {
       throw new InterruptedException();
-    if (recipients.isEmpty())
+    }
+    if (recipients.isEmpty()) {
       return;
+    }
 
     StreamingProcessor processor = new StreamingProcessor(this.sys, 
recipients);
     DistributionMessage m = createRequestMessage(recipients, processor);
@@ -111,7 +119,9 @@ public abstract class StreamingOperation {
     }
   }
 
-  /** Override in subclass to instantiate request message */
+  /**
+   * Override in subclass to instantiate request message
+   */
   protected abstract DistributionMessage createRequestMessage(Set recipients,
       ReplyProcessor21 processor);
 
@@ -146,7 +156,9 @@ public abstract class StreamingOperation {
       int msgsProcessed = 0;
       int numMsgs = 0;
 
-      /** Return true if this is the very last reply msg to process for this 
member */
+      /**
+       * Return true if this is the very last reply msg to process for this 
member
+       */
       protected synchronized boolean trackMessage(StreamingReplyMessage m) {
         this.msgsProcessed++;
 
@@ -209,12 +221,12 @@ public abstract class StreamingOperation {
         }
         if (isLast) {
           super.process(msg, false); // removes from members and cause us to
-                                     // ignore future messages received from 
that member
+          // ignore future messages received from that member
         }
       } finally {
         this.msgsBeingProcessed.decrementAndGet();
         checkIfDone(); // check to see if decrementing msgsBeingProcessed 
requires signalling to
-                       // proceed
+        // proceed
       }
     }
 
@@ -312,7 +324,7 @@ public abstract class StreamingOperation {
             // for the next objects, disallow stream from allocating more 
storage
             do {
               outStream.disallowExpansion(CHUNK_FULL); // sets the mark where 
rollback occurs on
-                                                       // CHUNK_FULL
+              // CHUNK_FULL
 
               nextObject = getNextReplyObject();
 
@@ -341,8 +353,8 @@ public abstract class StreamingOperation {
             break; // receiver no longer cares
           }
           outStream.reset(); // ready for reuse, assumes replyWithData
-                             // does not queue the message but outStream has
-                             // already been used
+          // does not queue the message but outStream has
+          // already been used
         } while (nextObject != Token.END_OF_STREAM);
         // } catch (CancelException e) {
         // // if cache is closed, we cannot send a reply (correct?)
@@ -417,28 +429,30 @@ public abstract class StreamingOperation {
 
   public static class StreamingReplyMessage extends ReplyMessage {
 
-    /** the number of this message */
+    /**
+     * the number of this message
+     */
     protected int msgNum;
 
-    /** whether this message is the last one in this series */
+    /**
+     * whether this message is the last one in this series
+     */
     protected boolean lastMsg;
 
     private transient HeapDataOutputStream chunkStream; // used only on 
sending side, null means
-                                                        // abort
+    // abort
     private transient int numObjects; // used only on sending side
     private transient List objectList = null; // used only on receiving side
 
     private boolean pdxReadSerialized = false; // used to read PDX types in 
serialized form.
     private transient boolean isCanceled = false; // used only on receiving 
side and if
-                                                  // messageProcessor is of 
type
-                                                  // 
PartitionedRegionQueryEvaluator.StreamingQueryPartitionResponse
-
+    // messageProcessor is of type
+    // PartitionedRegionQueryEvaluator.StreamingQueryPartitionResponse
 
     /**
      * @param chunkStream the data to send back, if null then all the 
following parameters are
      *        ignored and any future replies from this member will be ignored, 
and the streaming of
      *        chunks is considered aborted by the receiver.
-     *
      * @param msgNum message number in this series (0-based)
      * @param lastMsg if this is the last message in this series
      */
@@ -451,21 +465,21 @@ public abstract class StreamingOperation {
     public static void send(InternalDistributedMember recipient, int 
processorId,
         ReplyException exception, DistributionManager dm, HeapDataOutputStream 
chunkStream,
         int numObjects, int msgNum, boolean lastMsg, boolean 
pdxReadSerialized) {
-      StreamingReplyMessage m = new StreamingReplyMessage();
-      m.processorId = processorId;
+      StreamingReplyMessage replyMessage = new StreamingReplyMessage();
+      replyMessage.processorId = processorId;
 
       if (exception != null) {
-        m.setException(exception);
-        logger.debug("Replying with exception: {}", m, exception);
+        replyMessage.setException(exception);
+        logger.debug("Replying with exception: {}", replyMessage, exception);
       }
 
-      m.chunkStream = chunkStream;
-      m.numObjects = numObjects;
-      m.setRecipient(recipient);
-      m.msgNum = msgNum;
-      m.lastMsg = lastMsg;
-      m.pdxReadSerialized = pdxReadSerialized;
-      dm.putOutgoing(m);
+      replyMessage.chunkStream = chunkStream;
+      replyMessage.numObjects = numObjects;
+      replyMessage.setRecipient(recipient);
+      replyMessage.msgNum = msgNum;
+      replyMessage.lastMsg = lastMsg;
+      replyMessage.pdxReadSerialized = pdxReadSerialized;
+      dm.putOutgoing(replyMessage);
     }
 
     public int getMessageNumber() {
@@ -480,7 +494,9 @@ public abstract class StreamingOperation {
       return isCanceled;
     }
 
-    /** Return the objects in this chunk as a List, used only on receiving 
side */
+    /**
+     * Return the objects in this chunk as a List, used only on receiving side
+     */
     public List getObjects() {
       return this.objectList;
     }
@@ -501,6 +517,14 @@ public abstract class StreamingOperation {
       this.pdxReadSerialized = in.readBoolean();
       Version senderVersion = 
InternalDataSerializer.getVersionForDataStream(in);
       boolean isSenderAbove_8_1 = senderVersion.compareTo(Version.GFE_81) > 0;
+      InternalCache cache = null;
+      Boolean initialPdxReadSerialized = false;
+      try {
+        cache = (InternalCache) CacheFactory.getAnyInstance();
+        initialPdxReadSerialized = cache.getPdxRegistry().getPdxReadSerializedOverride();
+      } catch (CacheClosedException ignored) {
+        // No cache available (e.g. during shutdown); leave cache null and fall back to the
+        // default override value. The pdxReadSerialized handling below is skipped when cache
+        // is null.
+      }
       if (n == -1) {
         this.objectList = null;
       } else {
@@ -508,8 +532,8 @@ public abstract class StreamingOperation {
         this.objectList = new ArrayList(n);
         // Check if the PDX types needs to be kept in serialized form.
         // This will make readObject() to return PdxInstance form.
-        if (this.pdxReadSerialized) {
-          DefaultQuery.setPdxReadSerialized(true);
+        if (this.pdxReadSerialized && cache != null) {
+          cache.getPdxRegistry().setPdxReadSerializedOverride(true);
         }
         try {
           ReplyProcessor21 messageProcessor = 
ReplyProcessor21.getProcessor(processorId);
@@ -554,8 +578,8 @@ public abstract class StreamingOperation {
             }
           }
         } finally {
-          if (this.pdxReadSerialized) {
-            DefaultQuery.setPdxReadSerialized(false);
+          if (this.pdxReadSerialized && cache != null) {
+            
cache.getPdxRegistry().setPdxReadSerializedOverride(initialPdxReadSerialized);
           }
         }
       }
diff --git 
a/geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java
 
b/geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java
index 6efb94c..2be02c3 100755
--- 
a/geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java
+++ 
b/geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java
@@ -134,7 +134,6 @@ import org.apache.geode.cache.client.internal.PoolImpl;
 import org.apache.geode.cache.control.ResourceManager;
 import org.apache.geode.cache.execute.FunctionService;
 import org.apache.geode.cache.query.QueryService;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.cache.query.internal.DefaultQueryService;
 import org.apache.geode.cache.query.internal.InternalQueryService;
 import org.apache.geode.cache.query.internal.QueryMonitor;
@@ -5057,7 +5056,13 @@ public class GemFireCacheImpl implements InternalCache, 
InternalClientCache, Has
    * requested getObject() on the PdxInstance.
    */
   public boolean getPdxReadSerializedByAnyGemFireServices() {
-    return (getPdxReadSerialized() || DefaultQuery.getPdxReadSerialized())
+    TypeRegistry pdxRegistry = this.getPdxRegistry();
+    boolean pdxReadSerializedOverridden = false;
+    if (pdxRegistry != null) {
+      pdxReadSerializedOverridden = pdxRegistry.getPdxReadSerializedOverride();
+    }
+
+    return (getPdxReadSerialized() || pdxReadSerializedOverridden)
         && PdxInstanceImpl.getPdxReadSerialized();
   }
 
@@ -5151,7 +5156,7 @@ public class GemFireCacheImpl implements InternalCache, 
InternalClientCache, Has
   @Override
   public void setReadSerializedForCurrentThread(boolean value) {
     PdxInstanceImpl.setPdxReadSerialized(value);
-    DefaultQuery.setPdxReadSerialized(value);
+    this.getPdxRegistry().setPdxReadSerializedOverride(value);
   }
 
   // test hook
diff --git 
a/geode-core/src/main/java/org/apache/geode/internal/cache/partitioned/QueryMessage.java
 
b/geode-core/src/main/java/org/apache/geode/internal/cache/partitioned/QueryMessage.java
index ce2dbdd..8098c41 100644
--- 
a/geode-core/src/main/java/org/apache/geode/internal/cache/partitioned/QueryMessage.java
+++ 
b/geode-core/src/main/java/org/apache/geode/internal/cache/partitioned/QueryMessage.java
@@ -169,7 +169,9 @@ public class QueryMessage extends 
StreamingPartitionOperation.StreamingPartition
 
     DefaultQuery query = new DefaultQuery(this.queryString, pr.getCache(), 
false);
     // Remote query, use the PDX types in serialized form.
-    DefaultQuery.setPdxReadSerialized(pr.getCache(), true);
+    Boolean initialPdxReadSerialized =
+        pr.getCache().getPdxRegistry().getPdxReadSerializedOverride();
+    pr.getCache().getPdxRegistry().setPdxReadSerializedOverride(true);
     // In case of "select *" queries we can keep the results in serialized 
form and send
     query.setRemoteQuery(true);
     QueryObserver indexObserver = query.startTrace();
@@ -249,7 +251,7 @@ public class QueryMessage extends 
StreamingPartitionOperation.StreamingPartition
       if (isQueryTraced) {
         this.resultCollector.remove(queryTraceList);
       }
-      DefaultQuery.setPdxReadSerialized(pr.getCache(), false);
+      
pr.getCache().getPdxRegistry().setPdxReadSerializedOverride(initialPdxReadSerialized);
       query.setRemoteQuery(false);
       query.endTrace(indexObserver, traceStartTime, this.resultCollector);
     }
diff --git 
a/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java 
b/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
index ab02806..85575c8 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
@@ -65,6 +65,9 @@ public class TypeRegistry {
 
   private final InternalCache cache;
 
+  private static final ThreadLocal<Boolean> pdxReadSerializedOverride =
+      ThreadLocal.withInitial(() -> Boolean.FALSE);
+
   public TypeRegistry(InternalCache cache, boolean disableTypeRegistry) {
     this.cache = cache;
 
@@ -489,7 +492,6 @@ public class TypeRegistry {
    * @param fieldName the field to look for in the PdxTypes
    * @param className the PdxTypes for this class would be searched
    * @return PdxType having the field or null if not found
-   *
    */
   PdxType getPdxTypeForField(String fieldName, String className) {
     return this.distributedTypeRegistry.getPdxTypeForField(fieldName, 
className);
@@ -533,4 +535,12 @@ public class TypeRegistry {
     }
     return result;
   }
+
+  public Boolean getPdxReadSerializedOverride() {
+    return pdxReadSerializedOverride.get();
+  }
+
+  public void setPdxReadSerializedOverride(boolean overridePdxReadSerialized) {
+    pdxReadSerializedOverride.set(overridePdxReadSerialized);
+  }
 }
diff --git 
a/geode-core/src/test/java/org/apache/geode/cache/query/BaseLineAndCompareQueryPerfJUnitTest.java
 
b/geode-core/src/test/java/org/apache/geode/cache/query/BaseLineAndCompareQueryPerfJUnitTest.java
index 1eecb5b..1787e97 100755
--- 
a/geode-core/src/test/java/org/apache/geode/cache/query/BaseLineAndCompareQueryPerfJUnitTest.java
+++ 
b/geode-core/src/test/java/org/apache/geode/cache/query/BaseLineAndCompareQueryPerfJUnitTest.java
@@ -362,7 +362,8 @@ public class BaseLineAndCompareQueryPerfJUnitTest {
      * Indices share the following percentages: a. countryName: 20% objects b. 
stateName: 33.33%
      * objects c. districtName: 20% objects d. cityName: 50% objects e. 
villageName: No index
      */
-    // qs.createIndex("villageName", IndexType.FUNCTIONAL, "v.name", 
"/Countries c, c.states s,
+    // queryService.createIndex("villageName", IndexType.FUNCTIONAL, "v.name", 
"/Countries c,
+    // c.states s,
     // s.districts d, d.cities ct, d.villages v");
     qs.createIndex("cityName", IndexType.FUNCTIONAL, "ct.name",
         "/Countries c, c.states s, s.districts d, d.cities ct, d.villages 
v");//
diff --git 
a/geode-core/src/test/java/org/apache/geode/cache/query/PdxStringQueryJUnitTest.java
 
b/geode-core/src/test/java/org/apache/geode/cache/query/PdxStringQueryJUnitTest.java
index f27f0e1..871dad4 100644
--- 
a/geode-core/src/test/java/org/apache/geode/cache/query/PdxStringQueryJUnitTest.java
+++ 
b/geode-core/src/test/java/org/apache/geode/cache/query/PdxStringQueryJUnitTest.java
@@ -24,10 +24,8 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheFactory;
 import org.apache.geode.cache.Region;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.cache.query.internal.QueryObserver;
 import org.apache.geode.cache.query.internal.index.CompactRangeIndex;
 import org.apache.geode.cache.query.internal.index.IndexStore.IndexStoreEntry;
@@ -50,10 +48,10 @@ import 
org.apache.geode.test.junit.categories.IntegrationTest;
  */
 @Category(IntegrationTest.class)
 public class PdxStringQueryJUnitTest {
-  private InternalCache c;
-  private Region r;
+  private InternalCache cache;
+  private Region region;
   private String regName = "exampleRegion";
-  QueryService qs;
+  QueryService queryService;
   QueryObserver observer;
 
   private static final int NO_INDEX = 0;
@@ -63,33 +61,33 @@ public class PdxStringQueryJUnitTest {
 
   @Before
   public void setUp() {
-    this.c = (InternalCache) new CacheFactory().set(MCAST_PORT, "0").create();
-    r = c.createRegionFactory().create(regName);
-    qs = c.getQueryService();
+    this.cache = (InternalCache) new CacheFactory().set(MCAST_PORT, 
"0").create();
+    region = cache.createRegionFactory().create(regName);
+    queryService = cache.getQueryService();
   }
 
   @After
   public void tearDown() {
-    this.c.close();
+    this.cache.close();
   }
 
   @Test
   public void testQueriesPdxInstances() throws Exception {
     putPdxInstances();
     executeQueriesValidateResults(NO_INDEX);
-    r.clear();
+    region.clear();
   }
 
   @Test
   public void testQueriesHeterogenousObjects() throws Exception {
     putHeterogeneousObjects();
     executeQueriesValidateResults(NO_INDEX);
-    r.clear();
+    region.clear();
   }
 
   @Test
   public void testQueriesWithCompactRangeIndexPdxInstances() throws Exception {
-    Index index = qs.createIndex("index1", "secId", "/exampleRegion");
+    Index index = queryService.createIndex("index1", "secId", 
"/exampleRegion");
     assertTrue(index instanceof CompactRangeIndex);
     putPdxInstances();
     CloseableIterator<IndexStoreEntry> indexIterator = null;
@@ -104,12 +102,12 @@ public class PdxStringQueryJUnitTest {
       }
     }
     executeQueriesValidateResults(INDEX_TYPE_COMPACTRANGE);
-    r.clear();
+    region.clear();
   }
 
   @Test
   public void testQueriesWithCompactRangeIndexPdxInstancesREUpdateInProgress() 
throws Exception {
-    Index index = qs.createIndex("index1", "secId", "/exampleRegion");
+    Index index = queryService.createIndex("index1", "secId", 
"/exampleRegion");
     assertTrue(index instanceof CompactRangeIndex);
     putPdxInstancesWithREUpdateInProgress();
     CloseableIterator<IndexStoreEntry> indexIterator = null;
@@ -124,21 +122,22 @@ public class PdxStringQueryJUnitTest {
       }
     }
     executeQueriesValidateResults(INDEX_TYPE_COMPACTRANGE);
-    r.clear();
+    region.clear();
   }
 
   @Test
   public void testQueriesWithCompactRangeIndexHeterogenousObjects() throws 
Exception {
     putHeterogeneousObjects();
     executeQueriesValidateResults(INDEX_TYPE_COMPACTRANGE);
-    r.clear();
+    region.clear();
   }
 
   @Test
   public void testQueriesWithRangeIndex() throws Exception {
-    Index index = qs.createIndex("index2", "p.secId", "/exampleRegion p, 
p.positions.values");
+    Index index =
+        queryService.createIndex("index2", "p.secId", "/exampleRegion p, 
p.positions.values");
     assertTrue(index instanceof RangeIndex);
-    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.c);
+    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.cache);
     pf.writeInt("ID", 111);
     pf.writeString("secId", "IBM");
     pf.writeString("status", "active");
@@ -148,14 +147,14 @@ public class PdxStringQueryJUnitTest {
     pf.writeObject("positions", positions);
     PdxInstance pi = pf.create();
 
-    r.put("IBM", pi);
+    region.put("IBM", pi);
 
     positions = new HashMap();
     positions.put("price", "100");
     positions.put("price", "120");
-    r.put("YHOO", new TestObject(222, "YHOO", positions, "inactive"));
+    region.put("YHOO", new TestObject(222, "YHOO", positions, "inactive"));
 
-    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.c);
+    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.cache);
     pf.writeInt("ID", 333);
     pf.writeString("secId", "GOOGL");
     pf.writeString("status", "active");
@@ -168,9 +167,9 @@ public class PdxStringQueryJUnitTest {
     positions = new HashMap();
     positions.put("price", "200");
     positions.put("price", "220");
-    r.put("VMW", new TestObject(111, "VMW", positions, "inactive"));
+    region.put("VMW", new TestObject(111, "VMW", positions, "inactive"));
 
-    r.put("GOOGL", pi);
+    region.put("GOOGL", pi);
 
     Map map = ((RangeIndex) index).getValueToEntriesMap();
     for (Object key : map.keySet()) {
@@ -178,15 +177,16 @@ public class PdxStringQueryJUnitTest {
     }
 
     executeQueriesValidateResults(INDEX_TYPE_RANGE);
-    qs.removeIndex(index);
-    r.clear();
+    queryService.removeIndex(index);
+    region.clear();
   }
 
   @Test
   public void testQueriesWithRangeIndexWithREUpdateInProgress() throws 
Exception {
-    Index index = qs.createIndex("index2", "p.secId", "/exampleRegion p, 
p.positions.values");
+    Index index =
+        queryService.createIndex("index2", "p.secId", "/exampleRegion p, 
p.positions.values");
     assertTrue(index instanceof RangeIndex);
-    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.c);
+    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.cache);
     pf.writeInt("ID", 111);
     pf.writeString("secId", "IBM");
     pf.writeString("status", "active");
@@ -196,14 +196,14 @@ public class PdxStringQueryJUnitTest {
     pf.writeObject("positions", positions);
     PdxInstance pi = pf.create();
 
-    r.put("IBM", pi);
+    region.put("IBM", pi);
 
     positions = new HashMap();
     positions.put("price", "100");
     positions.put("price", "120");
-    r.put("YHOO", new TestObject(222, "YHOO", positions, "inactive"));
+    region.put("YHOO", new TestObject(222, "YHOO", positions, "inactive"));
 
-    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.c);
+    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.cache);
     pf.writeInt("ID", 333);
     pf.writeString("secId", "GOOGL");
     pf.writeString("status", "active");
@@ -216,32 +216,32 @@ public class PdxStringQueryJUnitTest {
     positions = new HashMap();
     positions.put("price", "200");
     positions.put("price", "220");
-    r.put("VMW", new TestObject(111, "VMW", positions, "inactive"));
+    region.put("VMW", new TestObject(111, "VMW", positions, "inactive"));
 
-    r.put("GOOGL", pi);
+    region.put("GOOGL", pi);
     makeREUpdateInProgress();
 
     Map map = ((RangeIndex) index).getValueToEntriesMap();
     for (Object key : map.keySet()) {
       assertTrue(key instanceof PdxString);
     }
-    DefaultQuery.setPdxReadSerialized(true);
+    cache.getPdxRegistry().setPdxReadSerializedOverride(true);
     executeQueriesValidateResults(INDEX_TYPE_RANGE);
-    qs.removeIndex(index);
-    r.clear();
+    queryService.removeIndex(index);
+    region.clear();
   }
 
   @Test
   public void testQueriesWithPrimaryKeyIndex() throws Exception {
-    Index index = qs.createKeyIndex("index3", "secId", "/exampleRegion");
+    Index index = queryService.createKeyIndex("index3", "secId", 
"/exampleRegion");
     assertTrue(index instanceof PrimaryKeyIndex);
     putPdxInstances();
     executeQueriesValidateResults(INDEX_TYPE_PRIMARYKEY);
-    r.clear();
+    region.clear();
     putHeterogeneousObjects();
     executeQueriesValidateResults(INDEX_TYPE_PRIMARYKEY);
-    qs.removeIndex(index);
-    r.clear();
+    queryService.removeIndex(index);
+    region.clear();
   }
 
   @Test
@@ -250,77 +250,77 @@ public class PdxStringQueryJUnitTest {
     String queries[] = {"select secId from /exampleRegion where 
secId.toLowerCase()  = 'ibm'",
         "select secId from /exampleRegion where secId.startsWith('I')"};
     for (int i = 0; i < queries.length; i++) {
-      SelectResults res = (SelectResults) qs.newQuery(queries[i]).execute();
+      SelectResults res = (SelectResults) 
queryService.newQuery(queries[i]).execute();
       assertEquals("Incorrect result size returned for query. " + queries[i], 
1, res.size());
       validateStringResult("IBM", res.iterator().next());
     }
-    r.clear();
+    region.clear();
   }
 
   public void putPdxInstances() throws Exception {
-    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.c);
+    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.cache);
     pf.writeInt("ID", 111);
     pf.writeString("status", "active");
     pf.writeString("secId", "IBM");
     PdxInstance pi = pf.create();
-    r.put("IBM", pi);
+    region.put("IBM", pi);
 
-    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.c);
+    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.cache);
     pf.writeInt("ID", 222);
     pf.writeString("status", "inactive");
     pf.writeString("secId", "YHOO");
     pi = pf.create();
-    r.put("YHOO", pi);
+    region.put("YHOO", pi);
 
-    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.c);
+    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.cache);
     pf.writeInt("ID", 333);
     pf.writeString("status", "active");
     pf.writeString("secId", "GOOGL");
     pi = pf.create();
-    r.put("GOOGL", pi);
+    region.put("GOOGL", pi);
 
-    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.c);
+    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.cache);
     pf.writeInt("ID", 111);
     pf.writeString("status", "inactive");
     pf.writeString("secId", "VMW");
     pi = pf.create();
-    r.put("VMW", pi);
+    region.put("VMW", pi);
   }
 
   public void putPdxInstancesWithREUpdateInProgress() throws Exception {
-    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.c);
+    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.cache);
     pf.writeInt("ID", 111);
     pf.writeString("status", "active");
     pf.writeString("secId", "IBM");
     PdxInstance pi = pf.create();
-    r.put("IBM", pi);
+    region.put("IBM", pi);
 
-    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.c);
+    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.cache);
     pf.writeInt("ID", 222);
     pf.writeString("status", "inactive");
     pf.writeString("secId", "YHOO");
     pi = pf.create();
-    r.put("YHOO", pi);
+    region.put("YHOO", pi);
 
-    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.c);
+    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.cache);
     pf.writeInt("ID", 333);
     pf.writeString("status", "active");
     pf.writeString("secId", "GOOGL");
     pi = pf.create();
-    r.put("GOOGL", pi);
+    region.put("GOOGL", pi);
 
-    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.c);
+    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.cache);
     pf.writeInt("ID", 111);
     pf.writeString("status", "inactive");
     pf.writeString("secId", "VMW");
     pi = pf.create();
-    r.put("VMW", pi);
+    region.put("VMW", pi);
 
     makeREUpdateInProgress();
   }
 
   public void makeREUpdateInProgress() {
-    Iterator entryItr = r.entrySet().iterator();
+    Iterator entryItr = region.entrySet().iterator();
     while (entryItr.hasNext()) {
       Region.Entry nonTxEntry = (Region.Entry) entryItr.next();
       RegionEntry entry = ((NonTXEntry) nonTxEntry).getRegionEntry();
@@ -330,38 +330,38 @@ public class PdxStringQueryJUnitTest {
   }
 
   public void putHeterogeneousObjects() throws Exception {
-    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.c);
+    PdxInstanceFactory pf = PdxInstanceFactoryImpl.newCreator("Portfolio", 
false, this.cache);
     pf.writeInt("ID", 111);
     pf.writeString("secId", "IBM");
     pf.writeString("status", "active");
     PdxInstance pi = pf.create();
-    r.put("IBM", pi);
+    region.put("IBM", pi);
 
-    r.put("YHOO", new TestObject(222, "YHOO", "inactive"));
-    r.put("GOOGL", new TestObject(333, "GOOGL", "active"));
+    region.put("YHOO", new TestObject(222, "YHOO", "inactive"));
+    region.put("GOOGL", new TestObject(333, "GOOGL", "active"));
 
-    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.c);
+    pf = PdxInstanceFactoryImpl.newCreator("Portfolio", false, this.cache);
     pf.writeInt("ID", 111);
     pf.writeString("secId", "VMW");
     pf.writeString("status", "inactive");
     pi = pf.create();
-    r.put("VMW", pi);
+    region.put("VMW", pi);
   }
 
   private void executeQueriesValidateResults(int indexType) throws Exception {
-    DefaultQuery.setPdxReadSerialized(true);
+    cache.getPdxRegistry().setPdxReadSerializedOverride(true);
 
     String[] query = {"select count(*) from /exampleRegion",
         "select count(*) from /exampleRegion p, p.positions.values v",
         "select count(*) from /exampleRegion"};
 
-    SelectResults res = (SelectResults) 
qs.newQuery(query[indexType]).execute();
+    SelectResults res = (SelectResults) 
queryService.newQuery(query[indexType]).execute();
     assertEquals(4, res.iterator().next());
 
     query = new String[] {"select secId from /exampleRegion where secId  = 
'IBM'",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId = 'IBM'",
         "select secId from /exampleRegion where secId  = 'IBM'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(1, res.size());
     validateStringResult("IBM", res.iterator().next());
 
@@ -369,28 +369,28 @@ public class PdxStringQueryJUnitTest {
         "select p.secId from /exampleRegion p where p.secId  = ELEMENT(select 
e.secId from /exampleRegion e where e.secId  = 'IBM') ",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId = ELEMENT(select p1.secId from /exampleRegion p1, p.positions.values v1 
where p1.secId = 'IBM')",
         "select p.secId from /exampleRegion p where p.secId  = ELEMENT(select 
e.secId from /exampleRegion e where e.secId  = 'IBM' )"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(1, res.size());
     validateStringResult("IBM", res.iterator().next());
 
     query = new String[] {"select secId from /exampleRegion where secId LIKE 
'VMW'",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId LIKE 'VMW'",
         "select secId from /exampleRegion where secId LIKE 'VMW'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(1, res.size());
     validateStringResult("VMW", res.iterator().next());
 
     query = new String[] {"select secId from /exampleRegion where secId LIKE 
'VM%'",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId LIKE 'VM%'",
         "select secId from /exampleRegion where secId LIKE 'VM%'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(1, res.size());
     validateStringResult("VMW", res.iterator().next());
 
     query = new String[] {"select secId from /exampleRegion where secId IN SET 
('YHOO', 'VMW')",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId  IN SET ('YHOO', 'VMW')",
         "select secId from /exampleRegion where secId IN SET ('YHOO', 'VMW')"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(2, res.size());
     List secIdsList = new ArrayList();
     secIdsList.add("VMW");
@@ -405,7 +405,7 @@ public class PdxStringQueryJUnitTest {
         "select p.secId from /exampleRegion p where p.secId IN  (select 
e.secId from /exampleRegion e where e.secId ='YHOO' or e.secId = 'VMW')",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId  IN  (select e.secId from /exampleRegion e where e.secId ='YHOO' or 
e.secId = 'VMW')",
         "select p.secId from /exampleRegion p where p.secId IN  (select 
e.secId from /exampleRegion e where e.secId ='YHOO' or e.secId = 'VMW')"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(2, res.size());
     secIdsList = new ArrayList();
     secIdsList.add("VMW");
@@ -419,7 +419,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId, status from /exampleRegion where 
secId = 'IBM'",
         "select p.secId, p.status from /exampleRegion p, p.positions.values v 
where p.secId = 'IBM'",
         "select secId, status from /exampleRegion where secId = 'IBM'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(1, res.size());
     secIdsList = new ArrayList();
     secIdsList.add("active");
@@ -435,7 +435,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId from /exampleRegion where secId < 
'YHOO'",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId < 'YHOO'",
         "select secId from /exampleRegion where secId < 'YHOO'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(3, res.size());
     iter = res.iterator();
     secIdsList.clear();
@@ -449,7 +449,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId from /exampleRegion where 'YHOO' > 
secId",
         "select p.secId from /exampleRegion p, p.positions.values v where 
'YHOO' >  p.secId",
         "select secId from /exampleRegion where 'YHOO' > secId"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(3, res.size());
     iter = res.iterator();
     secIdsList.clear();
@@ -463,7 +463,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId from /exampleRegion where secId > 
'IBM'",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId > 'IBM'",
         "select secId from /exampleRegion where secId > 'IBM'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(2, res.size());
     iter = res.iterator();
     secIdsList.clear();
@@ -476,7 +476,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId from /exampleRegion where secId > 
'IBM' or ID=333",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId > 'IBM' or p.ID=333",
         "select secId from /exampleRegion where secId = 'VMW' or secId = 
'YHOO' or secId = 'GOOGL'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(3, res.size());
     iter = res.iterator();
     secIdsList.clear();
@@ -490,7 +490,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId from /exampleRegion where secId > 
'IBM' and secId < 'YHOO'",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId > 'IBM' and p.secId < 'YHOO'",
         "select secId from /exampleRegion where secId > 'IBM' and secId < 
'YHOO'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(1, res.size());
     iter = res.iterator();
     secIdsList.clear();
@@ -502,7 +502,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId from /exampleRegion where ID = 111",
         "select p.secId from /exampleRegion p, p.positions.values v where p.ID 
= 111",
         "select secId from /exampleRegion where secId = 'VMW' or secId = 
'IBM'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(2, res.size());
     iter = res.iterator();
     secIdsList.clear();
@@ -515,19 +515,19 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select distinct ID from /exampleRegion where ID = 
111",
         "select distinct p.ID from /exampleRegion p, p.positions.values v 
where p.ID = 111",
         "select distinct secId from /exampleRegion where secId = 'VMW'"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(1, res.size());
 
     query = new String[] {"select ID from /exampleRegion where ID = 111 limit 
1",
         "select p.ID from /exampleRegion p, p.positions.values v where p.ID = 
111 limit 1",
         "select secId from /exampleRegion where secId = 'VMW' limit 1"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(1, res.size());
 
     query = new String[] {"select distinct secId from /exampleRegion order by 
secId",
         "select distinct p.secId from /exampleRegion p, p.positions.values 
order by p.secId",
         "select distinct secId from /exampleRegion order by secId"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(4, res.size());
     iter = res.iterator();
     String[] secIds = {"GOOGL", "IBM", "VMW", "YHOO"};
@@ -539,7 +539,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select distinct * from /exampleRegion order by 
secId",
         "select distinct * from /exampleRegion p, p.positions.values v  order 
by p.secId",
         "select distinct * from /exampleRegion order by secId"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(4, res.size());
     iter = res.iterator();
     secIds = new String[] {"GOOGL", "IBM", "VMW", "YHOO"};
@@ -556,7 +556,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select distinct secId from /exampleRegion order by 
secId limit 2",
         "select distinct p.secId from /exampleRegion p, p.positions.values v  
order by p.secId limit 2",
         "select distinct secId from /exampleRegion order by secId limit 2"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(2, res.size());
     iter = res.iterator();
     secIds = new String[] {"GOOGL", "IBM"};
@@ -568,7 +568,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId from /exampleRegion where NOT (secId = 
'VMW')",
         "select p.secId from /exampleRegion p, p.positions.values v where  NOT 
(p.secId = 'VMW')",
         "select secId from /exampleRegion where NOT (secId = 'VMW')"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(3, res.size());
     iter = res.iterator();
     secIdsList.clear();
@@ -582,7 +582,7 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId from /exampleRegion p where NOT (p.ID 
IN SET(111, 222)) ",
         "select p.secId from /exampleRegion p, p.positions.values v where NOT 
(p.ID IN SET(111, 222)) ",
         "select secId from /exampleRegion where NOT (secId IN 
SET('VMW','IBM','YHOO'))"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute();
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute();
     assertEquals(1, res.size());
     iter = res.iterator();
     secIdsList.clear();
@@ -594,14 +594,15 @@ public class PdxStringQueryJUnitTest {
     query = new String[] {"select secId from /exampleRegion where secId  = $1",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId = $1",
         "select secId from /exampleRegion where secId  = $1"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute(new Object[] 
{"IBM"});
+    res = (SelectResults) queryService.newQuery(query[indexType]).execute(new 
Object[] {"IBM"});
     assertEquals(1, res.size());
     validateStringResult("IBM", res.iterator().next());
 
     query = new String[] {"select secId from /exampleRegion where secId > $1 
and secId < $2",
         "select p.secId from /exampleRegion p, p.positions.values v where 
p.secId > $1 and p.secId < $2",
         "select secId from /exampleRegion where secId > $1 and secId < $2"};
-    res = (SelectResults) qs.newQuery(query[indexType]).execute(new Object[] 
{"IBM", "YHOO"});
+    res = (SelectResults) queryService.newQuery(query[indexType])
+        .execute(new Object[] {"IBM", "YHOO"});
     assertEquals(1, res.size());
     iter = res.iterator();
     secIdsList.clear();
@@ -609,7 +610,7 @@ public class PdxStringQueryJUnitTest {
     while (iter.hasNext()) {
       validateResult(secIdsList, iter.next());
     }
-    DefaultQuery.setPdxReadSerialized(false);
+    cache.getPdxRegistry().setPdxReadSerializedOverride(false);
 
   }
 
diff --git 
a/geode-core/src/test/java/org/apache/geode/cache/query/dunit/PdxStringQueryDUnitTest.java
 
b/geode-core/src/test/java/org/apache/geode/cache/query/dunit/PdxStringQueryDUnitTest.java
index cc12d93..9c67c22 100644
--- 
a/geode-core/src/test/java/org/apache/geode/cache/query/dunit/PdxStringQueryDUnitTest.java
+++ 
b/geode-core/src/test/java/org/apache/geode/cache/query/dunit/PdxStringQueryDUnitTest.java
@@ -47,7 +47,6 @@ import org.apache.geode.cache.query.Struct;
 import org.apache.geode.cache.query.data.Portfolio;
 import org.apache.geode.cache.query.data.PortfolioPdx;
 import org.apache.geode.cache.query.data.PositionPdx;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.cache.query.internal.index.CompactRangeIndex;
 import org.apache.geode.cache.query.internal.index.IndexManager;
 import org.apache.geode.cache.query.internal.index.IndexStore.IndexStoreEntry;
@@ -84,8 +83,10 @@ public class PdxStringQueryDUnitTest extends 
JUnit4CacheTestCase {
   private static final int[] groupByQueryIndex = new int[] {7, 8, 9, 10};
 
   private final String[] queryString = new String[] {
-      "SELECT pos.secId FROM " + regName + " p, p.positions.values pos WHERE 
pos.secId LIKE '%L'", // 0
-      "SELECT pos.secId FROM " + regName + " p, p.positions.values pos where 
pos.secId = 'IBM'", // 1
+      "SELECT pos.secId FROM " + regName + " p, p.positions.values pos WHERE 
pos.secId LIKE '%L'",
+      // 0
+      "SELECT pos.secId FROM " + regName + " p, p.positions.values pos where 
pos.secId = 'IBM'",
+      // 1
       "SELECT pos.secId, p.status FROM " + regName
           + " p, p.positions.values pos where pos.secId > 'APPL'", // 2
       "SELECT pos.secId FROM " + regName
@@ -104,7 +105,8 @@ public class PdxStringQueryDUnitTest extends 
JUnit4CacheTestCase {
           + " p, p.positions.values pos where  pos.secId > 'APPL' group by 
pos.secId ", // 9
       "select  count(distinct pos.secId) from " + regName
           + " p, p.positions.values pos where  pos.secId > 'APPL' ", // 10
-      "SELECT distinct pos.secId FROM " + regName + " p, p.positions.values 
pos order by pos.secId", // 11
+      "SELECT distinct pos.secId FROM " + regName + " p, p.positions.values 
pos order by pos.secId",
+      // 11
       "SELECT distinct pos.secId FROM " + regName
           + " p, p.positions.values pos WHERE p.ID > 1 order by pos.secId 
limit 5",// 12
   };
@@ -117,7 +119,8 @@ public class PdxStringQueryDUnitTest extends 
JUnit4CacheTestCase {
       "SELECT pos.secIdIndexed, p.status FROM " + regName
           + " p, p.positions.values pos where pos.secIdIndexed > 'APPL'", // 2
       "SELECT pos.secIdIndexed FROM " + regName
-          + " p, p.positions.values pos WHERE pos.secIdIndexed > 'APPL' and 
pos.secIdIndexed < 'SUN'", // 3
+          + " p, p.positions.values pos WHERE pos.secIdIndexed > 'APPL' and 
pos.secIdIndexed < 'SUN'",
+      // 3
       "select pos.secIdIndexed from " + regName
           + " p, p.positions.values pos where pos.secIdIndexed  IN SET 
('YHOO', 'VMW')", // 4
       "select pos.secIdIndexed from " + regName
@@ -125,11 +128,14 @@ public class PdxStringQueryDUnitTest extends 
JUnit4CacheTestCase {
       "select pos.secIdIndexed from " + regName
           + " p, p.positions.values pos where NOT (pos.secIdIndexed IN 
SET('SUN', 'ORCL')) ", // 6
       "select pos.secIdIndexed , count(pos.id) from " + regName
-          + " p, p.positions.values pos where  pos.secIdIndexed > 'APPL' group 
by pos.secIdIndexed ", // 7
+          + " p, p.positions.values pos where  pos.secIdIndexed > 'APPL' group 
by pos.secIdIndexed ",
+      // 7
       "select pos.secIdIndexed , sum(pos.id) from " + regName
-          + " p, p.positions.values pos where  pos.secIdIndexed > 'APPL' group 
by pos.secIdIndexed ", // 8
+          + " p, p.positions.values pos where  pos.secIdIndexed > 'APPL' group 
by pos.secIdIndexed ",
+      // 8
       "select pos.secIdIndexed , count(distinct pos.secIdIndexed) from " + 
regName
-          + " p, p.positions.values pos where  pos.secIdIndexed > 'APPL' group 
by pos.secIdIndexed ", // 9
+          + " p, p.positions.values pos where  pos.secIdIndexed > 'APPL' group 
by pos.secIdIndexed ",
+      // 9
       "select  count(distinct pos.secIdIndexed) from " + regName
           + " p, p.positions.values pos where  pos.secIdIndexed > 'APPL'  ", 
// 10
       "SELECT distinct pos.secIdIndexed FROM " + regName
@@ -1926,8 +1932,6 @@ public class PdxStringQueryDUnitTest extends 
JUnit4CacheTestCase {
   /**
    * Test to verify if duplicate results are not being accumulated when 
PdxString is used in PR
    * query
-   *
-   * @throws CacheException
    */
   @Test
   public void testPRQueryForDuplicates() throws CacheException {
@@ -1995,21 +1999,19 @@ public class PdxStringQueryDUnitTest extends 
JUnit4CacheTestCase {
 
     // execute query on server by setting DefaultQuery.setPdxReadSerialized
     // to simulate remote query
-    vm0.invoke(new SerializableCallable("Create server") {
-      @Override
-      public Object call() throws Exception {
-        DefaultQuery.setPdxReadSerialized(true);
-        try {
-          for (int i = 0; i < qs.length; i++) {
-            SelectResults sr =
-                (SelectResults) 
getCache().getQueryService().newQuery(qs[i]).execute();
-            assertEquals("Did not get expected result from query: " + qs[i] + 
" ", 2, sr.size());
-          }
-        } finally {
-          DefaultQuery.setPdxReadSerialized(false);
+    vm0.invoke("Create server", () -> {
+
+      Boolean previousPdxReadSerializedFlag = 
cache.getPdxRegistry().getPdxReadSerializedOverride();
+      cache.getPdxRegistry().setPdxReadSerializedOverride(true);
+      try {
+        for (int i = 0; i < qs.length; i++) {
+          SelectResults sr = (SelectResults) 
getCache().getQueryService().newQuery(qs[i]).execute();
+          assertEquals("Did not get expected result from query: " + qs[i] + " 
", 2, sr.size());
         }
-        return null;
+      } finally {
+        
cache.getPdxRegistry().setPdxReadSerializedOverride(previousPdxReadSerializedFlag);
       }
+      return null;
     });
 
     disconnectAllFromDS();
diff --git 
a/geode-core/src/test/java/org/apache/geode/cache/query/partitioned/PRQueryRemoteNodeExceptionDUnitTest.java
 
b/geode-core/src/test/java/org/apache/geode/cache/query/partitioned/PRQueryRemoteNodeExceptionDUnitTest.java
index bdfbddc..48cfcd3 100644
--- 
a/geode-core/src/test/java/org/apache/geode/cache/query/partitioned/PRQueryRemoteNodeExceptionDUnitTest.java
+++ 
b/geode-core/src/test/java/org/apache/geode/cache/query/partitioned/PRQueryRemoteNodeExceptionDUnitTest.java
@@ -78,7 +78,7 @@ public class PRQueryRemoteNodeExceptionDUnitTest extends 
CacheTestCase {
   public void tearDown() throws Exception {
     disconnectAllFromDS();
     invokeInEveryVM(() -> PRQueryDUnitHelper.setCache(null));
-    invokeInEveryVM(() -> QueryObserverHolder.reset());
+    invokeInEveryVM(QueryObserverHolder::reset);
   }
 
   @Override
diff --git 
a/geode-core/src/test/java/org/apache/geode/codeAnalysis/AnalyzeSerializablesJUnitTest.java
 
b/geode-core/src/test/java/org/apache/geode/codeAnalysis/AnalyzeSerializablesJUnitTest.java
index 241944c..3adcf71 100644
--- 
a/geode-core/src/test/java/org/apache/geode/codeAnalysis/AnalyzeSerializablesJUnitTest.java
+++ 
b/geode-core/src/test/java/org/apache/geode/codeAnalysis/AnalyzeSerializablesJUnitTest.java
@@ -82,12 +82,14 @@ public class AnalyzeSerializablesJUnitTest {
       + "If the class is not persisted or sent over the wire add it to the 
file " + NEW_LINE + "%s"
       + NEW_LINE + "Otherwise if this doesn't break backward compatibility, 
copy the file "
       + NEW_LINE + "%s to " + NEW_LINE + "%s.";
-  public static final String EXCLUDED_CLASSES_TXT = "excludedClasses.txt";
-  public static final String ACTUAL_DATA_SERIALIZABLES_DAT = 
"actualDataSerializables.dat";
-  public static final String ACTUAL_SERIALIZABLES_DAT = 
"actualSerializables.dat";
-  public static final String OPEN_BUGS_TXT = "openBugs.txt";
+  private static final String EXCLUDED_CLASSES_TXT = "excludedClasses.txt";
+  private static final String ACTUAL_DATA_SERIALIZABLES_DAT = 
"actualDataSerializables.dat";
+  private static final String ACTUAL_SERIALIZABLES_DAT = 
"actualSerializables.dat";
+  private static final String OPEN_BUGS_TXT = "openBugs.txt";
 
-  /** all loaded classes */
+  /**
+   * all loaded classes
+   */
   private Map<String, CompiledClass> classes;
 
   private File expectedDataSerializablesFile;
@@ -98,13 +100,10 @@ public class AnalyzeSerializablesJUnitTest {
   private List<ClassAndMethodDetails> expectedDataSerializables;
   private List<ClassAndVariableDetails> expectedSerializables;
 
-  private File actualDataSerializablesFile;
-  private File actualSerializablesFile;
-
   @Rule
   public TestName testName = new TestName();
 
-  public void loadExpectedDataSerializables() throws Exception {
+  private void loadExpectedDataSerializables() throws Exception {
     this.expectedDataSerializablesFile = 
getResourceAsFile("sanctionedDataSerializables.txt");
     assertThat(this.expectedDataSerializablesFile).exists().canRead();
 
@@ -159,12 +158,12 @@ public class AnalyzeSerializablesJUnitTest {
     findClasses();
     loadExpectedDataSerializables();
 
-    this.actualDataSerializablesFile = 
createEmptyFile(ACTUAL_DATA_SERIALIZABLES_DAT);
+    File actualDataSerializablesFile = 
createEmptyFile(ACTUAL_DATA_SERIALIZABLES_DAT);
     System.out.println(this.testName.getMethodName() + " 
actualDataSerializablesFile="
-        + this.actualDataSerializablesFile.getAbsolutePath());
+        + actualDataSerializablesFile.getAbsolutePath());
 
     List<ClassAndMethods> actualDataSerializables = findToDatasAndFromDatas();
-    storeClassesAndMethods(actualDataSerializables, 
this.actualDataSerializablesFile);
+    storeClassesAndMethods(actualDataSerializables, 
actualDataSerializablesFile);
 
     String diff =
         diffSortedClassesAndMethods(this.expectedDataSerializables, 
actualDataSerializables);
@@ -173,7 +172,7 @@ public class AnalyzeSerializablesJUnitTest {
           "++++++++++++++++++++++++++++++testDataSerializables found 
discrepancies++++++++++++++++++++++++++++++++++++");
       System.out.println(diff);
       fail(diff + FAIL_MESSAGE, 
getSrcPathFor(getResourceAsFile(EXCLUDED_CLASSES_TXT)),
-          this.actualDataSerializablesFile.getAbsolutePath(),
+          actualDataSerializablesFile.getAbsolutePath(),
           getSrcPathFor(this.expectedDataSerializablesFile));
     }
   }
@@ -184,12 +183,12 @@ public class AnalyzeSerializablesJUnitTest {
     findClasses();
     loadExpectedSerializables();
 
-    this.actualSerializablesFile = createEmptyFile(ACTUAL_SERIALIZABLES_DAT);
+    File actualSerializablesFile = createEmptyFile(ACTUAL_SERIALIZABLES_DAT);
     System.out.println(this.testName.getMethodName() + " 
actualSerializablesFile="
-        + this.actualSerializablesFile.getAbsolutePath());
+        + actualSerializablesFile.getAbsolutePath());
 
     List<ClassAndVariables> actualSerializables = findSerializables();
-    storeClassesAndVariables(actualSerializables, 
this.actualSerializablesFile);
+    storeClassesAndVariables(actualSerializables, actualSerializablesFile);
 
     String diff = diffSortedClassesAndVariables(this.expectedSerializables, 
actualSerializables);
     if (!diff.isEmpty()) {
@@ -197,7 +196,7 @@ public class AnalyzeSerializablesJUnitTest {
           "++++++++++++++++++++++++++++++testSerializables found 
discrepancies++++++++++++++++++++++++++++++++++++");
       System.out.println(diff);
       fail(diff + FAIL_MESSAGE, 
getSrcPathFor(getResourceAsFile(EXCLUDED_CLASSES_TXT)),
-          this.actualSerializablesFile.getAbsolutePath(),
+          actualSerializablesFile.getAbsolutePath(),
           getSrcPathFor(this.expectedSerializablesFile, "main"));
     }
   }
@@ -221,12 +220,7 @@ public class AnalyzeSerializablesJUnitTest {
               + " is not Serializable and should be removed from 
excludedClasses.txt",
           Serializable.class.isAssignableFrom(excludedClass));
 
-      if (excludedClass.isEnum()) {
-        // geode enums are special cased by DataSerializer and are never 
java-serialized
-        // for (Object instance: excludedClass.getEnumConstants()) {
-        // serializeAndDeserializeObject(instance);
-        // }
-      } else {
+      if (!excludedClass.isEnum()) {
         final Object excludedInstance;
         try {
           excludedInstance = excludedClass.newInstance();
@@ -251,7 +245,7 @@ public class AnalyzeSerializablesJUnitTest {
       System.out.println("Not Serializable: " + object.getClass().getName());
     }
     try {
-      Object instance = DataSerializer
+      DataSerializer
           .readObject(new DataInputStream(new 
ByteArrayInputStream(outputStream.toByteArray())));
       fail("I was able to deserialize " + object.getClass().getName());
     } catch (InvalidClassException e) {
@@ -264,7 +258,6 @@ public class AnalyzeSerializablesJUnitTest {
     loadExpectedSerializables();
     Set<String> openBugs = new 
HashSet<>(loadOpenBugs(getResourceAsFile(OPEN_BUGS_TXT)));
 
-
     DistributionConfig distributionConfig = new DistributionConfigImpl(new 
Properties());
     distributionConfig.setValidateSerializableObjects(true);
     distributionConfig.setSerializableObjectFilter("!*");
@@ -310,7 +303,7 @@ public class AnalyzeSerializablesJUnitTest {
         continue;
       }
 
-      Object sanctionedInstance = null;
+      Object sanctionedInstance;
       if (!Serializable.class.isAssignableFrom(sanctionedClass)) {
         throw new AssertionError(
             className + " is not serializable.  Remove it from " + 
expectedSerializablesFileName);
diff --git 
a/geode-core/src/test/java/org/apache/geode/internal/PdxDeleteFieldJUnitTest.java
 
b/geode-core/src/test/java/org/apache/geode/internal/PdxDeleteFieldJUnitTest.java
index 6a79bc8..9eddb1d 100644
--- 
a/geode-core/src/test/java/org/apache/geode/internal/PdxDeleteFieldJUnitTest.java
+++ 
b/geode-core/src/test/java/org/apache/geode/internal/PdxDeleteFieldJUnitTest.java
@@ -33,9 +33,9 @@ import org.apache.geode.cache.DiskStoreFactory;
 import org.apache.geode.cache.Region;
 import org.apache.geode.cache.RegionFactory;
 import org.apache.geode.cache.RegionShortcut;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.internal.cache.DiskStoreImpl;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.util.BlobHelper;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.pdx.PdxInstanceFactory;
@@ -121,7 +121,7 @@ public class PdxDeleteFieldJUnitTest {
     props.setProperty(MCAST_PORT, "0");
     props.setProperty(LOCATORS, "");
     try {
-      Cache cache = (new CacheFactory(props)).create();
+      InternalCache cache = (InternalCache) new CacheFactory(props).create();
       try {
         PdxValue pdxValue = new PdxValue(1, 2L);
         byte[] pdxValueBytes = BlobHelper.serializeToBlob(pdxValue);
@@ -131,14 +131,14 @@ public class PdxDeleteFieldJUnitTest {
           assertEquals(2L, deserializedPdxValue.fieldToDelete);
         }
         PdxType pt;
-        DefaultQuery.setPdxReadSerialized(true); // force PdxInstance on 
deserialization
+        cache.getPdxRegistry().setPdxReadSerializedOverride(true);
         try {
           PdxInstanceImpl pi = (PdxInstanceImpl) 
BlobHelper.deserializeBlob(pdxValueBytes);
           pt = pi.getPdxType();
           assertEquals(1, pi.getField("value"));
           assertEquals(2L, pi.getField("fieldToDelete"));
         } finally {
-          DefaultQuery.setPdxReadSerialized(false);
+          cache.getPdxRegistry().setPdxReadSerializedOverride(false);
         }
         assertEquals(PdxValue.class.getName(), pt.getClassName());
         PdxField field = pt.getPdxField("fieldToDelete");
@@ -153,7 +153,7 @@ public class PdxDeleteFieldJUnitTest {
           // fieldToDelete should now be 0 (the default) instead of 2.
           assertEquals(0L, deserializedPdxValue.fieldToDelete);
         }
-        DefaultQuery.setPdxReadSerialized(true); // force PdxInstance on 
deserialization
+        cache.getPdxRegistry().setPdxReadSerializedOverride(true);
         try {
           PdxInstance pi = (PdxInstance) 
BlobHelper.deserializeBlob(pdxValueBytes);
           assertEquals(1, pi.getField("value"));
@@ -164,7 +164,7 @@ public class PdxDeleteFieldJUnitTest {
           assertEquals(1, deserializedPdxValue.value);
           assertEquals(0L, deserializedPdxValue.fieldToDelete);
         } finally {
-          DefaultQuery.setPdxReadSerialized(false);
+          cache.getPdxRegistry().setPdxReadSerializedOverride(false);
         }
         TypeRegistry tr = ((GemFireCacheImpl) cache).getPdxRegistry();
         // Clear the local registry so we will regenerate a type for the same 
class
diff --git 
a/geode-core/src/test/java/org/apache/geode/pdx/PdxClientServerDUnitTest.java 
b/geode-core/src/test/java/org/apache/geode/pdx/PdxClientServerDUnitTest.java
index 37d5da0..7ef14a0 100644
--- 
a/geode-core/src/test/java/org/apache/geode/pdx/PdxClientServerDUnitTest.java
+++ 
b/geode-core/src/test/java/org/apache/geode/pdx/PdxClientServerDUnitTest.java
@@ -44,7 +44,6 @@ import org.apache.geode.cache.client.ClientRegionShortcut;
 import org.apache.geode.cache.client.PoolFactory;
 import org.apache.geode.cache.client.PoolManager;
 import org.apache.geode.cache.client.internal.PoolImpl;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.cache.server.CacheServer;
 import org.apache.geode.distributed.internal.DistributionConfig;
 import org.apache.geode.internal.AvailablePortHelper;
@@ -350,7 +349,9 @@ public class PdxClientServerDUnitTest extends 
JUnit4CacheTestCase {
     final SerializableCallable checkValue = new SerializableCallable() {
       public Object call() throws Exception {
         Region r = getRootRegion("testSimplePdx");
-        DefaultQuery.setPdxReadSerialized(true);
+        Boolean previousPdxReadSerializedFlag =
+            cache.getPdxRegistry().getPdxReadSerializedOverride();
+        cache.getPdxRegistry().setPdxReadSerializedOverride(true);
         try {
           Object v = r.get(1);
           if (!(v instanceof PdxInstance)) {
@@ -364,7 +365,7 @@ public class PdxClientServerDUnitTest extends 
JUnit4CacheTestCase {
           }
           assertEquals(v, v2);
         } finally {
-          DefaultQuery.setPdxReadSerialized(false);
+          
cache.getPdxRegistry().setPdxReadSerializedOverride(previousPdxReadSerializedFlag);
         }
         return null;
       }
diff --git 
a/geode-core/src/test/java/org/apache/geode/pdx/PdxSerializableJUnitTest.java 
b/geode-core/src/test/java/org/apache/geode/pdx/PdxSerializableJUnitTest.java
index 6887208..0f3ef62 100644
--- 
a/geode-core/src/test/java/org/apache/geode/pdx/PdxSerializableJUnitTest.java
+++ 
b/geode-core/src/test/java/org/apache/geode/pdx/PdxSerializableJUnitTest.java
@@ -54,7 +54,6 @@ import org.apache.geode.cache.CacheFactory;
 import org.apache.geode.cache.DiskStoreFactory;
 import org.apache.geode.cache.Region;
 import org.apache.geode.cache.RegionShortcut;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.internal.DSCODE;
 import org.apache.geode.internal.HeapDataOutputStream;
 import org.apache.geode.internal.PdxSerializerObject;
@@ -75,37 +74,37 @@ import 
org.apache.geode.test.junit.categories.SerializationTest;
 @Category({IntegrationTest.class, SerializationTest.class})
 public class PdxSerializableJUnitTest {
 
-  private GemFireCacheImpl c;
+  private GemFireCacheImpl cache;
 
   @Before
   public void setUp() {
     // make it a loner
-    this.c = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").create();
+    this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").create();
   }
 
   @After
   public void tearDown() {
-    this.c.close();
+    this.cache.close();
   }
 
   private int getLastPdxTypeId() {
-    return this.c.getPdxRegistry().getLastAllocatedTypeId();
+    return this.cache.getPdxRegistry().getLastAllocatedTypeId();
   }
 
   private int getPdxTypeIdForClass(Class c) {
     // here we are assuming Dsid == 0
-    return this.c.getPdxRegistry().getExistingTypeForClass(c).hashCode()
+    return this.cache.getPdxRegistry().getExistingTypeForClass(c).hashCode()
         & PeerTypeRegistration.PLACE_HOLDER_FOR_TYPE_ID;
   }
 
   private int getNumPdxTypes() {
-    return this.c.getPdxRegistry().typeMap().size();
+    return this.cache.getPdxRegistry().typeMap().size();
   }
 
   @Test
   public void testNoDiskStore() throws Exception {
-    this.c.close();
-    this.c = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").setPdxPersistent(true)
+    this.cache.close();
+    this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").setPdxPersistent(true)
         .setPdxDiskStore("doesNotExist").create();
     HeapDataOutputStream out = new HeapDataOutputStream(Version.CURRENT);
     PdxSerializable object = new SimpleClass(1, (byte) 5, null);
@@ -119,29 +118,29 @@ public class PdxSerializableJUnitTest {
   // for bugs 44271 and 44914
   @Test
   public void testPdxPersistentKeys() throws Exception {
-    this.c.close();
-    this.c = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").setPdxPersistent(true)
+    this.cache.close();
+    this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").setPdxPersistent(true)
         .setPdxDiskStore("pdxDS").create();
     try {
-      DiskStoreFactory dsf = this.c.createDiskStoreFactory();
+      DiskStoreFactory dsf = this.cache.createDiskStoreFactory();
       dsf.create("pdxDS");
-      this.c.createDiskStoreFactory().create("r2DS");
-      Region r1 = 
this.c.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).create("r1");
+      this.cache.createDiskStoreFactory().create("r2DS");
+      Region r1 = 
this.cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).create("r1");
       r1.put(new SimpleClass(1, (byte) 1), "1");
       r1.put(new SimpleClass(2, (byte) 2), "2");
       r1.put(new SimpleClass(1, (byte) 1), "1.2"); // so we have something to 
compact offline
-      Region r2 = this.c.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT)
+      Region r2 = 
this.cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT)
           .setDiskStoreName("r2DS").create("r2");
       r2.put(new SimpleClass(1, (byte) 1), new SimpleClass(1, (byte) 1));
       r2.put(new SimpleClass(2, (byte) 2), new SimpleClass(2, (byte) 2));
-      this.c.close();
-      this.c = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").setPdxPersistent(true)
+      this.cache.close();
+      this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").setPdxPersistent(true)
           .setPdxDiskStore("pdxDS").create();
-      dsf = this.c.createDiskStoreFactory();
+      dsf = this.cache.createDiskStoreFactory();
       dsf.create("pdxDS");
-      this.c.createDiskStoreFactory().create("r2DS");
-      r1 = 
this.c.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).create("r1");
-      r2 = 
this.c.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).setDiskStoreName("r2DS")
+      this.cache.createDiskStoreFactory().create("r2DS");
+      r1 = 
this.cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).create("r1");
+      r2 = 
this.cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).setDiskStoreName("r2DS")
           .create("r2");
       assertEquals(true, r1.containsKey(new SimpleClass(1, (byte) 1)));
       assertEquals(true, r1.containsKey(new SimpleClass(2, (byte) 2)));
@@ -149,9 +148,9 @@ public class PdxSerializableJUnitTest {
       assertEquals(true, r2.containsKey(new SimpleClass(2, (byte) 2)));
       assertEquals(new SimpleClass(1, (byte) 1), r2.get(new SimpleClass(1, 
(byte) 1)));
       assertEquals(new SimpleClass(2, (byte) 2), r2.get(new SimpleClass(2, 
(byte) 2)));
-      this.c.close();
+      this.cache.close();
       // use a cache.xml to recover
-      this.c = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").create();
+      this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").create();
       ByteArrayOutputStream baos = new ByteArrayOutputStream();
       PrintWriter pw = new PrintWriter(new OutputStreamWriter(baos), true);
       pw.println("<?xml version=\"1.0\"?>");
@@ -169,16 +168,16 @@ public class PdxSerializableJUnitTest {
       pw.println("</cache>");
       pw.close();
       byte[] bytes = baos.toByteArray();
-      this.c.loadCacheXml(new ByteArrayInputStream(bytes));
-      r1 = this.c.getRegion("/r1");
-      r2 = this.c.getRegion("/r2");
+      this.cache.loadCacheXml(new ByteArrayInputStream(bytes));
+      r1 = this.cache.getRegion("/r1");
+      r2 = this.cache.getRegion("/r2");
       assertEquals(true, r1.containsKey(new SimpleClass(1, (byte) 1)));
       assertEquals(true, r1.containsKey(new SimpleClass(2, (byte) 2)));
       assertEquals(true, r2.containsKey(new SimpleClass(1, (byte) 1)));
       assertEquals(true, r2.containsKey(new SimpleClass(2, (byte) 2)));
       assertEquals(new SimpleClass(1, (byte) 1), r2.get(new SimpleClass(1, 
(byte) 1)));
       assertEquals(new SimpleClass(2, (byte) 2), r2.get(new SimpleClass(2, 
(byte) 2)));
-      this.c.close();
+      this.cache.close();
       // make sure offlines tools work with disk store that has pdx keys
       SystemAdmin.validateDiskStore("DEFAULT", ".");
       SystemAdmin.compactDiskStore("DEFAULT", ".");
@@ -191,7 +190,7 @@ public class PdxSerializableJUnitTest {
       SystemAdmin.modifyDiskStore("pdxDS", ".");
     } finally {
       try {
-        this.c.close();
+        this.cache.close();
       } finally {
         Pattern pattern = Pattern.compile("BACKUP(DEFAULT|pdxDS|r2DS).*");
         File[] files = new File(".").listFiles((dir1, name) -> 
pattern.matcher(name).matches());
@@ -206,27 +205,27 @@ public class PdxSerializableJUnitTest {
 
   @Test
   public void testPdxPersistentKeysDefDS() throws Exception {
-    this.c.close();
-    this.c =
+    this.cache.close();
+    this.cache =
         (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").setPdxPersistent(true).create();
     try {
-      this.c.createDiskStoreFactory().create("r2DS");
-      Region r1 = this.c.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT)
+      this.cache.createDiskStoreFactory().create("r2DS");
+      Region r1 = 
this.cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT)
           .setDiskStoreName("r2DS").create("r1");
       r1.put(new SimpleClass(1, (byte) 1), "1");
       r1.put(new SimpleClass(2, (byte) 2), "2");
       r1.put(new SimpleClass(1, (byte) 1), "1.2"); // so we have something to 
compact offline
-      Region r2 = this.c.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT)
+      Region r2 = 
this.cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT)
           .setDiskStoreName("r2DS").create("r2");
       r2.put(new SimpleClass(1, (byte) 1), new SimpleClass(1, (byte) 1));
       r2.put(new SimpleClass(2, (byte) 2), new SimpleClass(2, (byte) 2));
-      this.c.close();
-      this.c = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").setPdxPersistent(true)
+      this.cache.close();
+      this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").setPdxPersistent(true)
           .create();
-      this.c.createDiskStoreFactory().create("r2DS");
-      r1 = 
this.c.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).setDiskStoreName("r2DS")
+      this.cache.createDiskStoreFactory().create("r2DS");
+      r1 = 
this.cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).setDiskStoreName("r2DS")
           .create("r1");
-      r2 = 
this.c.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).setDiskStoreName("r2DS")
+      r2 = 
this.cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).setDiskStoreName("r2DS")
           .create("r2");
       assertEquals(true, r1.containsKey(new SimpleClass(1, (byte) 1)));
       assertEquals(true, r1.containsKey(new SimpleClass(2, (byte) 2)));
@@ -234,9 +233,9 @@ public class PdxSerializableJUnitTest {
       assertEquals(true, r2.containsKey(new SimpleClass(2, (byte) 2)));
       assertEquals(new SimpleClass(1, (byte) 1), r2.get(new SimpleClass(1, 
(byte) 1)));
       assertEquals(new SimpleClass(2, (byte) 2), r2.get(new SimpleClass(2, 
(byte) 2)));
-      this.c.close();
+      this.cache.close();
       // use a cache.xml to recover
-      this.c = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").create();
+      this.cache = (GemFireCacheImpl) new CacheFactory().set(MCAST_PORT, 
"0").create();
       ByteArrayOutputStream baos = new ByteArrayOutputStream();
       PrintWriter pw = new PrintWriter(new OutputStreamWriter(baos), true);
       pw.println("<?xml version=\"1.0\"?>");
@@ -255,16 +254,16 @@ public class PdxSerializableJUnitTest {
       pw.println("</cache>");
       pw.close();
       byte[] bytes = baos.toByteArray();
-      this.c.loadCacheXml(new ByteArrayInputStream(bytes));
-      r1 = this.c.getRegion("/r1");
-      r2 = this.c.getRegion("/r2");
+      this.cache.loadCacheXml(new ByteArrayInputStream(bytes));
+      r1 = this.cache.getRegion("/r1");
+      r2 = this.cache.getRegion("/r2");
       assertEquals(true, r1.containsKey(new SimpleClass(1, (byte) 1)));
       assertEquals(true, r1.containsKey(new SimpleClass(2, (byte) 2)));
       assertEquals(true, r2.containsKey(new SimpleClass(1, (byte) 1)));
       assertEquals(true, r2.containsKey(new SimpleClass(2, (byte) 2)));
       assertEquals(new SimpleClass(1, (byte) 1), r2.get(new SimpleClass(1, 
(byte) 1)));
       assertEquals(new SimpleClass(2, (byte) 2), r2.get(new SimpleClass(2, 
(byte) 2)));
-      this.c.close();
+      this.cache.close();
       // make sure offlines tools work with disk store that has pdx keys
       SystemAdmin.validateDiskStore("DEFAULT", ".");
       SystemAdmin.compactDiskStore("DEFAULT", ".");
@@ -274,7 +273,7 @@ public class PdxSerializableJUnitTest {
       SystemAdmin.modifyDiskStore("r2DS", ".");
     } finally {
       try {
-        this.c.close();
+        this.cache.close();
       } finally {
         Pattern pattern = Pattern.compile("BACKUP(DEFAULT|r2DS).*");
         File[] files = new File(".").listFiles((dir1, name) -> 
pattern.matcher(name).matches());
@@ -438,7 +437,7 @@ public class PdxSerializableJUnitTest {
         "Mismatch in write and read value: Value Write..." + pdx + " Value 
Read..." + actualVal,
         pdx.equals(actualVal));
 
-    c.setReadSerializedForTest(true);
+    cache.setReadSerializedForTest(true);
     try {
       in = new DataInputStream(new ByteArrayInputStream(actual));
       PdxInstance pi = (PdxInstance) DataSerializer.readObject(in);
@@ -535,7 +534,7 @@ public class PdxSerializableJUnitTest {
           (byte) (floatBytes >> 16), (byte) (floatBytes >> 8), (byte) 
floatBytes}),
           reader.getRaw(7));
     } finally {
-      c.setReadSerializedForTest(false);
+      cache.setReadSerializedForTest(false);
     }
   }
 
@@ -664,7 +663,7 @@ public class PdxSerializableJUnitTest {
     assertTrue(
         "Mismatch in write and read value: Value Write..." + pdx + " Value 
Read..." + actualVal,
         pdx.equals(actualVal));
-    c.setReadSerializedForTest(true);
+    cache.setReadSerializedForTest(true);
     try {
       in = new DataInputStream(new ByteArrayInputStream(actual));
       PdxInstance pi = (PdxInstance) DataSerializer.readObject(in);
@@ -761,7 +760,7 @@ public class PdxSerializableJUnitTest {
           (byte) (floatBytes >> 16), (byte) (floatBytes >> 8), (byte) 
floatBytes}),
           reader.getRaw(7));
     } finally {
-      c.setReadSerializedForTest(false);
+      cache.setReadSerializedForTest(false);
     }
   }
 
@@ -1095,7 +1094,8 @@ public class PdxSerializableJUnitTest {
   // this method adds coverage for bug 43236
   @Test
   public void testObjectPdxInstance() throws IOException, 
ClassNotFoundException {
-    DefaultQuery.setPdxReadSerialized(true);
+    Boolean previousPdxReadSerializedFlag = 
cache.getPdxRegistry().getPdxReadSerializedOverride();
+    cache.getPdxRegistry().setPdxReadSerializedOverride(true);
     PdxReaderImpl.TESTHOOK_TRACKREADS = true;
     try {
       PdxInstance pi = (PdxInstance) serializeAndDeserialize(new 
ObjectHolder("hello"));
@@ -1110,14 +1110,15 @@ public class PdxSerializableJUnitTest {
       assertEquals("hello", v3.getObject());
       assertEquals("goodbye", wpi.getField("f1"));
     } finally {
-      DefaultQuery.setPdxReadSerialized(false);
+      
cache.getPdxRegistry().setPdxReadSerializedOverride(previousPdxReadSerializedFlag);
       PdxReaderImpl.TESTHOOK_TRACKREADS = false;
     }
   }
 
   @Test
   public void testObjectArrayPdxInstance() throws IOException, 
ClassNotFoundException {
-    DefaultQuery.setPdxReadSerialized(true);
+    Boolean previousPdxReadSerializedFlag = 
cache.getPdxRegistry().getPdxReadSerializedOverride();
+    cache.getPdxRegistry().setPdxReadSerializedOverride(true);
     PdxReaderImpl.TESTHOOK_TRACKREADS = true;
     try {
       LongFieldHolder[] v = new LongFieldHolder[] {new LongFieldHolder(1), new 
LongFieldHolder(2)};
@@ -1139,7 +1140,7 @@ public class PdxSerializableJUnitTest {
             "expected " + Arrays.toString(v) + " but had " + 
Arrays.toString(nv2));
       }
     } finally {
-      DefaultQuery.setPdxReadSerialized(false);
+      
cache.getPdxRegistry().setPdxReadSerializedOverride(previousPdxReadSerializedFlag);
       PdxReaderImpl.TESTHOOK_TRACKREADS = false;
     }
   }
@@ -1167,24 +1168,24 @@ public class PdxSerializableJUnitTest {
     } catch (NotSerializableException expected) {
 
     }
-    this.c.setPdxSerializer(new BasicAllFieldTypesPdxSerializer());
+    this.cache.setPdxSerializer(new BasicAllFieldTypesPdxSerializer());
     try {
       BasicAllFieldTypes v2 = (BasicAllFieldTypes) serializeAndDeserialize(v1);
       assertEquals(v1, v2);
     } finally {
-      this.c.setPdxSerializer(null);
+      this.cache.setPdxSerializer(null);
     }
   }
 
   @Test
   public void testPdxSerializerFalse() throws IOException, 
ClassNotFoundException {
-    this.c.setPdxSerializer(new BasicAllFieldTypesPdxSerializer());
+    this.cache.setPdxSerializer(new BasicAllFieldTypesPdxSerializer());
     try {
       POS v1 = new POS(3);
       POS v2 = (POS) serializeAndDeserialize(v1);
       assertEquals(v1, v2);
     } finally {
-      this.c.setPdxSerializer(null);
+      this.cache.setPdxSerializer(null);
     }
   }
 
@@ -1204,12 +1205,12 @@ public class PdxSerializableJUnitTest {
     } catch (NotSerializableException expected) {
 
     }
-    this.c.setPdxSerializer(new BasicAllFieldTypesPdxSerializer());
+    this.cache.setPdxSerializer(new BasicAllFieldTypesPdxSerializer());
     try {
       BasicAllFieldTypes v2 = (BasicAllFieldTypes) serializeAndDeserialize(v1);
       assertEquals(v1, v2);
     } finally {
-      this.c.setPdxSerializer(null);
+      this.cache.setPdxSerializer(null);
     }
   }
 
@@ -2056,7 +2057,7 @@ public class PdxSerializableJUnitTest {
     byte[] v2actual = createBlob(pdx);
     int v2typeId = getBlobPdxTypeId(v2actual);
 
-    c.getPdxRegistry().removeLocal(pdx);
+    cache.getPdxRegistry().removeLocal(pdx);
     MyEvolvablePdx.setVersion(1);
     MyEvolvablePdx pdxv1 = deblob(v2actual);
     assertEquals(7, pdxv1.f1);
@@ -2077,7 +2078,7 @@ public class PdxSerializableJUnitTest {
     checkBytes(v2actual, v1actual);
 
     MyEvolvablePdx.setVersion(2);
-    c.getPdxRegistry().removeLocal(pdx);
+    cache.getPdxRegistry().removeLocal(pdx);
     MyEvolvablePdx pdxv2 = deblob(v1actual);
     assertEquals(7, pdxv2.f1);
     assertEquals(8, pdxv2.f2);
@@ -2100,7 +2101,7 @@ public class PdxSerializableJUnitTest {
     assertEquals(8, pdx.f2);
     byte[] v3actual = createBlob(pdx);
     int v3typeId = getBlobPdxTypeId(v3actual);
-    c.getPdxRegistry().removeLocal(pdx);
+    cache.getPdxRegistry().removeLocal(pdx);
     MyEvolvablePdx.setVersion(1);
     MyEvolvablePdx pdxv1 = deblob(v3actual);
     assertEquals(7, pdxv1.f1);
@@ -2112,14 +2113,14 @@ public class PdxSerializableJUnitTest {
 
     int mergedTypeId = getBlobPdxTypeId(v1actual);
     assertEquals(numPdxTypes + 1, getNumPdxTypes());
-    TypeRegistry tr = c.getPdxRegistry();
+    TypeRegistry tr = cache.getPdxRegistry();
     PdxType v3Type = tr.getType(v3typeId);
     PdxType mergedType = tr.getType(mergedTypeId);
     assertFalse(mergedType.equals(v3Type));
     assertTrue(mergedType.compatible(v3Type));
 
     MyEvolvablePdx.setVersion(3);
-    c.getPdxRegistry().removeLocal(pdxv1);
+    cache.getPdxRegistry().removeLocal(pdxv1);
     MyEvolvablePdx pdxv3 = deblob(v1actual);
     assertEquals(7, pdxv3.f1);
     assertEquals(8, pdxv3.f2);
@@ -2137,7 +2138,7 @@ public class PdxSerializableJUnitTest {
     byte[] v1actual = createBlob(pdx);
     int v1typeId = getBlobPdxTypeId(v1actual);
 
-    c.getPdxRegistry().removeLocal(pdx);
+    cache.getPdxRegistry().removeLocal(pdx);
     MyEvolvablePdx.setVersion(2);
     MyEvolvablePdx pdxv2 = deblob(v1actual);
     assertEquals(7, pdxv2.f1);
@@ -2150,7 +2151,7 @@ public class PdxSerializableJUnitTest {
     int v2typeId = getBlobPdxTypeId(v2actual);
     assertEquals(numPdxTypes + 1, getNumPdxTypes());
 
-    TypeRegistry tr = c.getPdxRegistry();
+    TypeRegistry tr = cache.getPdxRegistry();
     PdxType v2Type = tr.getType(v2typeId);
     PdxType v1Type = tr.getType(v1typeId);
     assertFalse(v1Type.equals(v2Type));
@@ -2161,7 +2162,7 @@ public class PdxSerializableJUnitTest {
     assertNotNull(v2Type.getPdxField("f2"));
 
     MyEvolvablePdx.setVersion(1);
-    c.getPdxRegistry().removeLocal(pdx);
+    cache.getPdxRegistry().removeLocal(pdx);
     MyEvolvablePdx pdxv3 = deblob(v2actual);
     assertEquals(7, pdxv3.f1);
     assertEquals(0, pdxv3.f2);
diff --git a/geode-core/src/test/java/org/apache/geode/test/fake/Fakes.java 
b/geode-core/src/test/java/org/apache/geode/test/fake/Fakes.java
index 31852dd..ef7de58 100644
--- a/geode-core/src/test/java/org/apache/geode/test/fake/Fakes.java
+++ b/geode-core/src/test/java/org/apache/geode/test/fake/Fakes.java
@@ -37,6 +37,7 @@ import org.apache.geode.internal.cache.GemFireCacheImpl;
 import org.apache.geode.internal.cache.TXManagerImpl;
 import org.apache.geode.internal.security.SecurityService;
 import org.apache.geode.pdx.PdxInstanceFactory;
+import org.apache.geode.pdx.internal.TypeRegistry;
 
 /**
  * Factory methods for fake objects for use in test.
@@ -65,6 +66,7 @@ public class Fakes {
     DistributionConfig config = mock(DistributionConfig.class);
     ClusterDistributionManager distributionManager = 
mock(ClusterDistributionManager.class);
     PdxInstanceFactory pdxInstanceFactory = mock(PdxInstanceFactory.class);
+    TypeRegistry pdxRegistryMock = mock(TypeRegistry.class);
     CancelCriterion systemCancelCriterion = mock(CancelCriterion.class);
     DSClock clock = mock(DSClock.class);
     LogWriter logger = mock(LogWriter.class);
@@ -86,6 +88,7 @@ public class Fakes {
     when(cache.getCachePerfStats()).thenReturn(mock(CachePerfStats.class));
     when(cache.getSecurityService()).thenReturn(mock(SecurityService.class));
     when(cache.createPdxInstanceFactory(any())).thenReturn(pdxInstanceFactory);
+    when(cache.getPdxRegistry()).thenReturn(pdxRegistryMock);
     when(cache.getTxManager()).thenReturn(txManager);
 
     when(system.getDistributedMember()).thenReturn(member);
diff --git 
a/geode-core/src/test/resources/org/apache/geode/codeAnalysis/excludedClasses.txt
 
b/geode-core/src/test/resources/org/apache/geode/codeAnalysis/excludedClasses.txt
index c00f72b..75e91d8 100644
--- 
a/geode-core/src/test/resources/org/apache/geode/codeAnalysis/excludedClasses.txt
+++ 
b/geode-core/src/test/resources/org/apache/geode/codeAnalysis/excludedClasses.txt
@@ -91,3 +91,4 @@ 
org/apache/geode/cache/query/internal/types/TypeUtils$ComparisonStrategy$4
 org/apache/geode/cache/query/internal/types/TypeUtils$ComparisonStrategy$5
 
org/apache/geode/cache/client/internal/pooling/ConnectionManagerImpl$ClosedPoolConnectionList
 org/apache/geode/cache/query/internal/parse/ASTArithmeticOp
+org/apache/geode/cache/query/internal/DefaultQuery$1
\ No newline at end of file
diff --git 
a/geode-core/src/test/resources/org/apache/geode/codeAnalysis/sanctionedDataSerializables.txt
 
b/geode-core/src/test/resources/org/apache/geode/codeAnalysis/sanctionedDataSerializables.txt
index 5abf7ae..391fc61 100644
--- 
a/geode-core/src/test/resources/org/apache/geode/codeAnalysis/sanctionedDataSerializables.txt
+++ 
b/geode-core/src/test/resources/org/apache/geode/codeAnalysis/sanctionedDataSerializables.txt
@@ -353,8 +353,8 @@ fromData,16,2a2bb700202a2bb900210100b50003b1
 toData,16,2a2bb700222b2ab40003b900230200b1
 
 
org/apache/geode/distributed/internal/streaming/StreamingOperation$StreamingReplyMessage,2
-fromData,339,2a2bb700142bb9001501003d2a2bb900150100b500102a2bb900160100b500112a2bb900160100b500032bb800174e2db20018b600199e000704a700040336041c02a0000b2a01b50002a701082a1cb5000e2abb001a591cb7001bb500022ab4000399000704b8001c2ab40008b8001d3a051905c1001e3606013a07150699000d1905c0001eb6001f3a0703360803360915091ca20087b20020c6000cb2002006b900210200150699000fb80022990009043608a700672bb800233a0a150699004a1907c600451907b90024010099003b1504360b150b99001715099a0012190ac100259a000704a7000403360b150
 [...]
-toData,85,2a2bb7002b2ab4000dc7000d2b02b9002c0200a7000d2b2ab4000eb9002c02002b2ab40010b9002c02002b2ab40011b9002d02002b2ab40003b9002d02002ab4000dc600122ab4000e9e000b2ab4000d2bb6002eb1
+fromData,417,2a2bb700142bb9001501003d2a2bb900150100b500102a2bb900160100b500112a2bb900160100b500032bb800174e2db20018b600199e000704a70004033604013a0503b8001a3a06b8001bc0001c3a051905b9001d0100b6001e3a06a700053a071c02a0000b2a01b50002a701342a1cb5000e2abb0020591cb70021b500022ab400039900131905c6000e1905b9001d010004b600222ab40008b800233a071907c100243608013a09150899000d1907c00024b600253a0903360a03360b150b1ca20087b20026c6000cb2002606b900270200150899000fb8002899000904360aa700672bb800293a0c150899004
 [...]
+toData,85,2a2bb700322ab4000dc7000d2b02b900330200a7000d2b2ab4000eb9003302002b2ab40010b9003302002b2ab40011b9003402002b2ab40003b9003402002ab4000dc600122ab4000e9e000b2ab4000d2bb60035b1
 
 org/apache/geode/distributed/internal/tcpserver/InfoRequest,2
 fromData,1,b1
diff --git 
a/geode-cq/src/main/java/org/apache/geode/cache/query/internal/cq/CqServiceImpl.java
 
b/geode-cq/src/main/java/org/apache/geode/cache/query/internal/cq/CqServiceImpl.java
index e1ceb58..8f99f87 100644
--- 
a/geode-cq/src/main/java/org/apache/geode/cache/query/internal/cq/CqServiceImpl.java
+++ 
b/geode-cq/src/main/java/org/apache/geode/cache/query/internal/cq/CqServiceImpl.java
@@ -1187,11 +1187,12 @@ public class CqServiceImpl implements CqService {
       processRegionEvent(event, localProfile, profiles, frInfo);
     } else {
       // Use the PDX types in serialized form.
-      DefaultQuery.setPdxReadSerialized(this.cache, true);
+      Boolean initialPdxReadSerialized = 
this.cache.getPdxRegistry().getPdxReadSerializedOverride();
+      this.cache.getPdxRegistry().setPdxReadSerializedOverride(true);
       try {
         processEntryEvent(event, localProfile, profiles, frInfo);
       } finally {
-        DefaultQuery.setPdxReadSerialized(this.cache, false);
+        
this.cache.getPdxRegistry().setPdxReadSerializedOverride(initialPdxReadSerialized);
       }
     }
   }
diff --git 
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/IndexRepositoryFactory.java
 
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/IndexRepositoryFactory.java
index 5df2617..ee64cc1 100644
--- 
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/IndexRepositoryFactory.java
+++ 
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/IndexRepositoryFactory.java
@@ -32,10 +32,10 @@ import 
org.apache.geode.cache.lucene.internal.directory.RegionDirectory;
 import org.apache.geode.cache.lucene.internal.partition.BucketTargetingMap;
 import org.apache.geode.cache.lucene.internal.repository.IndexRepository;
 import org.apache.geode.cache.lucene.internal.repository.IndexRepositoryImpl;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.distributed.DistributedLockService;
 import org.apache.geode.internal.cache.BucketRegion;
 import org.apache.geode.internal.cache.EntrySnapshot;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.PartitionRegionConfig;
 import org.apache.geode.internal.cache.PartitionedRegion;
 import org.apache.geode.internal.cache.PartitionedRegionHelper;
@@ -95,8 +95,9 @@ public class IndexRepositoryFactory {
     }
 
     final IndexRepository repo;
-    boolean initialPdxReadSerializedFlag = DefaultQuery.getPdxReadSerialized();
-    DefaultQuery.setPdxReadSerialized(true);
+    InternalCache cache = (InternalCache) userRegion.getRegionService();
+    boolean initialPdxReadSerializedFlag = 
cache.getPdxRegistry().getPdxReadSerializedOverride();
+    cache.getPdxRegistry().setPdxReadSerializedOverride(true);
     try {
       // bucketTargetingMap handles partition resolver (via bucketId as 
callbackArg)
       Map bucketTargetingMap = getBucketTargetingMap(fileAndChunkBucket, 
bucketId);
@@ -127,7 +128,7 @@ public class IndexRepositoryFactory {
     } finally {
       if (!success) {
         lockService.unlock(lockName);
-        DefaultQuery.setPdxReadSerialized(initialPdxReadSerializedFlag);
+        
cache.getPdxRegistry().setPdxReadSerializedOverride(initialPdxReadSerializedFlag);
       }
     }
   }
@@ -137,9 +138,7 @@ public class IndexRepositoryFactory {
       throws IOException {
     Set<IndexRepository> affectedRepos = new HashSet<IndexRepository>();
 
-    Iterator keysIterator = dataBucket.keySet().iterator();
-    while (keysIterator.hasNext()) {
-      Object key = keysIterator.next();
+    for (Object key : dataBucket.keySet()) {
       Object value = getValue(userRegion.getEntry(key));
       if (value != null) {
         repo.update(key, value);
diff --git 
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneEventListener.java
 
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneEventListener.java
index 8033f9a..b9209e7 100644
--- 
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneEventListener.java
+++ 
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneEventListener.java
@@ -32,9 +32,9 @@ import org.apache.geode.cache.asyncqueue.AsyncEvent;
 import org.apache.geode.cache.asyncqueue.AsyncEventListener;
 import org.apache.geode.cache.lucene.internal.repository.IndexRepository;
 import org.apache.geode.cache.lucene.internal.repository.RepositoryManager;
-import org.apache.geode.cache.query.internal.DefaultQuery;
 import org.apache.geode.internal.cache.BucketNotFoundException;
 import org.apache.geode.internal.cache.EntrySnapshot;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.PrimaryBucketException;
 import org.apache.geode.internal.logging.LogService;
 
@@ -45,13 +45,15 @@ public class LuceneEventListener implements 
AsyncEventListener {
 
   private static LuceneExceptionObserver exceptionObserver = exception -> {
   };
+  private InternalCache cache;
 
-  Logger logger = LogService.getLogger();
+  private static final Logger logger = LogService.getLogger();
 
   private final RepositoryManager repositoryManager;
 
-  public LuceneEventListener(RepositoryManager repositoryManager) {
+  public LuceneEventListener(InternalCache cache, RepositoryManager 
repositoryManager) {
     this.repositoryManager = repositoryManager;
+    this.cache = cache;
   }
 
   @Override
@@ -68,18 +70,22 @@ public class LuceneEventListener implements 
AsyncEventListener {
       exceptionObserver.onException(e);
       throw e;
     }
+  }
 
+  private Logger getLogger() {
+    return logger;
   }
 
   protected boolean process(final List<AsyncEvent> events) {
     // Try to get a PDX instance if possible, rather than a deserialized object
-    boolean initialPdxReadSerializedFlag = DefaultQuery.getPdxReadSerialized();
-    DefaultQuery.setPdxReadSerialized(true);
+    Boolean initialPdxReadSerialized = 
this.cache.getPdxRegistry().getPdxReadSerializedOverride();
+    cache.getPdxRegistry().setPdxReadSerializedOverride(true);
 
-    Set<IndexRepository> affectedRepos = new HashSet<IndexRepository>();
+    Set<IndexRepository> affectedRepos = new HashSet<>();
 
     try {
       for (AsyncEvent event : events) {
+
         Region region = event.getRegion();
         Object key = event.getKey();
         Object callbackArgument = event.getCallbackArgument();
@@ -101,18 +107,18 @@ public class LuceneEventListener implements 
AsyncEventListener {
       }
       return true;
     } catch (BucketNotFoundException | RegionDestroyedException | 
PrimaryBucketException e) {
-      logger.debug("Bucket not found while saving to lucene index: " + 
e.getMessage(), e);
+      getLogger().debug("Bucket not found while saving to lucene index: " + 
e.getMessage(), e);
       return false;
     } catch (CacheClosedException e) {
-      logger.debug("Unable to save to lucene index, cache has been closed", e);
+      getLogger().debug("Unable to save to lucene index, cache has been 
closed", e);
       return false;
     } catch (AlreadyClosedException e) {
-      logger.debug("Unable to commit, the lucene index is already closed", e);
+      getLogger().debug("Unable to commit, the lucene index is already 
closed", e);
       return false;
     } catch (IOException e) {
       throw new InternalGemFireError("Unable to save to lucene index", e);
     } finally {
-      DefaultQuery.setPdxReadSerialized(initialPdxReadSerializedFlag);
+      
cache.getPdxRegistry().setPdxReadSerializedOverride(initialPdxReadSerialized);
     }
   }
 
diff --git 
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneIndexImpl.java
 
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneIndexImpl.java
index fd4d060..cba5b69 100644
--- 
a/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneIndexImpl.java
+++ 
b/geode-lucene/src/main/java/org/apache/geode/cache/lucene/internal/LuceneIndexImpl.java
@@ -206,7 +206,7 @@ public abstract class LuceneIndexImpl implements 
InternalLuceneIndex {
     if (factory == null) {
       return null;
     }
-    LuceneEventListener listener = new LuceneEventListener(repositoryManager);
+    LuceneEventListener listener = new LuceneEventListener(cache, 
repositoryManager);
     factory.setGatewayEventSubstitutionListener(new 
LuceneEventSubstitutionFilter());
     AsyncEventQueue indexQueue = factory.create(aeqId, listener);
     return indexQueue;
diff --git 
a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/LuceneEventListenerJUnitTest.java
 
b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/LuceneEventListenerJUnitTest.java
index cc7caff..d7af3d4 100644
--- 
a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/LuceneEventListenerJUnitTest.java
+++ 
b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/LuceneEventListenerJUnitTest.java
@@ -14,8 +14,21 @@
  */
 package org.apache.geode.cache.lucene.internal;
 
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.atLeast;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -26,9 +39,17 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.logging.log4j.Logger;
 import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
+import org.mockito.stubbing.Answer;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
 import org.apache.geode.InternalGemFireError;
 import org.apache.geode.cache.Region;
@@ -36,16 +57,35 @@ import org.apache.geode.cache.asyncqueue.AsyncEvent;
 import org.apache.geode.cache.lucene.internal.repository.IndexRepository;
 import org.apache.geode.cache.lucene.internal.repository.RepositoryManager;
 import org.apache.geode.cache.query.internal.DefaultQuery;
+import org.apache.geode.internal.Assert;
 import org.apache.geode.internal.cache.BucketNotFoundException;
 import org.apache.geode.internal.cache.EntrySnapshot;
+import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.pdx.internal.TypeRegistry;
+import org.apache.geode.test.fake.Fakes;
 import org.apache.geode.test.junit.categories.UnitTest;
 
 /**
  * Unit test that async event listener dispatched the events to the 
appropriate repository.
  */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({LuceneEventListener.class, LogService.class})
 @Category(UnitTest.class)
+@PowerMockIgnore("*.UnitTest")
 public class LuceneEventListenerJUnitTest {
 
+  private RepositoryManager manager;
+  private LuceneEventListener listener;
+  private InternalCache cache;
+
+  @Before
+  public void setup() {
+    cache = Fakes.cache();
+    manager = Mockito.mock(RepositoryManager.class);
+    listener = new LuceneEventListener(cache, manager);
+  }
+
   @After
   public void clearExceptionListener() {
     LuceneEventListener.setExceptionObserver(null);
@@ -53,20 +93,25 @@ public class LuceneEventListenerJUnitTest {
 
   @Test
   public void 
pdxReadSerializedFlagShouldBeResetBackToOriginalValueAfterProcessingEvents() {
-    boolean originalPdxReadSerialized = DefaultQuery.getPdxReadSerialized();
+    ArgumentCaptor valueCapture = ArgumentCaptor.forClass(Boolean.class);
+    TypeRegistry mockTypeRegistry = cache.getPdxRegistry();
+    doNothing().when(mockTypeRegistry)
+        .setPdxReadSerializedOverride((Boolean) valueCapture.capture());
+
+    boolean originalPdxReadSerialized = 
cache.getPdxRegistry().getPdxReadSerializedOverride();
     try {
-      DefaultQuery.setPdxReadSerialized(true);
-      LuceneEventListener luceneEventListener = new LuceneEventListener(null);
-      luceneEventListener.process(new LinkedList());
-      assertTrue(DefaultQuery.getPdxReadSerialized());
+      cache.getPdxRegistry().setPdxReadSerializedOverride(true);
+      Assert.assertTrue((Boolean) valueCapture.getValue());
+      listener.process(new LinkedList<>());
+      Assert.assertTrue(!(Boolean) valueCapture.getValue());
     } finally {
-      DefaultQuery.setPdxReadSerialized(originalPdxReadSerialized);
+      
cache.getPdxRegistry().setPdxReadSerializedOverride(originalPdxReadSerialized);
     }
   }
 
   @Test
   public void testProcessBatch() throws Exception {
-    RepositoryManager manager = Mockito.mock(RepositoryManager.class);
+
     IndexRepository repo1 = Mockito.mock(IndexRepository.class);
     IndexRepository repo2 = Mockito.mock(IndexRepository.class);
     Region region1 = Mockito.mock(Region.class);
@@ -76,7 +121,6 @@ public class LuceneEventListenerJUnitTest {
 
     Mockito.when(manager.getRepository(eq(region1), any(), 
eq(callback1))).thenReturn(repo1);
     Mockito.when(manager.getRepository(eq(region2), any(), 
eq(null))).thenReturn(repo2);
-    LuceneEventListener listener = new LuceneEventListener(manager);
     List<AsyncEvent> events = new ArrayList<AsyncEvent>();
 
     int numEntries = 100;
@@ -118,28 +162,27 @@ public class LuceneEventListenerJUnitTest {
   @Test
   public void shouldHandleBucketNotFoundExceptionWithoutLoggingError()
       throws BucketNotFoundException {
-    RepositoryManager manager = Mockito.mock(RepositoryManager.class);
-    Logger log = Mockito.mock(Logger.class);
     Mockito.when(manager.getRepository(any(), any(), any()))
         .thenThrow(BucketNotFoundException.class);
-    LuceneEventListener listener = new LuceneEventListener(manager);
-    listener.logger = log;
+
+    PowerMockito.mockStatic(LogService.class);
+    Logger logger = Mockito.mock(Logger.class);
+    Mockito.when(LogService.getLogger()).thenReturn(logger);
+
     AsyncEvent event = Mockito.mock(AsyncEvent.class);
-    boolean result = listener.processEvents(Arrays.asList(new AsyncEvent[] 
{event}));
+    boolean result = listener.processEvents(Arrays.asList(event));
     assertFalse(result);
-    verify(log, never()).error(anyString(), any(Exception.class));
+    verify(logger, never()).error(anyString(), any(Exception.class));
   }
 
   @Test
   public void shouldThrowAndCaptureIOException() throws 
BucketNotFoundException {
-    RepositoryManager manager = Mockito.mock(RepositoryManager.class);
     Mockito.when(manager.getRepository(any(), any(), 
any())).thenThrow(IOException.class);
     AtomicReference<Throwable> lastException = new AtomicReference<>();
     LuceneEventListener.setExceptionObserver(lastException::set);
-    LuceneEventListener listener = new LuceneEventListener(manager);
     AsyncEvent event = Mockito.mock(AsyncEvent.class);
     try {
-      listener.processEvents(Arrays.asList(new AsyncEvent[] {event}));
+      listener.processEvents(Arrays.asList(event));
       fail("should have thrown an exception");
     } catch (InternalGemFireError expected) {
       assertEquals(expected, lastException.get());
diff --git 
a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/PartitionedRepositoryManagerJUnitTest.java
 
b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/PartitionedRepositoryManagerJUnitTest.java
index ec122d1..d87be41 100644
--- 
a/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/PartitionedRepositoryManagerJUnitTest.java
+++ 
b/geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/PartitionedRepositoryManagerJUnitTest.java
@@ -104,8 +104,10 @@ public class PartitionedRepositoryManagerJUnitTest {
     serializer = new HeterogeneousLuceneSerializer();
     DLockService lockService = mock(DLockService.class);
     when(lockService.lock(any(), anyLong(), anyLong())).thenReturn(true);
+    when(userRegion.getRegionService()).thenReturn(cache);
     
DLockService.addLockServiceForTests(PartitionedRegionHelper.PARTITION_LOCK_SERVICE_NAME,
         lockService);
+
     createIndexAndRepoManager();
   }
 

-- 
To stop receiving notification emails like this one, please contact
u...@apache.org.

Reply via email to