Merge branch 'cassandra-2.2' into cassandra-3.0
Conflicts:
test/unit/org/apache/cassandra/db/KeyCacheTest.java
Project: http://git-wip-us.apache.org/repos/asf/cassandra/repo
Commit: http://git-wip-us.apache.org/repos/asf/cassandra/commit/028c3f77
Tree: http://git-wip-us.apache.org/repos/asf/cassandra/tree/028c3f77
Diff: http://git-wip-us.apache.org/repos/asf/cassandra/diff/028c3f77
Branch: refs/heads/trunk
Commit: 028c3f779fe5bafcecd07b0606783df7675a701f
Parents: a9db74b 3573faf
Author: Sylvain Lebresne <[email protected]>
Authored: Fri Oct 9 14:58:40 2015 +0200
Committer: Sylvain Lebresne <[email protected]>
Committed: Fri Oct 9 14:58:40 2015 +0200
----------------------------------------------------------------------
.../org/apache/cassandra/db/KeyCacheTest.java | 66 ++++++++++++++++++--
1 file changed, 60 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
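For context, this is the merged readData helper as it reads after conflict resolution, reconstructed from the combined diff below (formatting approximated; the 3.0-side Util.cmd read path is the one kept):

    private static void readData(String keyspace, String columnFamily, int startRow, int numberOfRows)
    {
        ColumnFamilyStore store = Keyspace.open(keyspace).getColumnFamilyStore(columnFamily);
        // read each row so its key cache entry gets populated
        for (int i = 0; i < numberOfRows; i++)
            Util.getAll(Util.cmd(store, "key" + (i + startRow)).includeRow("col" + (i + startRow)).build());
    }

The new testKeyCacheLoadWithLostTable, added by this merge from the cassandra-2.2 branch, uses this helper to populate the key cache from two flushed sstables, saves the cache via keyCache.submitWrite(...), marks the first sstable obsolete, and asserts that keyCache.loadSaved() skips the lost table's entries and restores only the remaining 50 (CASSANDRA-10219).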
http://git-wip-us.apache.org/repos/asf/cassandra/blob/028c3f77/test/unit/org/apache/cassandra/db/KeyCacheTest.java
----------------------------------------------------------------------
diff --cc test/unit/org/apache/cassandra/db/KeyCacheTest.java
index 4db8703,c8caff9..515d30e
--- a/test/unit/org/apache/cassandra/db/KeyCacheTest.java
+++ b/test/unit/org/apache/cassandra/db/KeyCacheTest.java
@@@ -32,17 -34,20 +34,18 @@@ import org.junit.Test
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.cache.KeyCacheKey;
+import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.DatabaseDescriptor;
-import org.apache.cassandra.config.KSMetaData;
-import org.apache.cassandra.concurrent.ScheduledExecutors;
+import org.apache.cassandra.config.Schema;
+ import org.apache.cassandra.db.compaction.OperationType;
-import org.apache.cassandra.db.composites.*;
import org.apache.cassandra.db.compaction.CompactionManager;
-import org.apache.cassandra.db.filter.QueryFilter;
+import org.apache.cassandra.db.lifecycle.LifecycleTransaction;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.format.SSTableReader;
-import org.apache.cassandra.locator.SimpleStrategy;
+import org.apache.cassandra.schema.KeyspaceParams;
import org.apache.cassandra.service.CacheService;
-import org.apache.cassandra.utils.ByteBufferUtil;
-
import org.apache.cassandra.utils.concurrent.Refs;
+
import static org.junit.Assert.assertEquals;
public class KeyCacheTest
@@@ -56,9 -63,11 +61,10 @@@
{
SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(KEYSPACE1,
-                                    SimpleStrategy.class,
-                                    KSMetaData.optsWithRF(1),
+                                    KeyspaceParams.simple(1),
                                     SchemaLoader.standardCFMD(KEYSPACE1, COLUMN_FAMILY1),
-                                    SchemaLoader.standardCFMD(KEYSPACE1, COLUMN_FAMILY2));
+                                    SchemaLoader.standardCFMD(KEYSPACE1, COLUMN_FAMILY2),
+                                    SchemaLoader.standardCFMD(KEYSPACE1, COLUMN_FAMILY3));
}
@AfterClass
@@@ -83,7 -92,7 +89,7 @@@
store.forceBlockingFlush();
// populate the cache
- readData(KEYSPACE1, COLUMN_FAMILY2, 100);
- SchemaLoader.readData(KEYSPACE1, COLUMN_FAMILY2, 0, 100);
++ readData(KEYSPACE1, COLUMN_FAMILY2, 0, 100);
assertKeyCacheSize(100, KEYSPACE1, COLUMN_FAMILY2);
        // really? our caches don't implement the map interface? (hence no .addAll)
@@@ -120,6 -129,56 +126,56 @@@
}
@Test
+ public void testKeyCacheLoadWithLostTable() throws Exception
+ {
+ CompactionManager.instance.disableAutoCompaction();
+
+        ColumnFamilyStore store = Keyspace.open(KEYSPACE1).getColumnFamilyStore(COLUMN_FAMILY3);
+
+ // empty the cache
+ CacheService.instance.invalidateKeyCache();
+ assertKeyCacheSize(0, KEYSPACE1, COLUMN_FAMILY3);
+
+ // insert data and force to disk
+ SchemaLoader.insertData(KEYSPACE1, COLUMN_FAMILY3, 0, 100);
+ store.forceBlockingFlush();
+
-        Collection<SSTableReader> firstFlushTables = ImmutableList.copyOf(store.getSSTables());
++        Collection<SSTableReader> firstFlushTables = ImmutableList.copyOf(store.getLiveSSTables());
+
+ // populate the cache
- SchemaLoader.readData(KEYSPACE1, COLUMN_FAMILY3, 0, 100);
++ readData(KEYSPACE1, COLUMN_FAMILY3, 0, 100);
+ assertKeyCacheSize(100, KEYSPACE1, COLUMN_FAMILY3);
+
+ // insert some new data and force to disk
+ SchemaLoader.insertData(KEYSPACE1, COLUMN_FAMILY3, 100, 50);
+ store.forceBlockingFlush();
+
+ // check that it's fine
- SchemaLoader.readData(KEYSPACE1, COLUMN_FAMILY3, 100, 50);
++ readData(KEYSPACE1, COLUMN_FAMILY3, 100, 50);
+ assertKeyCacheSize(150, KEYSPACE1, COLUMN_FAMILY3);
+
+ // force the cache to disk
+ CacheService.instance.keyCache.submitWrite(Integer.MAX_VALUE).get();
+
+ CacheService.instance.invalidateKeyCache();
+ assertKeyCacheSize(0, KEYSPACE1, COLUMN_FAMILY3);
+
+ // check that the content is written correctly
+ CacheService.instance.keyCache.loadSaved();
+ assertKeyCacheSize(150, KEYSPACE1, COLUMN_FAMILY3);
+
+ CacheService.instance.invalidateKeyCache();
+ assertKeyCacheSize(0, KEYSPACE1, COLUMN_FAMILY3);
+
+        // now remove the first sstable from the store to simulate losing the file
+ store.markObsolete(firstFlushTables, OperationType.UNKNOWN);
+
+        // check that reading now correctly skips over lost table and reads the rest (CASSANDRA-10219)
+ CacheService.instance.keyCache.loadSaved();
+ assertKeyCacheSize(50, KEYSPACE1, COLUMN_FAMILY3);
+ }
+
+ @Test
public void testKeyCache() throws ExecutionException, InterruptedException
{
CompactionManager.instance.disableAutoCompaction();
@@@ -175,16 -267,6 +231,14 @@@
assertKeyCacheSize(noEarlyOpen ? 4 : 2, KEYSPACE1, COLUMN_FAMILY1);
}
-    private static void readData(String keyspace, String columnFamily, int numberOfRows)
++    private static void readData(String keyspace, String columnFamily, int startRow, int numberOfRows)
+    {
+        ColumnFamilyStore store = Keyspace.open(keyspace).getColumnFamilyStore(columnFamily);
-        CFMetaData cfm = Schema.instance.getCFMetaData(keyspace, columnFamily);
-
+        for (int i = 0; i < numberOfRows; i++)
-            Util.getAll(Util.cmd(store, "key" + i).includeRow("col" + i).build());
++            Util.getAll(Util.cmd(store, "key" + (i + startRow)).includeRow("col" + (i + startRow)).build());
+    }
+
+
    private void assertKeyCacheSize(int expected, String keyspace, String columnFamily)
{
int size = 0;