Author: amitj
Date: Wed Sep 12 08:30:32 2018
New Revision: 1840637
URL: http://svn.apache.org/viewvc?rev=1840637&view=rev
Log:
OAK-7737: Add edge-case checks and tests for the metadata-related methods in
S3/FileDataStore
- Added tests for the metadata operations
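
For context, a minimal sketch of the argument-validation pattern these changes
apply (illustrative only, not the patch itself; the class name is hypothetical,
and it assumes Guava's Preconditions.checkArgument and Strings, which Oak
already uses):

    import static com.google.common.base.Preconditions.checkArgument;

    import java.io.InputStream;

    import com.google.common.base.Strings;

    public class MetadataArgsSketch {
        // Fail fast with IllegalArgumentException instead of letting a null
        // input or an empty record name surface as an NPE deeper in the backend.
        static void validate(InputStream input, String name) {
            checkArgument(input != null, "input should not be null");
            checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
        }
    }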
Added:
jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/SharedDataStoreTest.java
(with props)
Modified:
jackrabbit/oak/trunk/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java
jackrabbit/oak/trunk/oak-blob-cloud/src/test/java/org/apache/jackrabbit/oak/blob/cloud/s3/TestS3DataStore.java
jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStoreTest.java
Modified:
jackrabbit/oak/trunk/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java
URL:
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java?rev=1840637&r1=1840636&r2=1840637&view=diff
==============================================================================
---
jackrabbit/oak/trunk/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java
(original)
+++
jackrabbit/oak/trunk/oak-blob-cloud/src/main/java/org/apache/jackrabbit/oak/blob/cloud/s3/S3Backend.java
Wed Sep 12 08:30:32 2018
@@ -17,9 +17,6 @@
package org.apache.jackrabbit.oak.blob.cloud.s3;
-import static com.google.common.collect.Iterables.filter;
-import static java.lang.Thread.currentThread;
-
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
@@ -95,6 +92,10 @@ import org.jetbrains.annotations.NotNull
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.collect.Iterables.filter;
+import static java.lang.Thread.currentThread;
+
/**
* A data store backend that stores data on Amazon S3.
*/
@@ -464,6 +465,9 @@ public class S3Backend extends AbstractS
@Override
public void addMetadataRecord(final InputStream input, final String name)
throws DataStoreException {
+ checkArgument(input != null, "input should not be null");
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
ClassLoader contextClassLoader =
Thread.currentThread().getContextClassLoader();
try {
@@ -484,6 +488,9 @@ public class S3Backend extends AbstractS
@Override
public void addMetadataRecord(File input, String name) throws DataStoreException {
+ checkArgument(input != null, "input should not be null");
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
ClassLoader contextClassLoader =
Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
@@ -503,6 +510,8 @@ public class S3Backend extends AbstractS
@Override
public DataRecord getMetadataRecord(String name) {
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
ClassLoader contextClassLoader =
Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(
@@ -519,6 +528,8 @@ public class S3Backend extends AbstractS
@Override
public List<DataRecord> getAllMetadataRecords(String prefix) {
+ checkArgument(null != prefix, "prefix should not be null");
+
List<DataRecord> metadataList = new ArrayList<DataRecord>();
ClassLoader contextClassLoader =
Thread.currentThread().getContextClassLoader();
try {
@@ -542,6 +553,8 @@ public class S3Backend extends AbstractS
@Override
public boolean deleteMetadataRecord(String name) {
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
ClassLoader contextClassLoader =
Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(
@@ -557,6 +570,8 @@ public class S3Backend extends AbstractS
@Override
public void deleteAllMetadataRecords(String prefix) {
+ checkArgument(null != prefix, "prefix should not be null");
+
ClassLoader contextClassLoader =
Thread.currentThread().getContextClassLoader();
try {
Thread.currentThread().setContextClassLoader(
Modified:
jackrabbit/oak/trunk/oak-blob-cloud/src/test/java/org/apache/jackrabbit/oak/blob/cloud/s3/TestS3DataStore.java
URL:
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-cloud/src/test/java/org/apache/jackrabbit/oak/blob/cloud/s3/TestS3DataStore.java?rev=1840637&r1=1840636&r2=1840637&view=diff
==============================================================================
---
jackrabbit/oak/trunk/oak-blob-cloud/src/test/java/org/apache/jackrabbit/oak/blob/cloud/s3/TestS3DataStore.java
(original)
+++
jackrabbit/oak/trunk/oak-blob-cloud/src/test/java/org/apache/jackrabbit/oak/blob/cloud/s3/TestS3DataStore.java
Wed Sep 12 08:30:32 2018
@@ -18,9 +18,12 @@ package org.apache.jackrabbit.oak.blob.c
import java.io.ByteArrayInputStream;
import java.io.File;
+import java.io.InputStream;
+import java.io.StringWriter;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
+import java.util.Map;
import java.util.Properties;
import java.util.Random;
@@ -28,6 +31,9 @@ import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import javax.jcr.RepositoryException;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.core.data.DataRecord;
import org.apache.jackrabbit.core.data.DataStore;
@@ -43,12 +49,15 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.commons.codec.binary.Hex.encodeHexString;
+import static org.apache.commons.io.FileUtils.copyInputStreamToFile;
import static org.apache.jackrabbit.oak.blob.cloud.s3.S3DataStoreUtils.getFixtures;
import static org.apache.jackrabbit.oak.blob.cloud.s3.S3DataStoreUtils.getS3DataStore;
import static org.apache.jackrabbit.oak.blob.cloud.s3.S3DataStoreUtils.isS3Configured;
+import static org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils.randomStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
/**
@@ -80,6 +89,7 @@ public class TestS3DataStore {
}
private String bucket;
+
@Before
public void setUp() throws Exception {
startTime = new Date();
@@ -106,6 +116,7 @@ public class TestS3DataStore {
Random randomGen = new Random();
props = S3DataStoreUtils.getS3Config();
+ props.put("cacheSize", "0");
ds = getS3DataStore(s3Class, props, dataStoreDir.getAbsolutePath());
bucket = props.getProperty(S3Constants.S3_BUCKET);
@@ -114,9 +125,9 @@ public class TestS3DataStore {
DataRecord rec = ds.addRecord(new ByteArrayInputStream(data));
assertEquals(data.length, rec.getLength());
String ref = rec.getReference();
+ assertNotNull(ref);
String id = rec.getIdentifier().toString();
- assertNotNull(ref);
S3DataStore s3 = ((S3DataStore) ds);
byte[] refKey = ((S3Backend) s3.getBackend()).getOrCreateReferenceKey();
@@ -147,6 +158,7 @@ public class TestS3DataStore {
bucket = props.getProperty(S3Constants.S3_BUCKET);
props.remove(S3Constants.S3_BUCKET);
props.put(S3Constants.S3_CONTAINER, bucket);
+ props.put("cacheSize", "0");
ds = getS3DataStore(s3Class, props, dataStoreDir.getAbsolutePath());
byte[] data = new byte[4096];
@@ -155,6 +167,232 @@ public class TestS3DataStore {
assertEquals(data.length, rec.getLength());
}
+
+ // AddMetadataRecord (Backend)
+
+ @Test
+ public void testBackendAddMetadataRecordsFromInputStream() throws Exception {
+ assumeTrue(isS3Configured());
+ S3DataStore s3ds = getDataStore();
+
+ for (boolean fromInputStream : Lists.newArrayList(false, true)) {
+ String prefix = String.format("%s.META.", getClass().getSimpleName());
+ for (int count : Lists.newArrayList(1, 3)) {
+ Map<String, String> records = Maps.newHashMap();
+ for (int i = 0; i < count; i++) {
+ String recordName = String.format("%sname.%d", prefix, i);
+ String data = String.format("testData%d", i);
+ records.put(recordName, data);
+
+ if (fromInputStream) {
+ s3ds.addMetadataRecord(new ByteArrayInputStream(data.getBytes()), recordName);
+ }
+ else {
+ File testFile = folder.newFile();
+ copyInputStreamToFile(new ByteArrayInputStream(data.getBytes()), testFile);
+ s3ds.addMetadataRecord(testFile, recordName);
+ }
+ }
+
+ assertEquals(count, s3ds.getAllMetadataRecords(prefix).size());
+
+ for (Map.Entry<String, String> entry : records.entrySet()) {
+ DataRecord record = s3ds.getMetadataRecord(entry.getKey());
+ StringWriter writer = new StringWriter();
+ IOUtils.copy(record.getStream(), writer);
+ s3ds.deleteMetadataRecord(entry.getKey());
+ assertTrue(writer.toString().equals(entry.getValue()));
+ }
+
+ assertEquals(0, s3ds.getAllMetadataRecords(prefix).size());
+ }
+ }
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordNullInputStreamThrowsNullPointerException() throws Exception {
+ assumeTrue(isS3Configured());
+
+ expectedEx.expect(IllegalArgumentException.class);
+ expectedEx.expectMessage("input should not be null");
+
+ S3DataStore s3ds = getDataStore();
+ s3ds.addMetadataRecord((InputStream)null, "name");
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordNullFileThrowsNullPointerException() throws Exception {
+ assumeTrue(isS3Configured());
+
+ expectedEx.expect(IllegalArgumentException.class);
+ expectedEx.expectMessage("input should not be null");
+
+ S3DataStore s3ds = getDataStore();
+ s3ds.addMetadataRecord((File)null, "name");
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordNullEmptyNameThrowsIllegalArgumentException() throws Exception {
+ assumeTrue(isS3Configured());
+
+ S3DataStore s3ds = getDataStore();
+
+ final String data = "testData";
+ for (boolean fromInputStream : Lists.newArrayList(false, true)) {
+ for (String name : Lists.newArrayList(null, "")) {
+ try {
+ if (fromInputStream) {
+ s3ds.addMetadataRecord(new ByteArrayInputStream(data.getBytes()), name);
+ } else {
+ File testFile = folder.newFile();
+ copyInputStreamToFile(new ByteArrayInputStream(data.getBytes()), testFile);
+ s3ds.addMetadataRecord(testFile, name);
+ }
+ fail();
+ } catch (IllegalArgumentException e) {
+ assertTrue("name should not be
empty".equals(e.getMessage()));
+ }
+ }
+ }
+ }
+ // GetMetadataRecord (Backend)
+
+ @Test
+ public void testBackendGetMetadataRecordInvalidName() throws Exception {
+ assumeTrue(isS3Configured());
+ S3DataStore s3ds = getDataStore();
+
+ s3ds.addMetadataRecord(randomStream(0, 10), "testRecord");
+ for (String name : Lists.newArrayList("", null)) {
+ try {
+ s3ds.getMetadataRecord(name);
+ fail("Expect to throw");
+ } catch(Exception e) {}
+ }
+ s3ds.deleteMetadataRecord("testRecord");
+ }
+
+ // GetAllMetadataRecords (Backend)
+
+ @Test
+ public void testBackendGetAllMetadataRecordsPrefixMatchesAll() throws Exception {
+ assumeTrue(isS3Configured());
+ S3DataStore s3ds = getDataStore();
+
+ assertEquals(0, s3ds.getAllMetadataRecords("").size());
+
+ String prefixAll = "prefix1";
+ String prefixSome = "prefix1.prefix2";
+ String prefixOne = "prefix1.prefix3";
+ String prefixNone = "prefix4";
+
+ s3ds.addMetadataRecord(randomStream(1, 10), String.format("%s.testRecord1", prefixAll));
+ s3ds.addMetadataRecord(randomStream(2, 10), String.format("%s.testRecord2", prefixSome));
+ s3ds.addMetadataRecord(randomStream(3, 10), String.format("%s.testRecord3", prefixSome));
+ s3ds.addMetadataRecord(randomStream(4, 10), String.format("%s.testRecord4", prefixOne));
+ s3ds.addMetadataRecord(randomStream(5, 10), "prefix5.testRecord5");
+
+ assertEquals(5, s3ds.getAllMetadataRecords("").size());
+ assertEquals(4, s3ds.getAllMetadataRecords(prefixAll).size());
+ assertEquals(2, s3ds.getAllMetadataRecords(prefixSome).size());
+ assertEquals(1, s3ds.getAllMetadataRecords(prefixOne).size());
+ assertEquals(0, s3ds.getAllMetadataRecords(prefixNone).size());
+
+ s3ds.deleteAllMetadataRecords("");
+ assertEquals(0, s3ds.getAllMetadataRecords("").size());
+ }
+
+
+ @Test
+ public void testBackendGetAllMetadataRecordsNullPrefixThrowsNullPointerException() throws Exception {
+ assumeTrue(isS3Configured());
+
+ expectedEx.expect(IllegalArgumentException.class);
+ expectedEx.expectMessage("prefix should not be null");
+
+ S3DataStore s3ds = getDataStore();
+ s3ds.getAllMetadataRecords(null);
+ }
+
+ // DeleteMetadataRecord (Backend)
+
+ @Test
+ public void testBackendDeleteMetadataRecord() throws Exception {
+ assumeTrue(isS3Configured());
+ S3DataStore s3ds = getDataStore();
+
+ s3ds.addMetadataRecord(randomStream(0, 10), "name");
+ for (String name : Lists.newArrayList("", null)) {
+ if (Strings.isNullOrEmpty(name)) {
+ try {
+ s3ds.deleteMetadataRecord(name);
+ }
+ catch (IllegalArgumentException e) { }
+ }
+ else {
+ s3ds.deleteMetadataRecord(name);
+ fail();
+ }
+ }
+ assertTrue(s3ds.deleteMetadataRecord("name"));
+ }
+
+ // DeleteAllMetadataRecords (Backend)
+
+ @Test
+ public void testBackendDeleteAllMetadataRecordsPrefixMatchesAll() throws Exception {
+ assumeTrue(isS3Configured());
+ S3DataStore s3ds = getDataStore();
+
+ String prefixAll = "prefix1";
+ String prefixSome = "prefix1.prefix2";
+ String prefixOne = "prefix1.prefix3";
+ String prefixNone = "prefix4";
+
+ Map<String, Integer> prefixCounts = Maps.newHashMap();
+ prefixCounts.put(prefixAll, 4);
+ prefixCounts.put(prefixSome, 2);
+ prefixCounts.put(prefixOne, 1);
+ prefixCounts.put(prefixNone, 0);
+
+ for (Map.Entry<String, Integer> entry : prefixCounts.entrySet()) {
+ s3ds.addMetadataRecord(randomStream(1, 10), String.format("%s.testRecord1", prefixAll));
+ s3ds.addMetadataRecord(randomStream(2, 10), String.format("%s.testRecord2", prefixSome));
+ s3ds.addMetadataRecord(randomStream(3, 10), String.format("%s.testRecord3", prefixSome));
+ s3ds.addMetadataRecord(randomStream(4, 10), String.format("%s.testRecord4", prefixOne));
+
+ int preCount = s3ds.getAllMetadataRecords("").size();
+
+ s3ds.deleteAllMetadataRecords(entry.getKey());
+
+ int deletedCount = preCount - s3ds.getAllMetadataRecords("").size();
+ assertEquals(entry.getValue().intValue(), deletedCount);
+
+ s3ds.deleteAllMetadataRecords("");
+ }
+ }
+
+ @Test
+ public void testBackendDeleteAllMetadataRecordsNoRecordsNoChange() throws Exception {
+ assumeTrue(isS3Configured());
+ S3DataStore s3ds = getDataStore();
+
+ assertEquals(0, s3ds.getAllMetadataRecords("").size());
+
+ s3ds.deleteAllMetadataRecords("");
+
+ assertEquals(0, s3ds.getAllMetadataRecords("").size());
+ }
+
+ private S3DataStore getDataStore() throws Exception {
+ props = S3DataStoreUtils.getS3Config();
+ bucket = props.getProperty(S3Constants.S3_BUCKET);
+ bucket = bucket + "-" + System.currentTimeMillis();
+ props.put(S3Constants.S3_BUCKET, bucket);
+ props.put("cacheSize", "0");
+ return (S3DataStore) getS3DataStore(s3Class, props, dataStoreDir.getAbsolutePath());
+ }
+
@After
public void tearDown() {
try {
Modified:
jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
URL:
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java?rev=1840637&r1=1840636&r2=1840637&view=diff
==============================================================================
---
jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
(original)
+++
jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/FSBackend.java
Wed Sep 12 08:30:32 2018
@@ -31,6 +31,7 @@ import java.util.Properties;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
+import com.google.common.base.Strings;
import com.google.common.io.Closeables;
import com.google.common.io.Files;
import org.apache.commons.io.FileUtils;
@@ -46,6 +47,7 @@ import org.jetbrains.annotations.NotNull
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static com.google.common.base.Preconditions.checkArgument;
import static org.apache.commons.io.FilenameUtils.normalizeNoEndSeparator;
/**
@@ -172,6 +174,9 @@ public class FSBackend extends AbstractS
@Override
public void addMetadataRecord(InputStream input, String name)
throws DataStoreException {
+ checkArgument(input != null, "input should not be null");
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
try {
File file = new File(fsPathDir, name);
FileOutputStream os = new FileOutputStream(file);
@@ -190,6 +195,9 @@ public class FSBackend extends AbstractS
@Override
public void addMetadataRecord(File input, String name) throws DataStoreException {
+ checkArgument(input != null, "input should not be null");
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
try {
File file = new File(fsPathDir, name);
FileUtils.copyFile(input, file);
@@ -202,6 +210,8 @@ public class FSBackend extends AbstractS
@Override
public DataRecord getMetadataRecord(String name) {
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
for (File file : FileFilterUtils
.filter(FileFilterUtils.nameFileFilter(name),
fsPathDir.listFiles())) {
if (!file.isDirectory()) {
@@ -213,6 +223,8 @@ public class FSBackend extends AbstractS
@Override
public List<DataRecord> getAllMetadataRecords(String prefix) {
+ checkArgument(null != prefix, "prefix should not be null");
+
List<DataRecord> rootRecords = new ArrayList<DataRecord>();
for (File file : FileFilterUtils
.filterList(FileFilterUtils.prefixFileFilter(prefix),
fsPathDir.listFiles())) {
@@ -226,6 +238,8 @@ public class FSBackend extends AbstractS
@Override
public boolean deleteMetadataRecord(String name) {
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
for (File file : FileFilterUtils
.filterList(FileFilterUtils.nameFileFilter(name),
fsPathDir.listFiles())) {
if (!file.isDirectory()) { // skip directories which are actual
data store files
@@ -242,6 +256,8 @@ public class FSBackend extends AbstractS
@Override
public void deleteAllMetadataRecords(String prefix) {
+ checkArgument(null != prefix, "prefix should not be null");
+
for (File file : FileFilterUtils
.filterList(FileFilterUtils.prefixFileFilter(prefix),
fsPathDir.listFiles())) {
if (!file.isDirectory()) { // skip directories which are actual
data store files
Modified:
jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
URL:
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java?rev=1840637&r1=1840636&r2=1840637&view=diff
==============================================================================
---
jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
(original)
+++
jackrabbit/oak/trunk/oak-blob-plugins/src/main/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStore.java
Wed Sep 12 08:30:32 2018
@@ -34,10 +34,10 @@ import java.util.Set;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
+import com.google.common.base.Strings;
import com.google.common.io.BaseEncoding;
import com.google.common.io.Closeables;
import com.google.common.io.Files;
-
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.filefilter.FileFilterUtils;
@@ -50,6 +50,7 @@ import org.apache.jackrabbit.oak.plugins
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static com.google.common.base.Preconditions.checkArgument;
import static org.apache.commons.io.FilenameUtils.normalizeNoEndSeparator;
/**
@@ -145,6 +146,9 @@ public class OakFileDataStore extends Fi
@Override
public void addMetadataRecord(InputStream input, String name)
throws DataStoreException {
+ checkArgument(input != null, "input should not be null");
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
try {
File file = new File(getPath(), name);
FileOutputStream os = new FileOutputStream(file);
@@ -163,6 +167,9 @@ public class OakFileDataStore extends Fi
@Override
public void addMetadataRecord(File input, String name) throws DataStoreException {
+ checkArgument(input != null, "input should not be null");
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
try {
File file = new File(getPath(), name);
FileUtils.copyFile(input, file);
@@ -175,6 +182,8 @@ public class OakFileDataStore extends Fi
@Override
public DataRecord getMetadataRecord(String name) {
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
File root = new File(getPath());
for (File file :
FileFilterUtils.filter(FileFilterUtils.nameFileFilter(name), root.listFiles()))
{
if (!file.isDirectory()) {
@@ -186,6 +195,8 @@ public class OakFileDataStore extends Fi
@Override
public List<DataRecord> getAllMetadataRecords(String prefix) {
+ checkArgument(null != prefix, "prefix should not be null");
+
File root = new File(getPath());
List<DataRecord> rootRecords = new ArrayList<DataRecord>();
for (File file : FileFilterUtils.filterList(
@@ -201,6 +212,8 @@ public class OakFileDataStore extends Fi
@Override
public boolean deleteMetadataRecord(String name) {
+ checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty");
+
File root = new File(getPath());
for (File file : FileFilterUtils.filterList(
@@ -220,6 +233,8 @@ public class OakFileDataStore extends Fi
@Override
public void deleteAllMetadataRecords(String prefix) {
+ checkArgument(null != prefix, "prefix should not be null");
+
File root = new File(getPath());
for (File file : FileFilterUtils.filterList(
Modified:
jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStoreTest.java
URL:
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStoreTest.java?rev=1840637&r1=1840636&r2=1840637&view=diff
==============================================================================
---
jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStoreTest.java
(original)
+++
jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/OakFileDataStoreTest.java
Wed Sep 12 08:30:32 2018
@@ -19,25 +19,48 @@
package org.apache.jackrabbit.oak.plugins.blob.datastore;
+import java.io.ByteArrayInputStream;
import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringWriter;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import com.google.common.base.Function;
+import com.google.common.base.Strings;
import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.core.data.DataIdentifier;
+import org.apache.jackrabbit.core.data.DataRecord;
+import org.apache.jackrabbit.core.data.DataStoreException;
import org.apache.jackrabbit.core.data.FileDataStore;
import org.jetbrains.annotations.Nullable;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.rules.TemporaryFolder;
+import static org.apache.commons.io.FileUtils.copyInputStreamToFile;
+import static org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils.randomStream;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
public class OakFileDataStoreTest {
+ @Rule
+ public TemporaryFolder folder = new TemporaryFolder(new File("target"));
+
+ @Rule
+ public ExpectedException expectedEx = ExpectedException.none();
@Test
public void testGetAllIdentifiersRelative1() throws Exception {
@@ -88,4 +111,241 @@ public class OakFileDataStoreTest {
noop.remove("foo");
assertTrue(noop.isEmpty());
}
+
+
+ // AddMetadataRecord (Backend)
+
+ @Test
+ public void testBackendAddMetadataRecordsFromInputStream() throws Exception {
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+
+ for (boolean fromInputStream : Lists.newArrayList(false, true)) {
+ String prefix = String.format("%s.META.", getClass().getSimpleName());
+ for (int count : Lists.newArrayList(1, 3)) {
+ Map<String, String> records = Maps.newHashMap();
+ for (int i = 0; i < count; i++) {
+ String recordName = String.format("%sname.%d", prefix, i);
+ String data = String.format("testData%d", i);
+ records.put(recordName, data);
+
+ if (fromInputStream) {
+ fds.addMetadataRecord(new ByteArrayInputStream(data.getBytes()), recordName);
+ }
+ else {
+ File testFile = folder.newFile();
+ copyInputStreamToFile(new ByteArrayInputStream(data.getBytes()), testFile);
+ fds.addMetadataRecord(testFile, recordName);
+ }
+ }
+
+ assertEquals(count, fds.getAllMetadataRecords(prefix).size());
+
+ for (Map.Entry<String, String> entry : records.entrySet()) {
+ DataRecord record = fds.getMetadataRecord(entry.getKey());
+ StringWriter writer = new StringWriter();
+ IOUtils.copy(record.getStream(), writer);
+ fds.deleteMetadataRecord(entry.getKey());
+ assertTrue(writer.toString().equals(entry.getValue()));
+ }
+
+ assertEquals(0, fds.getAllMetadataRecords(prefix).size());
+ }
+ }
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordFileNotFoundThrowsDataStoreException() throws IOException {
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+
+ File testFile = folder.newFile();
+ copyInputStreamToFile(randomStream(0, 10), testFile);
+ testFile.delete();
+ try {
+ fds.addMetadataRecord(testFile, "name");
+ fail();
+ }
+ catch (DataStoreException e) {
+ assertTrue(e.getCause() instanceof FileNotFoundException);
+ }
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordNullInputStreamThrowsNullPointerException() throws DataStoreException {
+ expectedEx.expect(IllegalArgumentException.class);
+ expectedEx.expectMessage("input should not be null");
+
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+ fds.addMetadataRecord((InputStream)null, "name");
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordNullFileThrowsNullPointerException() throws DataStoreException {
+ expectedEx.expect(IllegalArgumentException.class);
+ expectedEx.expectMessage("input should not be null");
+
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+
+ fds.addMetadataRecord((File)null, "name");
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordNullEmptyNameThrowsIllegalArgumentException() throws DataStoreException, IOException {
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+
+ final String data = "testData";
+ for (boolean fromInputStream : Lists.newArrayList(false, true)) {
+ for (String name : Lists.newArrayList(null, "")) {
+ try {
+ if (fromInputStream) {
+ fds.addMetadataRecord(new ByteArrayInputStream(data.getBytes()), name);
+ } else {
+ File testFile = folder.newFile();
+ copyInputStreamToFile(new ByteArrayInputStream(data.getBytes()), testFile);
+ fds.addMetadataRecord(testFile, name);
+ }
+ fail();
+ } catch (IllegalArgumentException e) {
+ assertTrue("name should not be
empty".equals(e.getMessage()));
+ }
+ }
+ }
+ }
+
+ // GetMetadataRecord (Backend)
+
+ @Test
+ public void testBackendGetMetadataRecordInvalidName() throws DataStoreException {
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+
+ fds.addMetadataRecord(randomStream(0, 10), "testRecord");
+ assertNull(fds.getMetadataRecord("invalid"));
+ for (String name : Lists.newArrayList("", null)) {
+ try {
+ fds.getMetadataRecord(name);
+ fail("Expect to throw");
+ } catch(Exception e) {}
+ }
+
+ fds.deleteMetadataRecord("testRecord");
+ }
+
+ // GetAllMetadataRecords (Backend)
+
+ @Test
+ public void testBackendGetAllMetadataRecordsPrefixMatchesAll() throws DataStoreException {
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+
+ assertEquals(0, fds.getAllMetadataRecords("").size());
+
+ String prefixAll = "prefix1";
+ String prefixSome = "prefix1.prefix2";
+ String prefixOne = "prefix1.prefix3";
+ String prefixNone = "prefix4";
+
+ fds.addMetadataRecord(randomStream(1, 10), String.format("%s.testRecord1", prefixAll));
+ fds.addMetadataRecord(randomStream(2, 10), String.format("%s.testRecord2", prefixSome));
+ fds.addMetadataRecord(randomStream(3, 10), String.format("%s.testRecord3", prefixSome));
+ fds.addMetadataRecord(randomStream(4, 10), String.format("%s.testRecord4", prefixOne));
+ fds.addMetadataRecord(randomStream(5, 10), "prefix5.testRecord5");
+
+ assertEquals(5, fds.getAllMetadataRecords("").size());
+ assertEquals(4, fds.getAllMetadataRecords(prefixAll).size());
+ assertEquals(2, fds.getAllMetadataRecords(prefixSome).size());
+ assertEquals(1, fds.getAllMetadataRecords(prefixOne).size());
+ assertEquals(0, fds.getAllMetadataRecords(prefixNone).size());
+
+ fds.deleteAllMetadataRecords("");
+ assertEquals(0, fds.getAllMetadataRecords("").size());
+ }
+
+ @Test
+ public void testBackendGetAllMetadataRecordsNullPrefixThrowsNullPointerException() {
+ expectedEx.expect(IllegalArgumentException.class);
+ expectedEx.expectMessage("prefix should not be null");
+
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+ fds.getAllMetadataRecords(null);
+ }
+
+ // DeleteMetadataRecord (Backend)
+
+ @Test
+ public void testBackendDeleteMetadataRecord() throws DataStoreException {
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+
+ fds.addMetadataRecord(randomStream(0, 10), "name");
+ for (String name : Lists.newArrayList("", null)) {
+ if (Strings.isNullOrEmpty(name)) {
+ try {
+ fds.deleteMetadataRecord(name);
+ }
+ catch (IllegalArgumentException e) { }
+ }
+ else {
+ fds.deleteMetadataRecord(name);
+ fail();
+ }
+ }
+ assertTrue(fds.deleteMetadataRecord("name"));
+ }
+
+ // DeleteAllMetadataRecords (Backend)
+
+ @Test
+ public void testBackendDeleteAllMetadataRecordsPrefixMatchesAll() throws DataStoreException {
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+
+ String prefixAll = "prefix1";
+ String prefixSome = "prefix1.prefix2";
+ String prefixOne = "prefix1.prefix3";
+ String prefixNone = "prefix4";
+
+ Map<String, Integer> prefixCounts = Maps.newHashMap();
+ prefixCounts.put(prefixAll, 4);
+ prefixCounts.put(prefixSome, 2);
+ prefixCounts.put(prefixOne, 1);
+ prefixCounts.put(prefixNone, 0);
+
+ for (Map.Entry<String, Integer> entry : prefixCounts.entrySet()) {
+ fds.addMetadataRecord(randomStream(1, 10), String.format("%s.testRecord1", prefixAll));
+ fds.addMetadataRecord(randomStream(2, 10), String.format("%s.testRecord2", prefixSome));
+ fds.addMetadataRecord(randomStream(3, 10), String.format("%s.testRecord3", prefixSome));
+ fds.addMetadataRecord(randomStream(4, 10), String.format("%s.testRecord4", prefixOne));
+
+ int preCount = fds.getAllMetadataRecords("").size();
+
+ fds.deleteAllMetadataRecords(entry.getKey());
+
+ int deletedCount = preCount - fds.getAllMetadataRecords("").size();
+ assertEquals(entry.getValue().intValue(), deletedCount);
+
+ fds.deleteAllMetadataRecords("");
+ }
+ }
+
+ @Test
+ public void testBackendDeleteAllMetadataRecordsNoRecordsNoChange() {
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+
+ assertEquals(0, fds.getAllMetadataRecords("").size());
+
+ fds.deleteAllMetadataRecords("");
+
+ assertEquals(0, fds.getAllMetadataRecords("").size());
+ }
+
+ @Test
+ public void testBackendDeleteAllMetadataRecordsNullPrefixThrowsNullPointerException() {
+ expectedEx.expect(IllegalArgumentException.class);
+
+ OakFileDataStore fds = datastore(folder.getRoot().getAbsolutePath());
+ fds.deleteAllMetadataRecords(null);
+ }
+
+ private static OakFileDataStore datastore(String path) {
+ OakFileDataStore fds = new OakFileDataStore();
+ fds.setPath(path);
+ fds.init(null);
+ return fds;
+ }
}
Added:
jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/SharedDataStoreTest.java
URL:
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/SharedDataStoreTest.java?rev=1840637&view=auto
==============================================================================
---
jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/SharedDataStoreTest.java
(added)
+++
jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/SharedDataStoreTest.java
Wed Sep 12 08:30:32 2018
@@ -0,0 +1,387 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.plugins.blob.datastore;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringWriter;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import com.google.common.base.Function;
+import com.google.common.base.Strings;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.jackrabbit.core.data.DataIdentifier;
+import org.apache.jackrabbit.core.data.DataRecord;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.core.data.FileDataStore;
+import org.apache.jackrabbit.oak.commons.PropertiesUtil;
+import org.apache.jackrabbit.oak.plugins.blob.SharedDataStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.SharedDataStoreTest.FixtureHelper.DATA_STORE;
+import org.jetbrains.annotations.Nullable;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import static org.apache.commons.io.FileUtils.copyInputStreamToFile;
+import static org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreUtils.randomStream;
+import static org.apache.jackrabbit.oak.plugins.blob.datastore.SharedDataStoreTest.FixtureHelper.DATA_STORE.CACHING_FDS;
+import static org.apache.jackrabbit.oak.plugins.blob.datastore.SharedDataStoreTest.FixtureHelper.DATA_STORE.FDS;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+@RunWith(Parameterized.class)
+public class SharedDataStoreTest {
+ @Rule
+ public TemporaryFolder folder = new TemporaryFolder(new File("target"));
+
+ @Rule
+ public ExpectedException expectedEx = ExpectedException.none();
+
+
+ private DATA_STORE type;
+
+ @Parameterized.Parameters(name="{index}: ({0})")
+ public static List<Object[]> fixtures() {
+ return FixtureHelper.get();
+ }
+
+ static class FixtureHelper {
+ enum DATA_STORE {
+ CACHING_FDS, FDS
+ }
+
+ static List<Object[]> get() {
+ return Lists.newArrayList(new Object[] {CACHING_FDS}, new Object[] {FDS});
+ }
+ }
+
+ public SharedDataStoreTest(DATA_STORE type) {
+ this.type = type;
+ }
+
+ @Before
+ public void setup() throws Exception {
+ if (type == CACHING_FDS) {
+ CachingFileDataStore ds = new CachingFileDataStore();
+
+ Properties props = new Properties();
+ props.setProperty("fsBackendPath",
folder.newFolder().getAbsolutePath());
+ PropertiesUtil.populate(ds, Maps.fromProperties(props), false);
+ ds.setProperties(props);
+ ds.init(folder.newFolder().getAbsolutePath());
+ dataStore = ds;
+ } else {
+ OakFileDataStore ds = new OakFileDataStore();
+ ds.init(folder.newFolder().getAbsolutePath());
+ dataStore = ds;
+ }
+ }
+
+ protected SharedDataStore dataStore;
+
+ @Test
+ public void testGetAllIdentifiersRelative1() throws Exception {
+ File f = new File("./target/oak-fds-test1");
+ testGetAllIdentifiers(f.getAbsolutePath(), f.getPath());
+ }
+
+ @Test
+ public void testGetAllIdentifiersRelative2() throws Exception {
+ File f = new File("./target", "/fds/../oak-fds-test2");
+ testGetAllIdentifiers(FilenameUtils.normalize(f.getAbsolutePath()), f.getPath());
+ }
+
+ @Test
+ public void testGetAllIdentifiers() throws Exception {
+ File f = new File("./target", "oak-fds-test3");
+ testGetAllIdentifiers(f.getAbsolutePath(), f.getPath());
+ }
+
+ private void testGetAllIdentifiers(String path, String unnormalizedPath) throws Exception {
+ File testDir = new File(path);
+ FileUtils.touch(new File(testDir, "ab/cd/ef/abcdef"));
+ FileUtils.touch(new File(testDir, "bc/de/fg/bcdefg"));
+ FileUtils.touch(new File(testDir, "cd/ef/gh/cdefgh"));
+ FileUtils.touch(new File(testDir, "c"));
+
+ FileDataStore fds = new OakFileDataStore();
+ fds.setPath(unnormalizedPath);
+ fds.init(null);
+
+ Iterator<DataIdentifier> dis = fds.getAllIdentifiers();
+ Set<String> fileNames = Sets.newHashSet(Iterators.transform(dis, new Function<DataIdentifier, String>() {
+ @Override
+ public String apply(@Nullable DataIdentifier input) {
+ return input.toString();
+ }
+ }));
+
+ Set<String> expectedNames = Sets.newHashSet("abcdef","bcdefg","cdefgh");
+ assertEquals(expectedNames, fileNames);
+ FileUtils.cleanDirectory(testDir);
+ }
+
+ // AddMetadataRecord (Backend)
+
+ @Test
+ public void testBackendAddMetadataRecordsFromInputStream() throws Exception {
+ SharedDataStore fds = dataStore;
+
+ for (boolean fromInputStream : Lists.newArrayList(false, true)) {
+ String prefix = String.format("%s.META.", getClass().getSimpleName());
+ for (int count : Lists.newArrayList(1, 3)) {
+ Map<String, String> records = Maps.newHashMap();
+ for (int i = 0; i < count; i++) {
+ String recordName = String.format("%sname.%d", prefix, i);
+ String data = String.format("testData%d", i);
+ records.put(recordName, data);
+
+ if (fromInputStream) {
+ fds.addMetadataRecord(new ByteArrayInputStream(data.getBytes()), recordName);
+ }
+ else {
+ File testFile = folder.newFile();
+ copyInputStreamToFile(new ByteArrayInputStream(data.getBytes()), testFile);
+ fds.addMetadataRecord(testFile, recordName);
+ }
+ }
+
+ assertEquals(count, fds.getAllMetadataRecords(prefix).size());
+
+ for (Map.Entry<String, String> entry : records.entrySet()) {
+ DataRecord record = fds.getMetadataRecord(entry.getKey());
+ StringWriter writer = new StringWriter();
+ IOUtils.copy(record.getStream(), writer);
+ fds.deleteMetadataRecord(entry.getKey());
+ assertTrue(writer.toString().equals(entry.getValue()));
+ }
+
+ assertEquals(0, fds.getAllMetadataRecords(prefix).size());
+ }
+ }
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordFileNotFoundThrowsDataStoreException() throws IOException {
+ SharedDataStore fds = dataStore;
+
+ File testFile = folder.newFile();
+ copyInputStreamToFile(randomStream(0, 10), testFile);
+ testFile.delete();
+ try {
+ fds.addMetadataRecord(testFile, "name");
+ fail();
+ }
+ catch (DataStoreException e) {
+ assertTrue(e.getCause() instanceof FileNotFoundException);
+ }
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordNullInputStreamThrowsNullPointerException() throws DataStoreException {
+ expectedEx.expect(IllegalArgumentException.class);
+ expectedEx.expectMessage("input should not be null");
+
+ SharedDataStore fds = dataStore;
+ fds.addMetadataRecord((InputStream)null, "name");
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordNullFileThrowsNullPointerException() throws DataStoreException {
+ expectedEx.expect(IllegalArgumentException.class);
+ expectedEx.expectMessage("input should not be null");
+
+ SharedDataStore fds = dataStore;
+ fds.addMetadataRecord((File)null, "name");
+ }
+
+ @Test
+ public void testBackendAddMetadataRecordNullEmptyNameThrowsIllegalArgumentException() throws DataStoreException, IOException {
+ SharedDataStore fds = dataStore;
+
+ final String data = "testData";
+ for (boolean fromInputStream : Lists.newArrayList(false, true)) {
+ for (String name : Lists.newArrayList(null, "")) {
+ try {
+ if (fromInputStream) {
+ fds.addMetadataRecord(new ByteArrayInputStream(data.getBytes()), name);
+ } else {
+ File testFile = folder.newFile();
+ copyInputStreamToFile(new ByteArrayInputStream(data.getBytes()), testFile);
+ fds.addMetadataRecord(testFile, name);
+ }
+ fail();
+ } catch (IllegalArgumentException e) {
+ assertTrue("name should not be
empty".equals(e.getMessage()));
+ }
+ }
+ }
+ }
+
+ // GetMetadataRecord (Backend)
+
+ @Test
+ public void testBackendGetMetadataRecordInvalidName() throws DataStoreException {
+ SharedDataStore fds = dataStore;
+
+ fds.addMetadataRecord(randomStream(0, 10), "testRecord");
+ assertNull(fds.getMetadataRecord("invalid"));
+ for (String name : Lists.newArrayList("", null)) {
+ try {
+ fds.getMetadataRecord(name);
+ fail("Expect to throw");
+ } catch(Exception e) {}
+ }
+
+ fds.deleteMetadataRecord("testRecord");
+ }
+
+ // GetAllMetadataRecords (Backend)
+
+ @Test
+ public void testBackendGetAllMetadataRecordsPrefixMatchesAll() throws DataStoreException {
+ SharedDataStore fds = dataStore;
+
+ assertEquals(0, fds.getAllMetadataRecords("").size());
+
+ String prefixAll = "prefix1";
+ String prefixSome = "prefix1.prefix2";
+ String prefixOne = "prefix1.prefix3";
+ String prefixNone = "prefix4";
+
+ fds.addMetadataRecord(randomStream(1, 10), String.format("%s.testRecord1", prefixAll));
+ fds.addMetadataRecord(randomStream(2, 10), String.format("%s.testRecord2", prefixSome));
+ fds.addMetadataRecord(randomStream(3, 10), String.format("%s.testRecord3", prefixSome));
+ fds.addMetadataRecord(randomStream(4, 10), String.format("%s.testRecord4", prefixOne));
+ fds.addMetadataRecord(randomStream(5, 10), "prefix5.testRecord5");
+
+ assertEquals(5, fds.getAllMetadataRecords("").size());
+ assertEquals(4, fds.getAllMetadataRecords(prefixAll).size());
+ assertEquals(2, fds.getAllMetadataRecords(prefixSome).size());
+ assertEquals(1, fds.getAllMetadataRecords(prefixOne).size());
+ assertEquals(0, fds.getAllMetadataRecords(prefixNone).size());
+
+ fds.deleteAllMetadataRecords("");
+ assertEquals(0, fds.getAllMetadataRecords("").size());
+ }
+
+ @Test
+ public void testBackendGetAllMetadataRecordsNullPrefixThrowsNullPointerException() {
+ expectedEx.expect(IllegalArgumentException.class);
+ expectedEx.expectMessage("prefix should not be null");
+
+ SharedDataStore fds = dataStore;
+ fds.getAllMetadataRecords(null);
+ }
+
+ // DeleteMetadataRecord (Backend)
+
+ @Test
+ public void testBackendDeleteMetadataRecord() throws DataStoreException {
+ SharedDataStore fds = dataStore;
+
+ fds.addMetadataRecord(randomStream(0, 10), "name");
+ for (String name : Lists.newArrayList("", null)) {
+ if (Strings.isNullOrEmpty(name)) {
+ try {
+ fds.deleteMetadataRecord(name);
+ }
+ catch (IllegalArgumentException e) { }
+ }
+ else {
+ fds.deleteMetadataRecord(name);
+ fail();
+ }
+ }
+ assertTrue(fds.deleteMetadataRecord("name"));
+ }
+
+ // DeleteAllMetadataRecords (Backend)
+
+ @Test
+ public void testBackendDeleteAllMetadataRecordsPrefixMatchesAll() throws DataStoreException {
+ SharedDataStore fds = dataStore;
+
+ String prefixAll = "prefix1";
+ String prefixSome = "prefix1.prefix2";
+ String prefixOne = "prefix1.prefix3";
+ String prefixNone = "prefix4";
+
+ Map<String, Integer> prefixCounts = Maps.newHashMap();
+ prefixCounts.put(prefixAll, 4);
+ prefixCounts.put(prefixSome, 2);
+ prefixCounts.put(prefixOne, 1);
+ prefixCounts.put(prefixNone, 0);
+
+ for (Map.Entry<String, Integer> entry : prefixCounts.entrySet()) {
+ fds.addMetadataRecord(randomStream(1, 10), String.format("%s.testRecord1", prefixAll));
+ fds.addMetadataRecord(randomStream(2, 10), String.format("%s.testRecord2", prefixSome));
+ fds.addMetadataRecord(randomStream(3, 10), String.format("%s.testRecord3", prefixSome));
+ fds.addMetadataRecord(randomStream(4, 10), String.format("%s.testRecord4", prefixOne));
+
+ int preCount = fds.getAllMetadataRecords("").size();
+
+ fds.deleteAllMetadataRecords(entry.getKey());
+
+ int deletedCount = preCount - fds.getAllMetadataRecords("").size();
+ assertEquals(entry.getValue().intValue(), deletedCount);
+
+ fds.deleteAllMetadataRecords("");
+ }
+ }
+
+ @Test
+ public void testBackendDeleteAllMetadataRecordsNoRecordsNoChange() {
+ SharedDataStore fds = dataStore;
+
+ assertEquals(0, fds.getAllMetadataRecords("").size());
+
+ fds.deleteAllMetadataRecords("");
+
+ assertEquals(0, fds.getAllMetadataRecords("").size());
+ }
+
+ @Test
+ public void testBackendDeleteAllMetadataRecordsNullPrefixThrowsNullPointerException() {
+ expectedEx.expect(IllegalArgumentException.class);
+
+ SharedDataStore fds = dataStore;
+ fds.deleteAllMetadataRecords(null);
+ }
+}
Propchange:
jackrabbit/oak/trunk/oak-blob-plugins/src/test/java/org/apache/jackrabbit/oak/plugins/blob/datastore/SharedDataStoreTest.java
------------------------------------------------------------------------------
svn:eol-style = native