http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java deleted file mode 100644 index 2813438..0000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/KeyValueToRyaStatementFunction.java +++ /dev/null @@ -1,72 +0,0 @@ -package mvm.rya.accumulo.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConfiguration; -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolverException; - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; - -import com.google.common.base.Function; - -/** - * Date: 1/30/13 - * Time: 2:09 PM - */ -public class KeyValueToRyaStatementFunction implements Function<Map.Entry<Key, Value>, RyaStatement> { - - private TABLE_LAYOUT tableLayout; - private RyaTripleContext context; - - public KeyValueToRyaStatementFunction(TABLE_LAYOUT tableLayout, RyaTripleContext context) { - this.tableLayout = tableLayout; - this.context = context; - } - - @Override - public RyaStatement apply(Map.Entry<Key, Value> input) { - Key key = input.getKey(); - Value value = input.getValue(); - RyaStatement statement = null; - try { - statement = context.deserializeTriple(tableLayout, - new TripleRow(key.getRowData().toArray(), - key.getColumnFamilyData().toArray(), - key.getColumnQualifierData().toArray(), - key.getTimestamp(), - key.getColumnVisibilityData().toArray(), - (value != null) ? value.get() : null - )); - } catch (TripleRowResolverException e) { - throw new RuntimeException(e); - } - - return statement; - } -}
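The deleted KeyValueToRyaStatementFunction above is a Guava Function that deserializes one Accumulo key/value entry back into a RyaStatement. As a hypothetical illustration (not part of this commit), it could be applied lazily over a Scanner with Guava's Iterators.transform; the scanner, configuration, table layout, and the RyaTripleContext.getInstance(conf) factory call below are all assumptions for the sketch, not code from the diff.

import java.util.Iterator;
import java.util.Map;

import com.google.common.collect.Iterators;

import mvm.rya.accumulo.AccumuloRdfConfiguration;
import mvm.rya.accumulo.query.KeyValueToRyaStatementFunction;
import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
import mvm.rya.api.domain.RyaStatement;
import mvm.rya.api.resolver.RyaTripleContext;

import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;

public class KeyValueToRyaStatementFunctionSketch {
    // scanner is assumed to be an already-configured Scanner over the SPO table;
    // RyaTripleContext.getInstance(conf) is assumed to be the usual factory method.
    static Iterator<RyaStatement> toStatements(Scanner scanner, AccumuloRdfConfiguration conf) {
        KeyValueToRyaStatementFunction fn =
                new KeyValueToRyaStatementFunction(TABLE_LAYOUT.SPO, RyaTripleContext.getInstance(conf));
        Iterator<Map.Entry<Key, Value>> raw = scanner.iterator();
        // Iterators.transform deserializes each entry only as it is consumed.
        return Iterators.transform(raw, fn);
    }
}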
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java deleted file mode 100644 index c59cb87..0000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RangeBindingSetEntries.java +++ /dev/null @@ -1,58 +0,0 @@ -package mvm.rya.accumulo.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.openrdf.query.BindingSet; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Map; - -/** - * Class RangeBindingSetCollection - * Date: Feb 23, 2011 - * Time: 10:15:48 AM - */ -public class RangeBindingSetEntries { - public Collection<Map.Entry<Range, BindingSet>> ranges; - - public RangeBindingSetEntries() { - this(new ArrayList<Map.Entry<Range, BindingSet>>()); - } - - public RangeBindingSetEntries(Collection<Map.Entry<Range, BindingSet>> ranges) { - this.ranges = ranges; - } - - public Collection<BindingSet> containsKey(Key key) { - //TODO: need to find a better way to sort these and pull - //TODO: maybe fork/join here - Collection<BindingSet> bss = new ArrayList<BindingSet>(); - for (Map.Entry<Range, BindingSet> entry : ranges) { - if (entry.getKey().contains(key)) - bss.add(entry.getValue()); - } - return bss; - } -} http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java deleted file mode 100644 index b4333bd..0000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementBindingSetKeyValueIterator.java +++ /dev/null @@ -1,154 +0,0 @@ -package mvm.rya.accumulo.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
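For the RangeBindingSetEntries class removed above: it pairs Accumulo scan Ranges with the BindingSets that produced them, and containsKey(Key) returns every BindingSet whose Range covers a returned key. A minimal hypothetical sketch follows; the row keys, binding name, and the use of RdfCloudTripleStoreUtils.CustomEntry and MapBindingSet are illustrative assumptions, not code from this commit.

import java.util.Collection;

import mvm.rya.accumulo.query.RangeBindingSetEntries;
import mvm.rya.api.RdfCloudTripleStoreUtils;

import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.hadoop.io.Text;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.BindingSet;
import org.openrdf.query.impl.MapBindingSet;

public class RangeBindingSetEntriesSketch {
    static void demo() {
        RangeBindingSetEntries entries = new RangeBindingSetEntries();

        // Pair a scan Range with the BindingSet that produced it. CustomEntry is the
        // Map.Entry helper Rya uses elsewhere; the row bounds here are made up.
        MapBindingSet bs = new MapBindingSet();
        bs.addBinding("cpu", ValueFactoryImpl.getInstance().createURI("urn:test:litdups#cpu"));
        entries.ranges.add(new RdfCloudTripleStoreUtils.CustomEntry<Range, BindingSet>(
                new Range(new Text("row_a"), new Text("row_z")), bs));

        // Later, recover every BindingSet whose Range contains a Key returned by the scan.
        Collection<BindingSet> matches = entries.containsKey(new Key(new Text("row_m")));
        // matches would hold the BindingSet added above.
    }
}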
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Collection; -import java.util.Iterator; -import java.util.Map; -import java.util.NoSuchElementException; - -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolverException; - -import org.apache.accumulo.core.client.BatchScanner; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.ScannerBase; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.openrdf.query.BindingSet; - -/** - * Date: 7/17/12 - * Time: 11:48 AM - */ -public class RyaStatementBindingSetKeyValueIterator implements CloseableIteration<Map.Entry<RyaStatement, BindingSet>, RyaDAOException> { - private Iterator<Map.Entry<Key, Value>> dataIterator; - private TABLE_LAYOUT tableLayout; - private Long maxResults = -1L; - private ScannerBase scanner; - private boolean isBatchScanner; - private RangeBindingSetEntries rangeMap; - private Iterator<BindingSet> bsIter; - private RyaStatement statement; - private RyaTripleContext ryaContext; - - public RyaStatementBindingSetKeyValueIterator(TABLE_LAYOUT tableLayout, RyaTripleContext context, ScannerBase scannerBase, RangeBindingSetEntries rangeMap) { - this(tableLayout, ((scannerBase instanceof BatchScanner) ? 
((BatchScanner) scannerBase).iterator() : ((Scanner) scannerBase).iterator()), rangeMap, context); - this.scanner = scannerBase; - isBatchScanner = scanner instanceof BatchScanner; - } - - public RyaStatementBindingSetKeyValueIterator(TABLE_LAYOUT tableLayout, Iterator<Map.Entry<Key, Value>> dataIterator, RangeBindingSetEntries rangeMap, RyaTripleContext ryaContext) { - this.tableLayout = tableLayout; - this.rangeMap = rangeMap; - this.dataIterator = dataIterator; - this.ryaContext = ryaContext; - } - - @Override - public void close() throws RyaDAOException { - dataIterator = null; - if (scanner != null && isBatchScanner) { - ((BatchScanner) scanner).close(); - } - } - - public boolean isClosed() throws RyaDAOException { - return dataIterator == null; - } - - @Override - public boolean hasNext() throws RyaDAOException { - if (isClosed()) { - return false; - } - if (maxResults != 0) { - if (bsIter != null && bsIter.hasNext()) { - return true; - } - if (dataIterator.hasNext()) { - return true; - } else { - maxResults = 0l; - return false; - } - } - return false; - } - - @Override - public Map.Entry<RyaStatement, BindingSet> next() throws RyaDAOException { - if (!hasNext() || isClosed()) { - throw new NoSuchElementException(); - } - - try { - while (true) { - if (bsIter != null && bsIter.hasNext()) { - maxResults--; - return new RdfCloudTripleStoreUtils.CustomEntry<RyaStatement, BindingSet>(statement, bsIter.next()); - } - - if (dataIterator.hasNext()) { - Map.Entry<Key, Value> next = dataIterator.next(); - Key key = next.getKey(); - statement = ryaContext.deserializeTriple(tableLayout, - new TripleRow(key.getRowData().toArray(), key.getColumnFamilyData().toArray(), key.getColumnQualifierData().toArray(), - key.getTimestamp(), key.getColumnVisibilityData().toArray(), next.getValue().get())); - if (next.getValue() != null) { - statement.setValue(next.getValue().get()); - } - Collection<BindingSet> bindingSets = rangeMap.containsKey(key); - if (!bindingSets.isEmpty()) { - bsIter = bindingSets.iterator(); - } - } else { - break; - } - } - return null; - } catch (TripleRowResolverException e) { - throw new RyaDAOException(e); - } - } - - @Override - public void remove() throws RyaDAOException { - next(); - } - - public Long getMaxResults() { - return maxResults; - } - - public void setMaxResults(Long maxResults) { - this.maxResults = maxResults; - } -} http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java deleted file mode 100644 index f4c3081..0000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/RyaStatementKeyValueIterator.java +++ /dev/null @@ -1,107 +0,0 @@ -package mvm.rya.accumulo.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
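For the RyaStatementBindingSetKeyValueIterator removed above: it walks a scanner's key/value entries, deserializes each into a RyaStatement, and pairs it with every BindingSet whose Range matched the key. A hypothetical usage sketch follows; the BatchScanner, range map, and the RyaTripleContext.getInstance(conf) call are assumed to be prepared elsewhere.

import java.util.Map;

import mvm.rya.accumulo.AccumuloRdfConfiguration;
import mvm.rya.accumulo.query.RangeBindingSetEntries;
import mvm.rya.accumulo.query.RyaStatementBindingSetKeyValueIterator;
import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
import mvm.rya.api.domain.RyaStatement;
import mvm.rya.api.persist.RyaDAOException;
import mvm.rya.api.resolver.RyaTripleContext;

import org.apache.accumulo.core.client.BatchScanner;
import org.openrdf.query.BindingSet;

public class BindingSetIteratorSketch {
    // batchScanner and rangeMap are assumed to be configured by the query engine.
    static void drain(BatchScanner batchScanner, RangeBindingSetEntries rangeMap,
                      AccumuloRdfConfiguration conf) throws RyaDAOException {
        RyaStatementBindingSetKeyValueIterator iter = new RyaStatementBindingSetKeyValueIterator(
                TABLE_LAYOUT.SPO, RyaTripleContext.getInstance(conf), batchScanner, rangeMap);
        try {
            while (iter.hasNext()) {
                Map.Entry<RyaStatement, BindingSet> entry = iter.next();
                // entry.getKey() is the deserialized statement; entry.getValue() is the
                // BindingSet whose Range matched the underlying Accumulo key.
            }
        } finally {
            iter.close(); // closes the BatchScanner when one was supplied
        }
    }
}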
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import info.aduna.iteration.CloseableIteration; - -import java.util.Iterator; -import java.util.Map; - -import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.resolver.RyaContext; -import mvm.rya.api.resolver.RyaTripleContext; -import mvm.rya.api.resolver.triple.TripleRow; -import mvm.rya.api.resolver.triple.TripleRowResolverException; - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; - -/** - * Date: 7/17/12 - * Time: 11:48 AM - */ -public class RyaStatementKeyValueIterator implements CloseableIteration<RyaStatement, RyaDAOException> { - private Iterator<Map.Entry<Key, Value>> dataIterator; - private TABLE_LAYOUT tableLayout; - private Long maxResults = -1L; - private RyaTripleContext context; - - public RyaStatementKeyValueIterator(TABLE_LAYOUT tableLayout, RyaTripleContext context, Iterator<Map.Entry<Key, Value>> dataIterator) { - this.tableLayout = tableLayout; - this.dataIterator = dataIterator; - this.context = context; - } - - @Override - public void close() throws RyaDAOException { - dataIterator = null; - } - - public boolean isClosed() throws RyaDAOException { - return dataIterator == null; - } - - @Override - public boolean hasNext() throws RyaDAOException { - if (isClosed()) { - throw new RyaDAOException("Closed Iterator"); - } - return maxResults != 0 && dataIterator.hasNext(); - } - - @Override - public RyaStatement next() throws RyaDAOException { - if (!hasNext()) { - return null; - } - - try { - Map.Entry<Key, Value> next = dataIterator.next(); - Key key = next.getKey(); - RyaStatement statement = context.deserializeTriple(tableLayout, - new TripleRow(key.getRowData().toArray(), key.getColumnFamilyData().toArray(), key.getColumnQualifierData().toArray(), - key.getTimestamp(), key.getColumnVisibilityData().toArray(), next.getValue().get())); - if (next.getValue() != null) { - statement.setValue(next.getValue().get()); - } - maxResults--; - return statement; - } catch (TripleRowResolverException e) { - throw new RyaDAOException(e); - } - } - - @Override - public void remove() throws RyaDAOException { - next(); - } - - public Long getMaxResults() { - return maxResults; - } - - public void setMaxResults(Long maxResults) { - this.maxResults = maxResults; - } -} http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java deleted file mode 100644 index d2dcef9..0000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/query/ScannerBaseCloseableIterable.java +++ /dev/null @@ -1,56 +0,0 @@ -package mvm.rya.accumulo.query; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
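For the simpler RyaStatementKeyValueIterator removed above, a hypothetical sketch showing how its maxResults cap could be used; the entry iterator and RyaTripleContext are assumed to come from an already-configured scanner and configuration.

import java.util.Iterator;
import java.util.Map;

import mvm.rya.accumulo.query.RyaStatementKeyValueIterator;
import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
import mvm.rya.api.domain.RyaStatement;
import mvm.rya.api.persist.RyaDAOException;
import mvm.rya.api.resolver.RyaTripleContext;

import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;

public class StatementIteratorSketch {
    // entries is assumed to come from scanner.iterator(); context from RyaTripleContext.getInstance(conf).
    static void firstHundred(Iterator<Map.Entry<Key, Value>> entries, RyaTripleContext context)
            throws RyaDAOException {
        RyaStatementKeyValueIterator iter =
                new RyaStatementKeyValueIterator(TABLE_LAYOUT.SPO, context, entries);
        iter.setMaxResults(100L); // stop after 100 statements
        try {
            while (iter.hasNext()) {
                RyaStatement stmt = iter.next();
                // process stmt
            }
        } finally {
            iter.close();
        }
    }
}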
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import com.google.common.base.Preconditions; -import org.apache.accumulo.core.client.ScannerBase; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.calrissian.mango.collect.AbstractCloseableIterable; - -import java.io.IOException; -import java.util.Iterator; -import java.util.Map; - -/** - * Date: 1/30/13 - * Time: 2:15 PM - */ -public class ScannerBaseCloseableIterable extends AbstractCloseableIterable<Map.Entry<Key, Value>> { - - protected ScannerBase scanner; - - public ScannerBaseCloseableIterable(ScannerBase scanner) { - Preconditions.checkNotNull(scanner); - this.scanner = scanner; - } - - @Override - protected void doClose() throws IOException { - scanner.close(); - } - - @Override - protected Iterator<Map.Entry<Key, Value>> retrieveIterator() { - return scanner.iterator(); - } -} http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java deleted file mode 100644 index 97d2f54..0000000 --- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/utils/TimeRangeFilter.java +++ /dev/null @@ -1,87 +0,0 @@ -package mvm.rya.accumulo.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.iterators.Filter; -import org.apache.accumulo.core.iterators.IteratorEnvironment; -import org.apache.accumulo.core.iterators.OptionDescriber; -import org.apache.accumulo.core.iterators.SortedKeyValueIterator; - -import java.io.IOException; -import java.util.Map; -import java.util.TreeMap; - -/** - * Set the startTime and timeRange. The filter will only keyValues that - * are within the range [startTime - timeRange, startTime]. 
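For the ScannerBaseCloseableIterable removed above: it adapts a ScannerBase to Mango's CloseableIterable so callers can iterate the entries and then release the scanner. A hypothetical sketch, assuming the scanner is configured elsewhere and that closing the iterable delegates to scanner.close() via doClose().

import java.io.IOException;
import java.util.Map;

import mvm.rya.accumulo.query.ScannerBaseCloseableIterable;

import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;

public class ScannerIterableSketch {
    static void consume(Scanner scanner) throws IOException {
        ScannerBaseCloseableIterable iterable = new ScannerBaseCloseableIterable(scanner);
        try {
            for (Map.Entry<Key, Value> entry : iterable) {
                // consume each key/value pair
            }
        } finally {
            iterable.close();
        }
    }
}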
- */ -public class TimeRangeFilter extends Filter { - private long timeRange; - private long startTime; - public static final String TIME_RANGE_PROP = "timeRange"; - public static final String START_TIME_PROP = "startTime"; - - @Override - public boolean accept(Key k, Value v) { - long diff = startTime - k.getTimestamp(); - return !(diff > timeRange || diff < 0); - } - - @Override - public void init(SortedKeyValueIterator<Key,Value> source, Map<String,String> options, IteratorEnvironment env) throws IOException { - super.init(source, options, env); - if (options == null) { - throw new IllegalArgumentException("options must be set for TimeRangeFilter"); - } - - timeRange = -1; - String timeRange_s = options.get(TIME_RANGE_PROP); - if (timeRange_s == null) - throw new IllegalArgumentException("timeRange must be set for TimeRangeFilter"); - - timeRange = Long.parseLong(timeRange_s); - - String time = options.get(START_TIME_PROP); - if (time != null) - startTime = Long.parseLong(time); - else - startTime = System.currentTimeMillis(); - } - - @Override - public OptionDescriber.IteratorOptions describeOptions() { - Map<String, String> options = new TreeMap<String, String>(); - options.put(TIME_RANGE_PROP, "time range from the startTime (milliseconds)"); - options.put(START_TIME_PROP, "if set, use the given value as the absolute time in milliseconds as the start time in the time range."); - return new OptionDescriber.IteratorOptions("timeRangeFilter", "TimeRangeFilter removes entries with timestamps outside of the given time range: " + - "[startTime - timeRange, startTime]", - options, null); - } - - @Override - public boolean validateOptions(Map<String, String> options) { - Long.parseLong(options.get(TIME_RANGE_PROP)); - return true; - } -} http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java deleted file mode 100644 index b7c9079..0000000 --- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRdfConfigurationTest.java +++ /dev/null @@ -1,59 +0,0 @@ -package mvm.rya.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
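For the TimeRangeFilter removed above: it is a server-side Accumulo Filter that keeps only entries whose timestamp falls within [startTime - timeRange, startTime]. A hypothetical sketch of attaching it to a scanner via an IteratorSetting; the priority, iterator name, and the one-hour window are arbitrary illustrative choices, not values from this commit.

import mvm.rya.accumulo.utils.TimeRangeFilter;

import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.Scanner;

public class TimeRangeFilterSketch {
    static void attach(Scanner scanner) {
        // Only entries with timestamps in the last hour (relative to the given start time)
        // will pass the filter on the server side.
        IteratorSetting setting = new IteratorSetting(30, "timeRangeFilter", TimeRangeFilter.class);
        setting.addOption(TimeRangeFilter.TIME_RANGE_PROP, Long.toString(60L * 60L * 1000L));
        setting.addOption(TimeRangeFilter.START_TIME_PROP, Long.toString(System.currentTimeMillis()));
        scanner.addScanIterator(setting);
    }
}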
- */ - - - -import org.apache.accumulo.core.security.Authorizations; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Arrays; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -/** - * Date: 1/28/13 - * Time: 8:36 AM - */ -public class AccumuloRdfConfigurationTest { - private static final Logger logger = LoggerFactory.getLogger(AccumuloRdfConfigurationTest.class); - - @Test - public void testAuths() { - String[] arr = {"U", "FOUO"}; - String str = "U,FOUO"; - Authorizations auths = new Authorizations(arr); - - AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - - conf.setAuths(arr); - assertTrue(Arrays.equals(arr, conf.getAuths())); - assertEquals(str, conf.getAuth()); - assertEquals(auths, conf.getAuthorizations()); - - conf.setAuth(str); - assertTrue(Arrays.equals(arr, conf.getAuths())); - assertEquals(str, conf.getAuth()); - assertEquals(auths, conf.getAuthorizations()); - } -} http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java deleted file mode 100644 index ab4528b..0000000 --- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/AccumuloRyaDAOTest.java +++ /dev/null @@ -1,665 +0,0 @@ -package mvm.rya.accumulo; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - - -import info.aduna.iteration.CloseableIteration; -import mvm.rya.accumulo.query.AccumuloRyaQueryEngine; -import mvm.rya.api.RdfCloudTripleStoreUtils; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaType; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.persist.RyaDAOException; -import mvm.rya.api.persist.query.RyaQuery; -import mvm.rya.api.resolver.RdfToRyaConversions; -import mvm.rya.api.resolver.RyaContext; - -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.calrissian.mango.collect.CloseableIterable; -import org.calrissian.mango.collect.FluentCloseableIterable; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; -import org.openrdf.model.vocabulary.XMLSchema; -import org.openrdf.query.BindingSet; - -import java.util.*; - -import static org.junit.Assert.*; - -/** - * Class AccumuloRdfDAOTest - * Date: Mar 7, 2012 - * Time: 9:42:28 AM - */ -public class AccumuloRyaDAOTest { - - private AccumuloRyaDAO dao; - private ValueFactory vf = new ValueFactoryImpl(); - static String litdupsNS = "urn:test:litdups#"; - private AccumuloRdfConfiguration conf; - private Connector connector; - - @Before - public void setUp() throws Exception { - dao = new AccumuloRyaDAO(); - connector = new MockInstance().getConnector("", ""); - dao.setConnector(connector); - conf = new AccumuloRdfConfiguration(); - dao.setConf(conf); - dao.init(); - } - - @After - public void tearDown() throws Exception { - dao.purge(conf); - dao.destroy(); - } - - @Test - public void testAdd() throws Exception { - RyaURI cpu = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cpu")); - RyaURI loadPerc = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "loadPerc")); - RyaURI uri1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "uri1")); - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - - CloseableIteration<RyaStatement, RyaDAOException> iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), conf); - int count = 0; - while (iter.hasNext()) { - assertTrue(uri1.equals(iter.next().getObject())); - count++; - } - iter.close(); - assertEquals(1, count); - - dao.delete(new RyaStatement(cpu, loadPerc, null), conf); - - iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), conf); - count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(0, count); - } - - @Test - public void testDeleteDiffVisibility() throws Exception { - RyaURI cpu = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cpu")); - RyaURI loadPerc = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "loadPerc")); - RyaURI uri1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "uri1")); - RyaStatement stmt1 = new RyaStatement(cpu, loadPerc, uri1, null, "1", "vis1".getBytes()); - dao.add(stmt1); - RyaStatement stmt2 = new RyaStatement(cpu, loadPerc, uri1, null, "2", "vis2".getBytes()); - dao.add(stmt2); - - AccumuloRdfConfiguration cloneConf = conf.clone(); - cloneConf.setAuth("vis1,vis2"); - - CloseableIteration<RyaStatement, RyaDAOException> iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), cloneConf); - int count = 0; - while (iter.hasNext()) { - iter.next(); - count++; - } - iter.close(); - assertEquals(2, 
count); - - dao.delete(stmt1, cloneConf); - - iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), cloneConf); - count = 0; - while (iter.hasNext()) { - iter.next(); - count++; - } - iter.close(); - assertEquals(1, count); - } - - @Test - public void testDeleteDiffTimestamp() throws Exception { - RyaURI cpu = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cpu")); - RyaURI loadPerc = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "loadPerc")); - RyaURI uri1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "uri1")); - RyaStatement stmt1 = new RyaStatement(cpu, loadPerc, uri1, null, "1", null, null, 100l); - dao.add(stmt1); - RyaStatement stmt2 = new RyaStatement(cpu, loadPerc, uri1, null, "2", null, null, 100l); - dao.add(stmt2); - - int resultSize = FluentCloseableIterable.from(dao.getQueryEngine().query( - RyaQuery.builder(new RyaStatement(cpu, loadPerc, null)).build())).autoClose().size(); - assertEquals(2, resultSize); - - final RyaStatement addStmt = new RyaStatement(cpu, loadPerc, uri1, null, "1", - null, null, 101l); - dao.delete(stmt1, conf); - dao.add(addStmt); - - resultSize = FluentCloseableIterable.from(dao.getQueryEngine().query( - RyaQuery.builder(new RyaStatement(cpu, loadPerc, null)).build())).autoClose().size(); - assertEquals(2, resultSize); //the delete marker should not delete the new stmt - } - - @Test - public void testDelete() throws Exception { - RyaURI predicate = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred")); - RyaURI subj = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "subj")); - - // create a "bulk load" of 10,000 statements - int statement_count = 10000; - for (int i = 0 ; i < statement_count ; i++){ - //make the statement very large so we will get a lot of random flushes - RyaURI obj = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, String.format("object%050d",i))); - RyaStatement stmt = new RyaStatement(subj, predicate, obj); - dao.add(stmt); - } - - CloseableIteration<RyaStatement, RyaDAOException> iter; - - //check to see if all of the statements made it to the subj table - //delete based on the data in the subj table - RyaStatement subjQuery = new RyaStatement(subj, null, null); - iter = dao.getQueryEngine().query(subjQuery, conf); - List<RyaStatement> stmts = new ArrayList<RyaStatement>(); - while (iter.hasNext()) { - stmts.add(iter.next()); - } - assertEquals(statement_count, stmts.size()); - dao.delete(stmts.iterator(), conf); - - // check statements in the predicate table - RyaStatement predQuery = new RyaStatement(null, predicate, null); - iter = dao.getQueryEngine().query(predQuery, conf); - int count = 0; - while (iter.hasNext()) { - count++; - } - iter.close(); - assertEquals(0, count); - } - - @Test - public void testAddEmptyString() throws Exception { - RyaURI cpu = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cpu")); - RyaURI loadPerc = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "loadPerc")); - RyaType empty = new RyaType(""); - dao.add(new RyaStatement(cpu, loadPerc, empty)); - - CloseableIteration<RyaStatement, RyaDAOException> iter = dao.getQueryEngine().query(new RyaStatement(cpu, loadPerc, null), conf); - while (iter.hasNext()) { - assertEquals("", iter.next().getObject().getData()); - } - iter.close(); - } - - @Test - public void testMaxResults() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri1"))); 
- dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri2"))); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri3"))); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri4"))); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri5"))); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf); - long limit = 3l; - queryConf.setLimit(limit); - - CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - int count = 0; - while (iter.hasNext()) { - iter.next().getObject(); - count++; - } - iter.close(); - assertEquals(limit, count); - } - - @Test - public void testAddValue() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - String myval = "myval"; - dao.add(new RyaStatement(cpu, loadPerc, uri1, null, null, null, myval.getBytes())); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), conf); - assertTrue(iter.hasNext()); - assertEquals(myval, new String(iter.next().getValue())); - iter.close(); - } - - @Test - public void testAddCv() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - RyaURI uri2 = new RyaURI(litdupsNS + "uri2"); - RyaURI uri3 = new RyaURI(litdupsNS + "uri3"); - byte[] colVisABC = "A|B|C".getBytes(); - byte[] colVisAB = "A|B".getBytes(); - byte[] colVisA = "A".getBytes(); - dao.add(new RyaStatement(cpu, loadPerc, uri1, null, null, colVisABC)); - dao.add(new RyaStatement(cpu, loadPerc, uri2, null, null, colVisAB)); - dao.add(new RyaStatement(cpu, loadPerc, uri3, null, null, colVisA)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - //query with no auth - CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), conf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - assertEquals(0, count); - iter.close(); - - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(); - queryConf.setAuth("B"); - iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - count = 0; - while (iter.hasNext()) { - iter.next(); - count++; - } - iter.close(); - assertEquals(2, count); - - queryConf.setAuth("A"); - iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - count = 0; - while (iter.hasNext()) { - iter.next(); - count++; - } - iter.close(); - assertEquals(3, count); - } - - @Test - public void testTTL() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - long current = System.currentTimeMillis(); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri1"), null, null, null, null, current)); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri2"), null, null, null, null, current - 1010l)); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri3"), null, null, null, null, current - 2010l)); - dao.add(new RyaStatement(cpu, loadPerc, new RyaURI(litdupsNS + "uri4"), null, null, null, null, current - 3010l)); - dao.add(new RyaStatement(cpu, loadPerc, new 
RyaURI(litdupsNS + "uri5"), null, null, null, null, current - 4010l)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - AccumuloRdfConfiguration queryConf = conf.clone(); - queryConf.setTtl(3000l); - - CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - int count = 0; - while (iter.hasNext()) { - iter.next().getObject(); - count++; - } - iter.close(); - assertEquals(3, count); - - queryConf.setStartTime(current - 3000l); - iter = queryEngine.query(new RyaStatement(cpu, loadPerc, null), queryConf); - count = 0; - while (iter.hasNext()) { - iter.next().getObject(); - count++; - } - iter.close(); - assertEquals(2, count); - } - - @Test - public void testGetNamespace() throws Exception { - dao.addNamespace("ns", litdupsNS); - assertEquals(litdupsNS, dao.getNamespace("ns")); - dao.removeNamespace("ns"); - assertNull(dao.getNamespace("ns")); - } - - //TOOD: Add test for set of queries - @Test - public void testQuery() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - RyaURI uri2 = new RyaURI(litdupsNS + "uri2"); - RyaURI uri3 = new RyaURI(litdupsNS + "uri3"); - RyaURI uri4 = new RyaURI(litdupsNS + "uri4"); - RyaURI uri5 = new RyaURI(litdupsNS + "uri5"); - RyaURI uri6 = new RyaURI(litdupsNS + "uri6"); - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - dao.add(new RyaStatement(cpu, loadPerc, uri2)); - dao.add(new RyaStatement(cpu, loadPerc, uri3)); - dao.add(new RyaStatement(cpu, loadPerc, uri4)); - dao.add(new RyaStatement(cpu, loadPerc, uri5)); - dao.add(new RyaStatement(cpu, loadPerc, uri6)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - Collection<RyaStatement> coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.batchQuery(coll, conf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(2, count); - - //now use batchscanner - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf); - queryConf.setMaxRangesForScanner(2); - - coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - coll.add(new RyaStatement(null, loadPerc, uri3)); - coll.add(new RyaStatement(null, loadPerc, uri4)); - iter = queryEngine.batchQuery(coll, queryConf); - assertTrue(iter.hasNext()); //old code had a weird behaviour that could not perform hasNext consecutively - assertTrue(iter.hasNext()); - assertTrue(iter.hasNext()); - count = 0; - while (iter.hasNext()) { - count++; - assertTrue(iter.hasNext()); - iter.next(); - } - iter.close(); - assertEquals(4, count); - } - - @Test - public void testQueryDates() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaType uri1 = new RyaType(XMLSchema.DATETIME, "2000-01-01"); - RyaType uri2 = new RyaType(XMLSchema.DATETIME, "2000-01-01TZ"); - RyaType uri3 = new RyaType(XMLSchema.DATETIME, "2000-01-01T00:00:01.111Z"); - RyaType uri4 = new RyaType(XMLSchema.DATETIME, "2000-01-01T00:00:01"); - RyaType uri5 = new RyaType(XMLSchema.DATETIME, "2000-01-01T00:00:01.111"); - RyaType uri6 = new RyaType(XMLSchema.DATETIME, "2000-01-01T00:00:01Z"); - RyaType uri7 = new RyaType(XMLSchema.DATETIME, 
"-2000-01-01T00:00:01Z"); - RyaType uri8 = new RyaType(XMLSchema.DATETIME, "111-01-01T00:00:01Z"); - RyaType uri9 = new RyaType(XMLSchema.DATETIME, "12345-01-01T00:00:01Z"); - - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - dao.add(new RyaStatement(cpu, loadPerc, uri2)); - dao.add(new RyaStatement(cpu, loadPerc, uri3)); - dao.add(new RyaStatement(cpu, loadPerc, uri4)); - dao.add(new RyaStatement(cpu, loadPerc, uri5)); - dao.add(new RyaStatement(cpu, loadPerc, uri6)); - dao.add(new RyaStatement(cpu, loadPerc, uri7)); - dao.add(new RyaStatement(cpu, loadPerc, uri8)); - dao.add(new RyaStatement(cpu, loadPerc, uri9)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - Collection<RyaStatement> coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.batchQuery(coll, conf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(2, count); - - //now use batchscanner - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf); - queryConf.setMaxRangesForScanner(2); - - coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - coll.add(new RyaStatement(null, loadPerc, uri3)); - coll.add(new RyaStatement(null, loadPerc, uri4)); - coll.add(new RyaStatement(null, loadPerc, uri5)); - coll.add(new RyaStatement(null, loadPerc, uri6)); - coll.add(new RyaStatement(null, loadPerc, uri7)); - coll.add(new RyaStatement(null, loadPerc, uri8)); - coll.add(new RyaStatement(null, loadPerc, uri9)); - iter = queryEngine.batchQuery(coll, queryConf); - count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(9, count); - } - - @Test - public void testQueryCollectionRegex() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - RyaURI uri2 = new RyaURI(litdupsNS + "uri2"); - RyaURI uri3 = new RyaURI(litdupsNS + "uri3"); - RyaURI uri4 = new RyaURI(litdupsNS + "uri4"); - RyaURI uri5 = new RyaURI(litdupsNS + "uri5"); - RyaURI uri6 = new RyaURI(litdupsNS + "uri6"); - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - dao.add(new RyaStatement(cpu, loadPerc, uri2)); - dao.add(new RyaStatement(cpu, loadPerc, uri3)); - dao.add(new RyaStatement(cpu, loadPerc, uri4)); - dao.add(new RyaStatement(cpu, loadPerc, uri5)); - dao.add(new RyaStatement(cpu, loadPerc, uri6)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - Collection<RyaStatement> coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - conf.setRegexPredicate(loadPerc.getData()); - CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.batchQuery(coll, conf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(2, count); - - conf.setRegexPredicate("notLoadPerc"); - iter = queryEngine.batchQuery(coll, conf); - count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(0, count); - } - - @Test - public void testQueryCollectionRegexWBatchScanner() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaURI uri1 = new RyaURI(litdupsNS + "uri1"); - RyaURI uri2 = new 
RyaURI(litdupsNS + "uri2"); - RyaURI uri3 = new RyaURI(litdupsNS + "uri3"); - RyaURI uri4 = new RyaURI(litdupsNS + "uri4"); - RyaURI uri5 = new RyaURI(litdupsNS + "uri5"); - RyaURI uri6 = new RyaURI(litdupsNS + "uri6"); - dao.add(new RyaStatement(cpu, loadPerc, uri1)); - dao.add(new RyaStatement(cpu, loadPerc, uri2)); - dao.add(new RyaStatement(cpu, loadPerc, uri3)); - dao.add(new RyaStatement(cpu, loadPerc, uri4)); - dao.add(new RyaStatement(cpu, loadPerc, uri5)); - dao.add(new RyaStatement(cpu, loadPerc, uri6)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - AccumuloRdfConfiguration queryConf = new AccumuloRdfConfiguration(conf); - queryConf.setMaxRangesForScanner(1); - - Collection<RyaStatement> coll = new ArrayList(); - coll.add(new RyaStatement(null, loadPerc, uri1)); - coll.add(new RyaStatement(null, loadPerc, uri2)); - conf.setRegexPredicate(loadPerc.getData()); - CloseableIteration<RyaStatement, RyaDAOException> iter = queryEngine.batchQuery(coll, queryConf); - int count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(2, count); - - queryConf.setRegexPredicate("notLoadPerc"); - iter = queryEngine.batchQuery(coll, queryConf); - count = 0; - while (iter.hasNext()) { - count++; - iter.next(); - } - iter.close(); - assertEquals(0, count); - } - - @Test - public void testLiteralTypes() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaType longLit = new RyaType(XMLSchema.LONG, "3"); - - dao.add(new RyaStatement(cpu, loadPerc, longLit)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - CloseableIteration<RyaStatement, RyaDAOException> query = queryEngine.query(new RyaStatement(cpu, null, null), conf); - assertTrue(query.hasNext()); - RyaStatement next = query.next(); - assertEquals(new Long(longLit.getData()), new Long(next.getObject().getData())); - query.close(); - - RyaType doubleLit = new RyaType(XMLSchema.DOUBLE, "2.0"); - - dao.add(new RyaStatement(cpu, loadPerc, doubleLit)); - - query = queryEngine.query(new RyaStatement(cpu, loadPerc, doubleLit), conf); - assertTrue(query.hasNext()); - next = query.next(); - assertEquals(Double.parseDouble(doubleLit.getData()), Double.parseDouble(next.getObject().getData()), 0.001); - query.close(); - } - - @Test - public void testSameLiteralStringTypes() throws Exception { - RyaURI cpu = new RyaURI(litdupsNS + "cpu"); - RyaURI loadPerc = new RyaURI(litdupsNS + "loadPerc"); - RyaType longLit = new RyaType(XMLSchema.LONG, "10"); - RyaType strLit = new RyaType(XMLSchema.STRING, new String(RyaContext.getInstance().serializeType(longLit)[0])); - - RyaStatement expected = new RyaStatement(cpu, loadPerc, longLit); - dao.add(expected); - dao.add(new RyaStatement(cpu, loadPerc, strLit)); - - AccumuloRyaQueryEngine queryEngine = dao.getQueryEngine(); - - CloseableIteration<RyaStatement, RyaDAOException> query = queryEngine.query(new RyaStatement(cpu, loadPerc, longLit), conf); - assertTrue(query.hasNext()); - RyaStatement next = query.next(); - assertEquals(new Long(longLit.getData()), new Long(next.getObject().getData())); - assertEquals(longLit.getDataType(), next.getObject().getDataType()); - assertFalse(query.hasNext()); - query.close(); - } - - @Test - public void testPurge() throws RyaDAOException, TableNotFoundException { - dao.add(newRyaStatement()); - assertFalse("table should not be empty", areTablesEmpty()); - - dao.purge(conf); - assertTrue("table should be empty", areTablesEmpty()); - 
//assertNotNull(dao.getVersion()); - } - - @Test - public void testPurgeDoesNotBreakBatchWriters() throws TableNotFoundException, RyaDAOException { - dao.purge(conf); - assertTrue("table should be empty", areTablesEmpty()); - - dao.add(newRyaStatement()); - assertFalse("table should not be empty", areTablesEmpty()); - } - - @Test - public void testDropAndDestroy() throws RyaDAOException { - assertTrue(dao.isInitialized()); - dao.dropAndDestroy(); - for (String tableName : dao.getTables()) { - assertFalse(tableExists(tableName)); - } - assertFalse(dao.isInitialized()); - } - - private boolean areTablesEmpty() throws TableNotFoundException { - for (String table : dao.getTables()) { - if (tableExists(table)) { - // TODO: filter out version - if (createScanner(table).iterator().hasNext()) { - return false; - } - } - } - return true; - } - - private boolean tableExists(String tableName) { - return dao.getConnector().tableOperations().exists(tableName); - } - - private Scanner createScanner(String tableName) throws TableNotFoundException { - return dao.getConnector().createScanner(tableName, conf.getAuthorizations()); - } - - private RyaStatement newRyaStatement() { - RyaURI subject = new RyaURI(litdupsNS + randomString()); - RyaURI predicate = new RyaURI(litdupsNS + randomString()); - RyaType object = new RyaType(randomString()); - - return new RyaStatement(subject, predicate, object); - } - - private String randomString() { - return UUID.randomUUID().toString(); - } -} http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java deleted file mode 100644 index 7c3331d..0000000 --- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/DefineTripleQueryRangeFactoryTest.java +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -//package mvm.rya.accumulo; - -// -//import junit.framework.TestCase; -//import mvm.rya.accumulo.AccumuloRdfConfiguration; -//import mvm.rya.accumulo.DefineTripleQueryRangeFactory; -//import mvm.rya.accumulo.AccumuloRdfConfiguration; -//import mvm.rya.accumulo.DefineTripleQueryRangeFactory; -//import mvm.rya.api.domain.RangeValue; -//import org.apache.accumulo.core.data.Range; -//import org.openrdf.model.URI; -//import org.openrdf.model.Value; -//import org.openrdf.model.ValueFactory; -//import org.openrdf.model.impl.ValueFactoryImpl; -// -//import java.util.Map; -// -//import static mvm.rya.api.RdfCloudTripleStoreConstants.*; -// -///** -// */ -//public class DefineTripleQueryRangeFactoryTest extends TestCase { -// -// public static final String DELIM_BYTES_STR = new String(DELIM_BYTES); -// public static final String URI_MARKER_STR = "\u0007"; -// public static final String RANGE_ENDKEY_SUFFIX = "\u0000"; -// DefineTripleQueryRangeFactory factory = new DefineTripleQueryRangeFactory(); -// ValueFactory vf = ValueFactoryImpl.getInstance(); -// static String litdupsNS = "urn:test:litdups#"; -// -// private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); -// -// public void testSPOCases() throws Exception { -// URI cpu = vf.createURI(litdupsNS, "cpu"); -// URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); -// URI obj = vf.createURI(litdupsNS, "uri1"); -// -// //spo -// Map.Entry<TABLE_LAYOUT, Range> entry = -// factory.defineRange(cpu, loadPerc, obj, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// String expected_start = URI_MARKER_STR + cpu.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + loadPerc.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// -// //sp -// entry = factory.defineRange(cpu, loadPerc, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// expected_start = URI_MARKER_STR + cpu.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + loadPerc.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //s -// entry = factory.defineRange(cpu, null, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// expected_start = URI_MARKER_STR + cpu.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //all -// entry = factory.defineRange(null, null, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// assertEquals("", -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(new String(new byte[]{Byte.MAX_VALUE}) + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// public void testSPOCasesWithRanges() throws Exception { -// URI subj_start = vf.createURI(litdupsNS, "subj_start"); -// URI subj_end = vf.createURI(litdupsNS, "subj_stop"); -// URI pred_start = vf.createURI(litdupsNS, "pred_start"); -// URI pred_end = vf.createURI(litdupsNS, "pred_stop"); -// URI obj_start = vf.createURI(litdupsNS, "obj_start"); -// URI obj_end = 
vf.createURI(litdupsNS, "obj_stop"); -// -// Value subj = new RangeValue(subj_start, subj_end); -// Value pred = new RangeValue(pred_start, pred_end); -// Value obj = new RangeValue(obj_start, obj_end); -// -// //spo - o has range -// Map.Entry<TABLE_LAYOUT, Range> entry = -// factory.defineRange(subj_start, pred_start, obj, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// String expected_start = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// String expected_end = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //sp - p has range -// entry = factory.defineRange(subj_start, pred, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// expected_start = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + pred_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// expected_end = URI_MARKER_STR + subj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + pred_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //s - s has range -// entry = factory.defineRange(subj, null, null, conf); -// assertEquals(TABLE_LAYOUT.SPO, entry.getKey()); -// expected_start = URI_MARKER_STR + subj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// expected_end = URI_MARKER_STR + subj_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// public void testPOCases() throws Exception { -// URI loadPerc = vf.createURI(litdupsNS, "loadPerc"); -// URI obj = vf.createURI(litdupsNS, "uri1"); -// -// //po -// Map.Entry<TABLE_LAYOUT, Range> entry = -// factory.defineRange(null, loadPerc, obj, conf); -// assertEquals(TABLE_LAYOUT.PO, entry.getKey()); -// String expected_start = URI_MARKER_STR + loadPerc.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //p -// entry = factory.defineRange(null, loadPerc, null, conf); -// assertEquals(TABLE_LAYOUT.PO, entry.getKey()); -// expected_start = URI_MARKER_STR + loadPerc.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// public void testPOCasesWithRanges() throws Exception { -// URI pred_start = vf.createURI(litdupsNS, "pred_start"); -// URI pred_end = vf.createURI(litdupsNS, "pred_stop"); -// URI obj_start = vf.createURI(litdupsNS, "obj_start"); -// URI obj_end = vf.createURI(litdupsNS, "obj_stop"); -// -// Value pred = new RangeValue(pred_start, pred_end); -// Value obj = new RangeValue(obj_start, obj_end); -// -// 
//po -// Map.Entry<TABLE_LAYOUT, Range> entry = -// factory.defineRange(null, pred_start, obj, conf); -// assertEquals(TABLE_LAYOUT.PO, entry.getKey()); -// String expected_start = URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// String expected_end = URI_MARKER_STR + pred_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + obj_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //p -// entry = factory.defineRange(null, pred, null, conf); -// assertEquals(TABLE_LAYOUT.PO, entry.getKey()); -// expected_start = URI_MARKER_STR + pred_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// expected_end = URI_MARKER_STR + pred_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// public void testOSPCases() throws Exception { -// URI cpu = vf.createURI(litdupsNS, "cpu"); -// URI obj = vf.createURI(litdupsNS, "uri1"); -// -// //so -// Map.Entry<TABLE_LAYOUT, Range> entry = -// factory.defineRange(cpu, null, obj, conf); -// assertEquals(TABLE_LAYOUT.OSP, entry.getKey()); -// String expected_start = URI_MARKER_STR + obj.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + cpu.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //o -// entry = factory.defineRange(null, null, obj, conf); -// assertEquals(TABLE_LAYOUT.OSP, entry.getKey()); -// expected_start = URI_MARKER_STR + obj.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// assertEquals(expected_start + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -// -// public void testOSPCasesWithRanges() throws Exception { -// URI subj_start = vf.createURI(litdupsNS, "subj_start"); -// URI subj_end = vf.createURI(litdupsNS, "subj_stop"); -// URI obj_start = vf.createURI(litdupsNS, "obj_start"); -// URI obj_end = vf.createURI(litdupsNS, "obj_stop"); -// -// Value subj = new RangeValue(subj_start, subj_end); -// Value obj = new RangeValue(obj_start, obj_end); -// -// //so - s should be the range -// Map.Entry<TABLE_LAYOUT, Range> entry = -// factory.defineRange(subj, null, obj_start, conf); -// assertEquals(TABLE_LAYOUT.OSP, entry.getKey()); -// String expected_start = URI_MARKER_STR + obj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + subj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// String expected_end = URI_MARKER_STR + obj_start.stringValue() + DELIM_BYTES_STR + -// URI_MARKER_STR + subj_end.stringValue(); -// assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// -// //o - o is range -// entry = factory.defineRange(null, null, obj, conf); -// assertEquals(TABLE_LAYOUT.OSP, entry.getKey()); -// expected_start = URI_MARKER_STR + obj_start.stringValue(); -// assertEquals(expected_start, -// entry.getValue().getStartKey().getRow().toString()); -// expected_end = URI_MARKER_STR + obj_end.stringValue(); -// 
assertEquals(expected_end + DELIM_STOP + RANGE_ENDKEY_SUFFIX, -// entry.getValue().getEndKey().getRow().toString()); -// } -// -//} http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java ---------------------------------------------------------------------- diff --git a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java b/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java deleted file mode 100644 index bda73e2..0000000 --- a/dao/accumulo.rya/src/test/java/mvm/rya/accumulo/mr/eval/AccumuloRdfCountToolTest.java +++ /dev/null @@ -1,282 +0,0 @@ -package mvm.rya.accumulo.mr.eval; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - - -import mvm.rya.accumulo.AccumuloRdfConfiguration; -import mvm.rya.accumulo.AccumuloRyaDAO; -import mvm.rya.api.RdfCloudTripleStoreConstants; -import mvm.rya.api.domain.RyaStatement; -import mvm.rya.api.domain.RyaURI; -import mvm.rya.api.resolver.RdfToRyaConversions; -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.Connector; -import org.apache.accumulo.core.client.Scanner; -import org.apache.accumulo.core.client.admin.SecurityOperations; -import org.apache.accumulo.core.client.mock.MockInstance; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.PartialKey; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.TablePermission; -import org.apache.hadoop.io.Text; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.openrdf.model.ValueFactory; -import org.openrdf.model.impl.ValueFactoryImpl; - -import java.util.HashMap; -import java.util.Map; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -/** - * Created by IntelliJ IDEA. - * Date: 4/24/12 - * Time: 5:05 PM - * To change this template use File | Settings | File Templates. 
- */ -@Ignore -public class AccumuloRdfCountToolTest { - - private String user = "user"; - private String pwd = "pwd"; - private String instance = "myinstance"; - private String tablePrefix = "t_"; - private Authorizations auths = Constants.NO_AUTHS; - private Connector connector; - - private AccumuloRyaDAO dao; - private ValueFactory vf = new ValueFactoryImpl(); - private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration(); - static String litdupsNS = "urn:test:litdups#"; - - @Before - public void setUp() throws Exception { - connector = new MockInstance(instance).getConnector(user, pwd.getBytes()); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - connector.tableOperations().create(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - SecurityOperations secOps = connector.securityOperations(); - secOps.createUser(user, pwd.getBytes(), auths); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, TablePermission.READ); - secOps.grantTablePermission(user, tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, TablePermission.WRITE); - - dao = new AccumuloRyaDAO(); - dao.setConnector(connector); - conf.setTablePrefix(tablePrefix); - dao.setConf(conf); - dao.init(); - } - - @After - public void tearDown() throws Exception { - dao.destroy(); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_SPO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_PO_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_OSP_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_NS_SUFFIX); - connector.tableOperations().delete(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX); - } - - @Test - public void testMR() throws Exception { - RyaURI test1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "test1")); - RyaURI pred1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred1")); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(0)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(1)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(2)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(3)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(4)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(5)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(6)))); - dao.add(new 
RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(7)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(8)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(9)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(10)))); - - AccumuloRdfCountTool.main(new String[]{ - "-Dac.mock=true", - "-Dac.instance=" + instance, - "-Dac.username=" + user, - "-Dac.pwd=" + pwd, - "-Drdf.tablePrefix=" + tablePrefix, - }); - - Map<String, Key> expectedValues = new HashMap<String, Key>(); - String row = test1.getData(); - expectedValues.put(row, - new Key(new Text(row), - RdfCloudTripleStoreConstants.SUBJECT_CF_TXT, - RdfCloudTripleStoreConstants.EMPTY_TEXT)); - row = pred1.getData(); - expectedValues.put(row, - new Key(new Text(row), - RdfCloudTripleStoreConstants.PRED_CF_TXT, - RdfCloudTripleStoreConstants.EMPTY_TEXT)); - Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths); - scanner.setRange(new Range()); - int count = 0; - for (Map.Entry<Key, Value> entry : scanner) { - assertTrue(expectedValues.get(entry.getKey().getRow().toString()).equals(entry.getKey(), PartialKey.ROW_COLFAM_COLQUAL)); - assertEquals(11, Long.parseLong(entry.getValue().toString())); - count++; - } - assertEquals(2, count); - } - -// public void testMRObject() throws Exception { -// URI pred1 = vf.createURI(litdupsNS, "pred1"); -// Literal literal = vf.createLiteral(0); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test0"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test1"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test2"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test3"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test4"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test5"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test6"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test7"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test8"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test9"), pred1, literal)); -// dao.add(new StatementImpl(vf.createURI(litdupsNS, "test10"), pred1, literal)); -// dao.commit(); -// -// AccumuloRdfCountTool.main(new String[]{ -// "-Dac.mock=true", -// "-Dac.instance=" + instance, -// "-Dac.username=" + user, -// "-Dac.pwd=" + pwd, -// "-Drdf.tablePrefix=" + tablePrefix, -// }); -// -// Map<String, Key> expectedValues = new HashMap<String, Key>(); -// byte[] row_bytes = RdfCloudTripleStoreUtils.writeValue(literal); -// expectedValues.put(new String(row_bytes), -// new Key(new Text(row_bytes), -// RdfCloudTripleStoreConstants.OBJ_CF_TXT, -// RdfCloudTripleStoreConstants.INFO_TXT)); -// row_bytes = RdfCloudTripleStoreUtils.writeValue(pred1); -// expectedValues.put(new String(row_bytes), -// new Key(new Text(row_bytes), -// RdfCloudTripleStoreConstants.PRED_CF_TXT, -// RdfCloudTripleStoreConstants.INFO_TXT)); -// Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths); -// scanner.setRange(new Range()); -// int count = 0; -// for (Map.Entry<Key, Value> entry : scanner) { -// assertTrue(expectedValues.get(entry.getKey().getRow().toString()).equals(entry.getKey(), 
PartialKey.ROW_COLFAM_COLQUAL)); -// assertEquals(11, Long.parseLong(entry.getValue().toString())); -// count++; -// } -// assertEquals(2, count); -// } - - @Test - public void testTTL() throws Exception { - RyaURI test1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "test1")); - RyaURI pred1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred1")); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(0)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(1)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(2)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(3)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(4)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(5)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(6)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(7)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(8)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(9)))); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(10)))); - - AccumuloRdfCountTool.main(new String[]{ - "-Dac.mock=true", - "-Dac.instance=" + instance, - "-Dac.username=" + user, - "-Dac.pwd=" + pwd, - "-Dac.ttl=0", - "-Drdf.tablePrefix=" + tablePrefix, - }); - - Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths); - scanner.setRange(new Range()); - int count = 0; - for (Map.Entry<Key, Value> entry : scanner) { - count++; - } - assertEquals(0, count); - } - - @Test - public void testContext() throws Exception { - RyaURI test1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "test1")); - RyaURI pred1 = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "pred1")); - RyaURI cntxt = RdfToRyaConversions.convertURI(vf.createURI(litdupsNS, "cntxt")); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(0)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(1)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(2)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(3)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(4)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(5)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(6)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(7)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(8)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(9)), cntxt)); - dao.add(new RyaStatement(test1, pred1, RdfToRyaConversions.convertLiteral(vf.createLiteral(10)), cntxt)); - - AccumuloRdfCountTool.main(new String[]{ - "-Dac.mock=true", - "-Dac.instance=" + instance, - "-Dac.username=" + user, - "-Dac.pwd=" + 
pwd, - "-Drdf.tablePrefix=" + tablePrefix, - }); - - Map<String, Key> expectedValues = new HashMap<String, Key>(); - String row = test1.getData(); - expectedValues.put(row, - new Key(new Text(row), - RdfCloudTripleStoreConstants.SUBJECT_CF_TXT, - new Text(cntxt.getData()))); - row = pred1.getData(); - expectedValues.put(row, - new Key(new Text(row), - RdfCloudTripleStoreConstants.PRED_CF_TXT, - new Text(cntxt.getData()))); - Scanner scanner = connector.createScanner(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX, auths); - scanner.setRange(new Range()); - int count = 0; - for (Map.Entry<Key, Value> entry : scanner) { - assertTrue(expectedValues.get(entry.getKey().getRow().toString()).equals(entry.getKey(), PartialKey.ROW_COLFAM_COLQUAL)); - assertEquals(11, Long.parseLong(entry.getValue().toString())); - count++; - } - assertEquals(2, count); - } -}