Hi, The SPARQL standard states that multiple FROM clauses in a single SPARQL query should be interpreted as a graph merge operation. But I get strange behavior in this case — not as specified. At the end of this message I first include the results of an illustrative example showing that the graphs are not merged, followed by the Java code for the same example. What am I doing wrong? What should be done to make the graph merge work? We use jena-2.6.4 and tdb-0.8.9.
Thanks, Milorad =================================================================== START Deleting directory: C:/DatasetGraphsMergeTest creating directory: C:/DatasetGraphsMergeTest DIR created WARN [main] (SetupTDB.java:755) - No BGP optimizer First graph created. -------------------------------------------------------------------------------------------------------------------------- | s | p | o | ========================================================================================================================== | <http://test/r1> | <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> | <http://www.w3.org/2000/01/rdf-schema#label> | -------------------------------------------------------------------------------------------------------------------------- Second graph created. ---------------------------------------------------------------------------------------------- | s | p | o | ============================================================================================== | <http://test/r2> | <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> | <http://test/r1> | ---------------------------------------------------------------------------------------------- Inter graph query results. 
--------- | s | l | ========= --------- END =================================================================== import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.Dataset; import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.QueryFactory; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.query.ResultSetFormatter; import com.hp.hpl.jena.shared.Lock; import com.hp.hpl.jena.tdb.TDB; import com.hp.hpl.jena.tdb.TDBFactory; import com.hp.hpl.jena.update.UpdateAction; import com.hp.hpl.jena.update.UpdateFactory; import com.hp.hpl.jena.update.UpdateRequest; public class DatasetGraphsMergeSPARQL { public static final String TDB_DIR = "C:/DatasetGraphsMergeTest";//path on locval HDD for TDB store public static final boolean DELETE_EXISTING_TDB = true;//if TDB already exist should we delete it public static final String Q_CreateGraph1 = "INSERT DATA { " + " GRAPH <http://g1> { " + " <http://test/r1> <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> " + " <http://www.w3.org/2000/01/rdf-schema#label> . " + " }" + "}"; public static final String Q_CreateGraph2 = "INSERT DATA { " + " GRAPH <http://g2> { " + " <http://test/r2> <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> " + " <http://test/r1> . " + " }" + "}"; public static final String Q_TestEmpty1 = "SELECT ?s ?p ?o " + "FROM <http://g1> " + "{ " + " ?s ?p ?o . " + "}"; public static final String Q_TestEmpty2 = "SELECT ?s ?p ?o " + "FROM <http://g2> " + "{ " + " ?s ?p ?o . " + "}"; public static final String Q_TestInterGraphPath = "SELECT ?s ?l " + "FROM <http://g1> " + "FROM <http://g2> " + "{ " + " ?s <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> ?l . " + " ?l <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> " + " <http://www.w3.org/2000/01/rdf-schema#label> . 
" + "}"; public static void main(String[] args) throws Exception{ Dataset m_dataset; System.out.println("START"); m_dataset = initDataset(); updateDataset(m_dataset, Q_CreateGraph1); System.out.println("First graph created."); selectDataset(m_dataset, Q_TestEmpty1); updateDataset(m_dataset, Q_CreateGraph2); System.out.println("Second graph created."); selectDataset(m_dataset, Q_TestEmpty2); System.out.println("Inter graph query results."); selectDataset(m_dataset, Q_TestInterGraphPath); System.out.println("END"); } public static Dataset initDataset() throws IOException{ //Initializing Dataset//Creating Directory for TDB File tdbDir = new File(TDB_DIR); if(DELETE_EXISTING_TDB && tdbDir.exists()){ System.out.println("Deleting directory: " + TDB_DIR); FileUtils.deleteDirectory(tdbDir); } if (!tdbDir.exists()){ System.out.println("creating directory: " + TDB_DIR); boolean result = tdbDir.mkdir(); if(result){ System.out.println("DIR created"); } } //Initializing Dataset return TDBFactory.createDataset(tdbDir.getPath()); } public static void updateDataset(Dataset dataset, String querystr){ dataset.getLock().enterCriticalSection(Lock.WRITE); try { UpdateRequest updateRequest = UpdateFactory.create(querystr); UpdateAction.execute(updateRequest, dataset); }catch (Exception e){ System.out.println(e); } finally { TDB.sync(dataset); dataset.getLock().leaveCriticalSection(); } } public static void selectDataset(Dataset dataset, String querystr){ //SPARQL QUERY is handled here Query query = QueryFactory.create(querystr); dataset.getLock().enterCriticalSection(Lock.READ) ; try { QueryExecution qexec = QueryExecutionFactory.create(query, dataset) ; ResultSet results = qexec.execSelect() ; ResultSetFormatter.out(System.out, results); qexec.close(); } finally { dataset.getLock().leaveCriticalSection(); } } }
