jenkins-bot has submitted this change and it was merged.

Change subject: Handle Wikidata's dates
......................................................................


Handle Wikidata's dates

Wikidata's dates can represent values outside the range that Blazegraph
properly supports, so we implement our own inline value for them.  These
inline values also have the advantage of doing a bit of sanity munging on
the Blazegraph side.
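
For a sense of the ranges involved, using the Big Bang date exercised by the
new WikibaseDateUnitTest (a sketch; the constant below is taken from that
test, the rest is plain arithmetic):

    // -13798000000-01-01T00:00:00Z is roughly -4.35e17 seconds from the
    // epoch, which fits comfortably in a signed 64-bit long.
    long bigBangSeconds = -435422885863219200L;
    // The same instant in milliseconds (~ -4.35e20) is far outside
    // Long.MIN_VALUE (~ -9.22e18), which is why a milliseconds-since-epoch
    // encoding such as Blazegraph's stock DateTimeExtension can't hold it.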

Change-Id: I083e4feefd752253c141de896e36c38a229639d8
---
M blazegraph/pom.xml
D blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/Dummy.java
A blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseDateExtension.java
A blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseExtensionFactory.java
M common/pom.xml
A common/src/main/java/org/wikidata/query/rdf/common/WikibaseDate.java
D common/src/test/java/org/wikidata/query/rdf/common/DummyUnitTest.java
A common/src/test/java/org/wikidata/query/rdf/common/WikibaseDateUnitTest.java
M tools/pom.xml
M tools/src/main/java/org/wikidata/query/rdf/tool/Update.java
A tools/src/test/java/org/wikidata/query/rdf/tool/AbstractRdfRepositoryIntegrationTestBase.java
A tools/src/test/java/org/wikidata/query/rdf/tool/AbstractUpdateIntegrationTestBase.java
A tools/src/test/java/org/wikidata/query/rdf/tool/WikibaseDateExtensionIntegrationTest.java
M tools/src/test/java/org/wikidata/query/rdf/tool/rdf/RdfRepositoryIntegrationTest.java
M tools/src/test/resources/blazegraph/RWStore.properties
15 files changed, 946 insertions(+), 102 deletions(-)

Approvals:
  Smalyshev: Looks good to me, approved
  Manybubbles: Looks good to me, approved
  jenkins-bot: Verified



diff --git a/blazegraph/pom.xml b/blazegraph/pom.xml
index 116a910..cbcb386 100644
--- a/blazegraph/pom.xml
+++ b/blazegraph/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.wikidata.query.rdf</groupId>
@@ -23,7 +24,13 @@
       <artifactId>bigdata</artifactId>
       <version>${blazegraph.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.wikidata.query.rdf</groupId>
+      <artifactId>common</artifactId>
+      <version>${project.parent.version}</version>
+    </dependency>
   </dependencies>
+
   <build>
     <plugins>
       <plugin>
diff --git 
a/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/Dummy.java 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/Dummy.java
deleted file mode 100644
index ec39444..0000000
--- a/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/Dummy.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package org.wikidata.query.rdf.blazegraph;
-
-public class Dummy {
-    // TODO remove me when we have stuff to put in here.
-}
diff --git 
a/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseDateExtension.java
 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseDateExtension.java
new file mode 100644
index 0000000..b980126
--- /dev/null
+++ 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseDateExtension.java
@@ -0,0 +1,137 @@
+package org.wikidata.query.rdf.blazegraph;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.log4j.Logger;
+import org.openrdf.model.Literal;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.vocabulary.XMLSchema;
+import org.wikidata.query.rdf.common.WikibaseDate;
+import org.wikidata.query.rdf.common.WikibaseDate.ToStringFormat;
+
+import com.bigdata.rdf.internal.IDatatypeURIResolver;
+import com.bigdata.rdf.internal.IExtension;
+import com.bigdata.rdf.internal.IV;
+import com.bigdata.rdf.internal.impl.literal.AbstractLiteralIV;
+import com.bigdata.rdf.internal.impl.literal.LiteralExtensionIV;
+import com.bigdata.rdf.internal.impl.literal.XSDNumericIV;
+import com.bigdata.rdf.model.BigdataURI;
+import com.bigdata.rdf.model.BigdataValue;
+import com.bigdata.rdf.model.BigdataValueFactory;
+import com.bigdata.util.InnerCause;
+
+/**
+ * This implementation of {@link IExtension} inlines literals that represent
+ * xsd:dateTime values. Unlike
+ * {@link com.bigdata.rdf.internal.impl.extensions.DateTimeExtension} on which
+ * this is based, it stores the literals as time in <strong>seconds</strong>
+ * since the epoch. The seconds are encoded as an inline long. Also unlike
+ * DateTimeExtension it only supports UTC as the default time zone because UTC
+ * is king. This is needed because Wikidata contains dates whose
+ * <strong>milliseconds</strong> since the epoch don't fit into a long.
+ */
+public class WikibaseDateExtension<V extends BigdataValue> implements 
IExtension<V> {
+    private static final Logger log = 
Logger.getLogger(WikibaseDateExtension.class);
+
+    private static final List<URI> SUPPORTED_DATA_TYPES = 
Collections.unmodifiableList(Arrays.asList(
+            XMLSchema.DATETIME, XMLSchema.DATE));
+
+    @SuppressWarnings("rawtypes")
+    private final Map<IV, BigdataURI> dataTypes;
+    private final Set<BigdataURI> dataTypesSet;
+
+    public WikibaseDateExtension(final IDatatypeURIResolver resolver) {
+        @SuppressWarnings("rawtypes")
+        Map<IV, BigdataURI> dataTypes = new HashMap<>();
+        for (URI uri : SUPPORTED_DATA_TYPES) {
+            BigdataURI val = resolver.resolve(uri);
+            dataTypes.put(val.getIV(), val);
+        }
+        this.dataTypes = Collections.unmodifiableMap(dataTypes);
+        dataTypesSet = Collections.unmodifiableSet(new 
HashSet<>(this.dataTypes.values()));
+    }
+
+    @Override
+    public Set<BigdataURI> getDatatypes() {
+        return dataTypesSet;
+    }
+
+    /**
+     * Attempts to convert the supplied value into seconds since the epoch,
+     * encodes that long in a delegate {@link XSDNumericIV}, and returns a
+     * {@link LiteralExtensionIV} to wrap the native type.
+     */
+    @Override
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public LiteralExtensionIV createIV(Value value) {
+        if (!(value instanceof Literal)) {
+            throw new IllegalArgumentException("Expected a literal but got " + 
value);
+        }
+        try {
+            Literal literal = (Literal) value;
+            BigdataURI dataType = resolveDataType(literal);
+            WikibaseDate date = 
WikibaseDate.fromString(value.stringValue()).cleanWeirdStuff();
+            AbstractLiteralIV delegate = new 
XSDNumericIV(date.secondsSinceEpoch());
+            return new LiteralExtensionIV(delegate, dataType.getIV());
+        } catch (Exception e) {
+            /*
+             * Exception logging in Blazegraph isn't great for this so we log
+             * here as well.
+             */
+            log.warn("Couldn't create IV", e);
+            throw e;
+        }
+    }
+
+    private BigdataURI resolveDataType(Literal literal) {
+        URI dt = literal.getDatatype();
+        if (dt == null) {
+            throw new IllegalArgumentException("Literal doesn't have a data 
type:  " + literal);
+        }
+
+        // TODO why loop instead of using a hash set or something?
+        for (BigdataURI val : dataTypes.values()) {
+            // Note: URI.stringValue() is efficient....
+            if (val.stringValue().equals(dt.stringValue())) {
+                return val;
+            }
+        }
+        throw new IllegalArgumentException("Unrecognized data type:  " + dt);
+    }
+
+    /**
+     * Use the long value of the {@link XSDNumericIV} delegate, which
+     * represents seconds since the epoch, to create a WikibaseDate and then
+     * render it properly using xsd's string representations.
+     */
+    @Override
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public V asValue(final LiteralExtensionIV iv, final BigdataValueFactory 
vf) {
+        if (!dataTypes.containsKey(iv.getExtensionIV())) {
+            throw new IllegalArgumentException("Unrecognized datatype:  " + 
iv.getExtensionIV());
+        }
+
+        WikibaseDate date = 
WikibaseDate.fromSecondsSinceEpoch(iv.getDelegate().longValue());
+        try {
+            BigdataURI dt = dataTypes.get(iv.getExtensionIV());
+
+            if (dt.equals(XMLSchema.DATE)) {
+                return (V) 
vf.createLiteral(date.toString(ToStringFormat.DATE), dt);
+            }
+
+            return (V) 
vf.createLiteral(date.toString(ToStringFormat.DATE_TIME), dt);
+        } catch (RuntimeException ex) {
+            if (InnerCause.isInnerCause(ex, InterruptedException.class)) {
+                throw ex;
+            }
+            throw new IllegalArgumentException("bad iv: " + iv, ex);
+        }
+    }
+}
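
Conceptually, createIV() and asValue() above boil down to the following round
trip through the new WikibaseDate class in common (a sketch using only the API
added by this change; the literal is the one used in the new integration test):

    // Encode: parse the lexical form, clean up odd values, inline as seconds.
    long seconds = WikibaseDate.fromString("1732-02-22").cleanWeirdStuff().secondsSinceEpoch();
    // Decode: rebuild the date from the inlined long and render it as xsd:date.
    String lexical = WikibaseDate.fromSecondsSinceEpoch(seconds).toString(ToStringFormat.DATE);
    // lexical should come back as "1732-02-22".
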
diff --git 
a/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseExtensionFactory.java
 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseExtensionFactory.java
new file mode 100644
index 0000000..1b72abc
--- /dev/null
+++ 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseExtensionFactory.java
@@ -0,0 +1,29 @@
+package org.wikidata.query.rdf.blazegraph;
+
+import java.util.Collection;
+import java.util.Iterator;
+
+import com.bigdata.rdf.internal.DefaultExtensionFactory;
+import com.bigdata.rdf.internal.IDatatypeURIResolver;
+import com.bigdata.rdf.internal.IExtension;
+import com.bigdata.rdf.internal.ILexiconConfiguration;
+import com.bigdata.rdf.internal.impl.extensions.DateTimeExtension;
+import com.bigdata.rdf.model.BigdataLiteral;
+import com.bigdata.rdf.model.BigdataValue;
+
+public class WikibaseExtensionFactory extends DefaultExtensionFactory {
+    @Override
+    @SuppressWarnings("rawtypes")
+    protected void _init(IDatatypeURIResolver resolver, 
ILexiconConfiguration<BigdataValue> config,
+            Collection<IExtension> extensions) {
+        if (config.isInlineDateTimes()) {
+            Iterator<IExtension> extensionsItr = extensions.iterator();
+            while (extensionsItr.hasNext()) {
+                if (extensionsItr.next() instanceof DateTimeExtension) {
+                    extensionsItr.remove();
+                }
+            }
+            extensions.add(new 
WikibaseDateExtension<BigdataLiteral>(resolver));
+        }
+    }
+}
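
For reference, this factory only changes anything when inline date times are
enabled (the isInlineDateTimes() check above). The RWStore.properties change at
the end of this patch registers the factory; the inlineDateTimes line below is
an assumption about the matching Blazegraph option and may already be the
default:

    com.bigdata.rdf.store.AbstractTripleStore.extensionFactoryClass=org.wikidata.query.rdf.blazegraph.WikibaseExtensionFactory
    # Assumed option name for enabling inline date times; check the Blazegraph docs.
    com.bigdata.rdf.store.AbstractTripleStore.inlineDateTimes=true
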
diff --git a/common/pom.xml b/common/pom.xml
index 1406621..23959bb 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -18,6 +18,15 @@
     </license>
   </licenses>
 
+  <dependencies>
+    <dependency>
+      <groupId>joda-time</groupId>
+      <artifactId>joda-time</artifactId>
+      <version>2.7</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
   <build>
     <plugins>
       <plugin>
diff --git 
a/common/src/main/java/org/wikidata/query/rdf/common/WikibaseDate.java 
b/common/src/main/java/org/wikidata/query/rdf/common/WikibaseDate.java
new file mode 100644
index 0000000..505194d
--- /dev/null
+++ b/common/src/main/java/org/wikidata/query/rdf/common/WikibaseDate.java
@@ -0,0 +1,375 @@
+package org.wikidata.query.rdf.common;
+
+import static java.lang.Integer.parseInt;
+import static java.lang.Long.parseLong;
+import static java.util.concurrent.TimeUnit.DAYS;
+import static java.util.concurrent.TimeUnit.HOURS;
+import static java.util.concurrent.TimeUnit.MINUTES;
+
+import java.util.Arrays;
+import java.util.Locale;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Handles Wikidata dates. Note that this ignores leap seconds. That isn't
+ * ideal, but it's what Joda-Time does, so it's where we're starting.
+ */
+public class WikibaseDate {
+    private static final Pattern FORMAT_PATTERN = Pattern
+            
.compile("(?<year>[+-]?0+)-(?<month>0?0)-(?<day>0?0)(?:T(?<hour>0?0):(?<minute>0?0)(?::(?<second>0?0))?)?Z?"
+                    .replace("0", "\\d"));
+
+    /**
+     * Build a WikibaseDate from the string representation. Supported:
+     * <ul>
+     * <li>+YYYYYYYYYYYY-MM-DDThh:mm:ssZ (Wikidata's default)
+     * <li>YYYY-MM-DDThh:mm:ssZ (xsd:dateTime)
+     * <li>YYYY-MM-DD (xsd:date with time assumed to be 00:00:00)
+     * </ul>
+     */
+    public static WikibaseDate fromString(String string) {
+        // TODO timezones
+        Matcher m = FORMAT_PATTERN.matcher(string);
+        if (!m.matches()) {
+            throw new IllegalArgumentException("Invalid date format:  " + 
string);
+        }
+        long year = parseLong(m.group("year"));
+        // TODO two digit years without leading zeros might mean something 
other
+        // than the year 20.
+        int month = parseInt(m.group("month"));
+        int day = parseInt(m.group("day"));
+        int hour = parseOr0(m, "hour");
+        int minute = parseOr0(m, "minute");
+        int second = parseOr0(m, "second");
+        return new WikibaseDate(year, month, day, hour, minute, second);
+    }
+
+    private static int parseOr0(Matcher m, String group) {
+        String matched = m.group(group);
+        if (matched == null) {
+            return 0;
+        }
+        return parseInt(matched);
+    }
+
+    /**
+     * Build a WikibaseDate from seconds since the epoch.
+     */
+    public static WikibaseDate fromSecondsSinceEpoch(long secondsSinceEpoch) {
+        long year = yearFromSecondsSinceEpoch(secondsSinceEpoch);
+        int second = (int) (secondsSinceEpoch - calculateFirstDayOfYear(year) 
* SECONDS_PER_DAY);
+        int month = 1;
+        long[] secondsPerMonthCumulative = secondsPerMonthCumulative(year);
+        while (month < 12 && second >= secondsPerMonthCumulative[month]) {
+            month++;
+        }
+        second -= secondsPerMonthCumulative[month - 1];
+        int day = second / SECONDS_PER_DAY + 1;
+        second %= SECONDS_PER_DAY;
+        int hour = second / SECONDS_PER_HOUR;
+        second %= SECONDS_PER_HOUR;
+        int minute = second / SECONDS_PER_MINUTE;
+        second %= SECONDS_PER_MINUTE;
+        return new WikibaseDate(year, month, day, hour, minute, second);
+    }
+
+    private static final int DAYS_0000_TO_1970 = 719527;
+    private static final int SECONDS_PER_MINUTE = (int) MINUTES.toSeconds(1);
+    private static final int SECONDS_PER_HOUR = (int) HOURS.toSeconds(1);
+    private static final int SECONDS_PER_DAY = (int) DAYS.toSeconds(1);
+    private static final long AVERAGE_SECONDS_PER_YEAR = (SECONDS_PER_DAY * 
365 * 3 + SECONDS_PER_DAY * 366) / 4;
+    private static final long SECONDS_AT_EPOCH = 1970 * 
AVERAGE_SECONDS_PER_YEAR;
+    /**
+     * Days per month in non-leap-years.
+     */
+    static final int[] DAYS_PER_MONTH = new int[] { 31, 28, 31, 30, 31, 30, 
31, 31, 30, 31, 30, 31 };
+    private static final long[] SECONDS_PER_MONTH = new long[12];
+    private static final long[] SECONDS_PER_MONTH_CUMULATIVE = new long[12];
+    private static final long[] SECONDS_PER_MONTH_CUMULATIVE_LEAP_YEAR;
+    static {
+        long total = 0;
+        for (int i = 0; i < DAYS_PER_MONTH.length; i++) {
+            SECONDS_PER_MONTH[i] = DAYS.toSeconds(DAYS_PER_MONTH[i]);
+            SECONDS_PER_MONTH_CUMULATIVE[i] = total;
+            total += SECONDS_PER_MONTH[i];
+        }
+        SECONDS_PER_MONTH_CUMULATIVE_LEAP_YEAR = 
Arrays.copyOf(SECONDS_PER_MONTH_CUMULATIVE,
+                SECONDS_PER_MONTH_CUMULATIVE.length);
+        for (int i = 2; i < SECONDS_PER_MONTH_CUMULATIVE_LEAP_YEAR.length; 
i++) {
+            SECONDS_PER_MONTH_CUMULATIVE_LEAP_YEAR[i] += SECONDS_PER_DAY;
+        }
+    }
+
+    // TODO it'll be faster to keep it in seconds since epoch form
+    private final long year;
+    private final int month;
+    private final int day;
+    private final int hour;
+    private final int minute;
+    private final int second;
+
+    public WikibaseDate(long year, int month, int day, int hour, int minute, 
int second) {
+        this.year = year;
+        this.month = month;
+        this.day = day;
+        this.hour = hour;
+        this.minute = minute;
+        this.second = second;
+    }
+
+    /**
+     * Wikidata contains some odd dates like -13798000000-00-00T00:00:00Z and
+     * February 30th. We simply guess what they mean here.
+     *
+     * @return this if the date is fine, a new date if we modified it
+     */
+    public WikibaseDate cleanWeirdStuff() {
+        long newYear = year;
+        int newMonth = month;
+        int newDay = day;
+        int newHour = hour;
+        int newMinute = minute;
+        int newSecond = second;
+        if (month == 0) {
+            newMonth = 1;
+        }
+        if (day == 0) {
+            newDay = 1;
+        } else {
+            int maxDaysInMonth = DAYS_PER_MONTH[newMonth - 1];
+            if (isLeapYear(newYear) && newMonth == 2) {
+                maxDaysInMonth++;
+            }
+            if (newDay > maxDaysInMonth) {
+                newMonth++;
+                newDay = newDay - maxDaysInMonth + 1;
+                if (newMonth > 12) {
+                    newMonth = newMonth - 12;
+                    newYear++;
+                }
+            }
+        }
+        if (newYear == year && newMonth == month && newDay == day && newHour 
== hour && newMinute == minute
+                && newSecond == second) {
+            return this;
+        }
+        return new WikibaseDate(newYear, newMonth, newDay, newHour, newMinute, 
newSecond);
+    }
+
+    public long secondsSinceEpoch() {
+        long seconds = calculateFirstDayOfYear(year) * SECONDS_PER_DAY;
+        seconds += SECONDS_PER_MONTH_CUMULATIVE[month - 1];
+        seconds += (day - 1) * SECONDS_PER_DAY;
+        seconds += hour * SECONDS_PER_HOUR;
+        seconds += minute * SECONDS_PER_MINUTE;
+        seconds += second;
+        if (month > 2 && isLeapYear(year)) {
+            seconds += SECONDS_PER_DAY;
+        }
+        return seconds;
+    }
+
+    /**
+     * Render this date as a string in the given format. See ToStringFormat
+     * for the supported formats.
+     */
+    public String toString(ToStringFormat format) {
+        return format.format(this);
+    }
+
+    @Override
+    public String toString() {
+        return toString(ToStringFormat.WIKIDATA);
+    }
+
+    /**
+     * Year component of the date.
+     */
+    public long year() {
+        return year;
+    }
+
+    /**
+     * Month component of the date.
+     */
+    public int month() {
+        return month;
+    }
+
+    /**
+     * Day component of the date.
+     */
+    public int day() {
+        return day;
+    }
+
+    /**
+     * Hour component of the date.
+     */
+    public int hour() {
+        return hour;
+    }
+
+    /**
+     * Minute component of the date.
+     */
+    public int minute() {
+        return minute;
+    }
+
+    /**
+     * Second component of the date.
+     */
+    public int second() {
+        return second;
+    }
+
+    @Override
+    public int hashCode() {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + day;
+        result = prime * result + hour;
+        result = prime * result + minute;
+        result = prime * result + month;
+        result = prime * result + second;
+        result = prime * result + (int) (year ^ (year >>> 32));
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null) {
+            return false;
+        }
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+        WikibaseDate other = (WikibaseDate) obj;
+        if (day != other.day) {
+            return false;
+        }
+        if (hour != other.hour) {
+            return false;
+        }
+        if (minute != other.minute) {
+            return false;
+        }
+        if (month != other.month) {
+            return false;
+        }
+        if (second != other.second) {
+            return false;
+        }
+        if (year != other.year) {
+            return false;
+        }
+        return true;
+    }
+
+    /**
+     * Format for toString.
+     */
+    public static enum ToStringFormat {
+        /**
+         * Wikidata style (+YYYYYYYYYYYY-MM-DDThh:mm:ssZ).
+         */
+        WIKIDATA {
+            @Override
+            public String format(WikibaseDate date) {
+                return String.format(Locale.ROOT, 
"%+012d-%02d-%02dT%02d:%02d:%02dZ", date.year, date.month, date.day,
+                        date.hour, date.minute, date.second);
+            }
+        },
+        /**
+         * xsd:dateTime style (YYYY-MM-DDThh:mm:ssZ).
+         */
+        DATE_TIME {
+            @Override
+            public String format(WikibaseDate date) {
+                return String.format(Locale.ROOT, 
"%04d-%02d-%02dT%02d:%02d:%02dZ", date.year, date.month, date.day,
+                        date.hour, date.minute, date.second);
+            }
+        },
+        /**
+         * xsd:date style (YYYY-MM-DD).
+         */
+        DATE {
+            @Override
+            public String format(WikibaseDate date) {
+                return String.format(Locale.ROOT, "%04d-%02d-%02d", date.year, 
date.month, date.day);
+            }
+        };
+
+        public abstract String format(WikibaseDate date);
+    }
+
+    static boolean isLeapYear(long year) {
+        // Borrowed from joda-time's GregorianChronology
+        return ((year & 3) == 0) && ((year % 100) != 0 || (year % 400) == 0);
+    }
+
+    static long calculateFirstDayOfYear(long year) {
+        /*
+         * This is a clever hack for getting the number of leap years that 
works
+         * properly for negative years borrowed from JodaTime's
+         * GregorianChronology.
+         */
+        long leapYears = year / 100;
+        if (year < 0) {
+            leapYears = ((year + 3) >> 2) - leapYears + ((leapYears + 3) >> 2) 
- 1;
+        } else {
+            leapYears = (year >> 2) - leapYears + (leapYears >> 2);
+            if (isLeapYear(year)) {
+                leapYears--;
+            }
+        }
+        return year * 365L + leapYears - DAYS_0000_TO_1970;
+    }
+
+    static long yearFromSecondsSinceEpoch(long secondsSinceEpoch) {
+        /*
+         * Similar to Joda-Time's way of getting year from date - estimate and
+         * then fix the estimate. Except our estimates can be really off.
+         */
+        long unitSeconds = AVERAGE_SECONDS_PER_YEAR / 2;
+        long i2 = secondsSinceEpoch / 2 + SECONDS_AT_EPOCH / 2;
+        if (i2 < 0) {
+            i2 = i2 - unitSeconds + 1;
+        }
+        long year = i2 / unitSeconds;
+        while (true) {
+            // Rerunning calculateFirstDayOfYear isn't going to be efficient
+            // here.
+            long yearStart = calculateFirstDayOfYear(year) * SECONDS_PER_DAY;
+            long diff = secondsSinceEpoch - yearStart;
+            if (diff < 0) {
+                year--;
+                continue;
+            }
+            if (diff >= SECONDS_PER_DAY * 365) {
+                yearStart += SECONDS_PER_DAY * 365;
+                if (isLeapYear(year)) {
+                    yearStart += SECONDS_PER_DAY;
+                }
+                if (yearStart <= secondsSinceEpoch) {
+                    year++;
+                    continue;
+                }
+            }
+            return year;
+        }
+    }
+
+    static long[] secondsPerMonthCumulative(long year) {
+        if (isLeapYear(year)) {
+            return SECONDS_PER_MONTH_CUMULATIVE_LEAP_YEAR;
+        }
+        return SECONDS_PER_MONTH_CUMULATIVE;
+    }
+}
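
A short sketch of how the parsing and cleanup described above behave, using
values from the new unit and integration tests (the expected results are the
ones those tests assert):

    // Wikidata's "00" month/day placeholders are bumped to January 1st...
    WikibaseDate bigBang = WikibaseDate.fromString("-13798000000-00-00T00:00:00Z").cleanWeirdStuff();
    // ...so it compares equal to the explicit January 1st form:
    boolean same = bigBang.equals(WikibaseDate.fromString("-13798000000-01-01T00:00:00Z"));   // true
    // The Wikidata lexical form pads the year out to eleven digits:
    String wd = new WikibaseDate(1970, 1, 1, 0, 0, 0).toString(ToStringFormat.WIKIDATA);
    // wd is "+00000001970-01-01T00:00:00Z" per WikibaseDateUnitTest.epoch().
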
diff --git 
a/common/src/test/java/org/wikidata/query/rdf/common/DummyUnitTest.java 
b/common/src/test/java/org/wikidata/query/rdf/common/DummyUnitTest.java
deleted file mode 100644
index 9ed73d9..0000000
--- a/common/src/test/java/org/wikidata/query/rdf/common/DummyUnitTest.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package org.wikidata.query.rdf.common;
-
-import static org.hamcrest.Matchers.lessThan;
-
-import org.junit.Test;
-import org.junit.runner.RunWith;
-
-import com.carrotsearch.randomizedtesting.RandomizedRunner;
-import com.carrotsearch.randomizedtesting.RandomizedTest;
-
-@RunWith(RandomizedRunner.class)
-public class DummyUnitTest extends RandomizedTest {
-    @Test
-    public void dummy() {
-        // TODO remove me when there are real tests here
-        assertThat(randomIntBetween(0, 10), lessThan(11));
-    }
-}
diff --git 
a/common/src/test/java/org/wikidata/query/rdf/common/WikibaseDateUnitTest.java 
b/common/src/test/java/org/wikidata/query/rdf/common/WikibaseDateUnitTest.java
new file mode 100644
index 0000000..2562568
--- /dev/null
+++ 
b/common/src/test/java/org/wikidata/query/rdf/common/WikibaseDateUnitTest.java
@@ -0,0 +1,174 @@
+package org.wikidata.query.rdf.common;
+
+import static org.wikidata.query.rdf.common.WikibaseDate.DAYS_PER_MONTH;
+import static org.wikidata.query.rdf.common.WikibaseDate.fromSecondsSinceEpoch;
+import static org.wikidata.query.rdf.common.WikibaseDate.fromString;
+import static org.wikidata.query.rdf.common.WikibaseDate.isLeapYear;
+import static org.wikidata.query.rdf.common.WikibaseDate.ToStringFormat.DATE;
+import static 
org.wikidata.query.rdf.common.WikibaseDate.ToStringFormat.DATE_TIME;
+import static 
org.wikidata.query.rdf.common.WikibaseDate.ToStringFormat.WIKIDATA;
+
+import org.joda.time.chrono.GregorianChronology;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.wikidata.query.rdf.common.WikibaseDate.ToStringFormat;
+
+import com.carrotsearch.randomizedtesting.RandomizedRunner;
+import com.carrotsearch.randomizedtesting.RandomizedTest;
+import com.carrotsearch.randomizedtesting.annotations.Repeat;
+
+@RunWith(RandomizedRunner.class)
+public class WikibaseDateUnitTest extends RandomizedTest {
+    /**
+     * Round trips epoch and explicitly tests some output and input formats.
+     */
+    @Test
+    public void epoch() {
+        WikibaseDate wbDate = check(1970, 1, 1, 0, 0, 0);
+        assertEquals("+00000001970-01-01T00:00:00Z", 
wbDate.toString(ToStringFormat.WIKIDATA));
+        assertEquals("1970-01-01T00:00:00Z", 
wbDate.toString(ToStringFormat.DATE_TIME));
+        assertEquals("1970-01-01", wbDate.toString(ToStringFormat.DATE));
+        assertEquals(wbDate, fromString("1970-1-1"));
+        assertEquals(wbDate, fromString("1970-1-1T00:00"));
+        assertEquals(wbDate, fromString("1970-1-1T00:00:00"));
+        assertEquals(wbDate, fromString("1970-1-1T00:00:00Z"));
+    }
+
+    @Test
+    public void yearOne() {
+        check(1, 1, 1, 0, 0, 0);
+    }
+
+    @Test
+    public void yearMinusOne() {
+        check(-1, 1, 1, 0, 0, 0);
+    }
+
+    @Test
+    public void yearZero() {
+        check(0, 1, 1, 0, 0, 0);
+    }
+
+    @Test
+    public void whenIWroteThis() {
+        check(2015, 4, 1, 13, 53, 40);
+    }
+
+    @Test
+    public void onLeapYear() {
+        check(2000, 11, 1, 0, 0, 0);
+    }
+
+    @Test
+    public void negativeLeapYear() {
+        check(-4, 11, 1, 0, 0, 0);
+    }
+
+    @Test
+    public void onLeapYearBeforeLeapDay() {
+        check(2000, 2, 28, 13, 53, 40);
+    }
+
+    @Test
+    public void onLeapYearOnLeapDay() {
+        check(2000, 2, 29, 13, 53, 40);
+    }
+
+    @Test
+    public void onLeapYearAfterLeapDay() {
+        check(2000, 3, 1, 13, 53, 40);
+    }
+
+    @Test
+    public void offLeapYearBeforeLeapDay() {
+        check(2001, 2, 28, 13, 53, 40);
+    }
+
+    @Test
+    public void offLeapYearAfterLeapDay() {
+        check(2001, 3, 1, 13, 53, 40);
+    }
+
+    @Test
+    public void veryNegativeYear() {
+        check(-286893830, 1, 1, 0, 0, 0);
+    }
+
+    @Test
+    public void bigBang() {
+        WikibaseDate wbDate = 
fromString("-13798000000-00-00T00:00:00Z").cleanWeirdStuff();
+        assertEquals(wbDate, fromString("-13798000000-01-01T00:00:00Z"));
+        assertEquals(-435422885863219200L, wbDate.secondsSinceEpoch());
+        checkRoundTrip(wbDate);
+    }
+
+    @Test
+    @Repeat(iterations = 100)
+    public void randomDate() {
+        // Build a valid random date
+
+        // Joda doesn't work outside these years
+        int year = randomIntBetween(-292275054, 292278993);
+        int month = randomIntBetween(1, 12);
+        int day;
+        if (isLeapYear(year) && month == 2) {
+            day = randomIntBetween(1, 29);
+        } else {
+            day = randomIntBetween(1, DAYS_PER_MONTH[month - 1]);
+        }
+        int hour = randomIntBetween(0, 23);
+        int minute = randomIntBetween(0, 59);
+        int second = randomIntBetween(0, 59);
+        check(year, month, day, hour, minute, second);
+    }
+
+    /**
+     * Checks that the dates resolve the same way joda-time resolves dates and
+     * that they round trip.
+     */
+    private WikibaseDate check(int year, int month, int day, int hour, int 
minute, int second) {
+        WikibaseDate wbDate = new WikibaseDate(year, month, day, hour, minute, 
second);
+        assertEquals(wbDate.toString(), jodaSeconds(year, month, day, hour, 
minute, second), wbDate.secondsSinceEpoch());
+        checkRoundTrip(wbDate);
+        return wbDate;
+    }
+
+    /**
+     * Round trips the date through secondsSinceEpoch and all the toString and
+     * fromString formats.
+     */
+    private void checkRoundTrip(WikibaseDate wbDate) {
+        long seconds = wbDate.secondsSinceEpoch();
+        WikibaseDate roundDate = fromSecondsSinceEpoch(seconds);
+        assertEquals(wbDate, roundDate);
+        long roundSeconds = roundDate.secondsSinceEpoch();
+        assertEquals(seconds, roundSeconds);
+
+        String string = wbDate.toString(WIKIDATA);
+        roundDate = fromString(string);
+        assertEquals(wbDate, roundDate);
+        String roundString = roundDate.toString(WIKIDATA);
+        assertEquals(string, roundString);
+
+        string = wbDate.toString(DATE_TIME);
+        roundDate = fromString(string);
+        assertEquals(wbDate, roundDate);
+        roundString = roundDate.toString(DATE_TIME);
+        assertEquals(string, roundString);
+
+        string = wbDate.toString(DATE);
+        roundDate = fromString(string);
+        if (wbDate.hour() == 0 && wbDate.minute() == 0 && wbDate.second() == 
0) {
+            assertEquals(wbDate, roundDate);
+        }
+        roundString = roundDate.toString(DATE);
+        assertEquals(string, roundString);
+    }
+
+    /**
+     * Get the seconds since epoch for a time according to Joda-Time.
+     */
+    private long jodaSeconds(int year, int month, int day, int hour, int 
minute, int second) {
+        return GregorianChronology.getInstanceUTC().getDateTimeMillis(year, 
month, day, hour, minute, second, 0) / 1000;
+    }
+}
diff --git a/tools/pom.xml b/tools/pom.xml
index 5e30852..fc38c6a 100644
--- a/tools/pom.xml
+++ b/tools/pom.xml
@@ -9,7 +9,8 @@
   <artifactId>tools</artifactId>
   <packaging>jar</packaging>
 
-  <name>Tools to sync Wikibase to RDF stores</name>
+  <name>Wikidata Query RDF Tools</name>
+  <description>Tools to sync Wikibase to RDF stores.  Also contains overall 
integration tests that rely on everything else.</description>
   <licenses>
     <license>
       <name>The Apache Software License, Version 2.0</name>
@@ -100,7 +101,7 @@
         <version>2.10</version>
         <executions>
           <execution>
-            <id>unpack</id>
+            <id>unpack-blazegraph</id>
             <phase>pre-integration-test</phase>
             <goals>
               <goal>unpack</goal>
@@ -115,6 +116,28 @@
                 </artifactItem>
               </artifactItems>
               
<outputDirectory>${project.build.directory}/blazegraph</outputDirectory>
+            </configuration>
+          </execution>
+          <execution>
+            <id>drop-in-blazegraph-extension</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <configuration>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>org.wikidata.query.rdf</groupId>
+                  <artifactId>common</artifactId>
+                  <version>${project.parent.version}</version>
+                </artifactItem>
+                <artifactItem>
+                  <groupId>org.wikidata.query.rdf</groupId>
+                  <artifactId>blazegraph</artifactId>
+                  <version>${project.parent.version}</version>
+                </artifactItem>
+              </artifactItems>
+              
<outputDirectory>${project.build.directory}/blazegraph/WEB-INF/lib</outputDirectory>
             </configuration>
           </execution>
         </executions>
@@ -171,7 +194,7 @@
         </configuration>
         <executions>
           <execution>
-            <id>start-blzegraph</id>
+            <id>start-blazegraph</id>
             <phase>pre-integration-test</phase>
             <!-- This complains some because this project _isn't_ a war. But 
we want to start the Blazegraph war. -->
             <goals>
@@ -226,7 +249,7 @@
             <artifactId>jetty-maven-plugin</artifactId>
             <executions>
               <execution>
-                <id>run-blzegraph</id>
+                <id>run-blazegraph</id>
                 <phase>generate-sources</phase>
                 <goals>
                   <goal>run</goal>
diff --git a/tools/src/main/java/org/wikidata/query/rdf/tool/Update.java 
b/tools/src/main/java/org/wikidata/query/rdf/tool/Update.java
index 87400a9..4ac3dce 100644
--- a/tools/src/main/java/org/wikidata/query/rdf/tool/Update.java
+++ b/tools/src/main/java/org/wikidata/query/rdf/tool/Update.java
@@ -194,13 +194,6 @@
         return new RecentChangesPoller(wikibaseRepository, new 
Date(startTime));
     }
 
-    /**
-     * Polls updates.
-     */
-    public static interface Source {
-
-    }
-
     private final Meter updateMeter = new Meter();
     private final Meter batchAdvanced = new Meter();
 
diff --git 
a/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractRdfRepositoryIntegrationTestBase.java
 
b/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractRdfRepositoryIntegrationTestBase.java
new file mode 100644
index 0000000..2056a44
--- /dev/null
+++ 
b/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractRdfRepositoryIntegrationTestBase.java
@@ -0,0 +1,56 @@
+package org.wikidata.query.rdf.tool;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.junit.Before;
+import org.junit.runner.RunWith;
+import org.wikidata.query.rdf.common.uri.Entity;
+import org.wikidata.query.rdf.tool.rdf.RdfRepository;
+import org.wikidata.query.rdf.tool.rdf.UpdateBuilder;
+
+import com.carrotsearch.randomizedtesting.RandomizedRunner;
+import com.carrotsearch.randomizedtesting.RandomizedTest;
+
+/**
+ * Superclass of integration tests that use an RDF repository and clear it between
+ * test methods.
+ */
+@RunWith(RandomizedRunner.class)
+public abstract class AbstractRdfRepositoryIntegrationTestBase extends 
RandomizedTest {
+    protected final RdfRepositoryForTesting rdfRepository;
+
+    public AbstractRdfRepositoryIntegrationTestBase() {
+        try {
+            rdfRepository = new RdfRepositoryForTesting(new URI("http://localhost:9999/bigdata/namespace/kb/sparql"),
+                    Entity.WIKIDATA);
+        } catch (URISyntaxException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Before
+    public void clear() {
+        rdfRepository.clear();
+    }
+
+    /**
+     * RdfRepository extension used for testing. We don't want anyone to
+     * accidentally use clear() so we don't put it in the repository.
+     */
+    public static class RdfRepositoryForTesting extends RdfRepository {
+        public RdfRepositoryForTesting(URI uri, Entity entityUris) {
+            super(uri, entityUris);
+        }
+
+        /**
+         * Clears the whole repository.
+         */
+        public void clear() {
+            UpdateBuilder b = new UpdateBuilder();
+            b.where("?s", "?p", "?o");
+            b.delete("?s", "?p", "?o");
+            execute("update", RdfRepository.IGNORE_RESPONSE, b.toString());
+        }
+    }
+}
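
For readers skimming the integration tests that follow: subclasses get the
live rdfRepository plus the clear-between-tests behavior for free. A minimal
hypothetical subclass is sketched below; UpdateBuilder isn't shown in this
change, so the SPARQL in the comment is an assumption about what clear() ends
up issuing.

    public class ExampleIntegrationTest extends AbstractRdfRepositoryIntegrationTestBase {
        @Test
        public void startsEmpty() throws QueryEvaluationException {
            // The @Before clear() has already wiped the store, presumably via
            // something equivalent to: DELETE { ?s ?p ?o } WHERE { ?s ?p ?o }
            assertFalse(rdfRepository.query("SELECT * WHERE {?s ?p ?o}").hasNext());
        }
    }
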
diff --git 
a/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractUpdateIntegrationTestBase.java
 
b/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractUpdateIntegrationTestBase.java
new file mode 100644
index 0000000..010a9b0
--- /dev/null
+++ 
b/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractUpdateIntegrationTestBase.java
@@ -0,0 +1,39 @@
+package org.wikidata.query.rdf.tool;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+import org.wikidata.query.rdf.common.uri.Entity;
+import org.wikidata.query.rdf.common.uri.EntityData;
+import org.wikidata.query.rdf.tool.change.Change;
+import org.wikidata.query.rdf.tool.change.IdChangeSource;
+import org.wikidata.query.rdf.tool.rdf.Munger;
+import org.wikidata.query.rdf.tool.wikibase.WikibaseRepository;
+
+/**
+ * Superclass for tests that need to run a full update.
+ */
+public class AbstractUpdateIntegrationTestBase extends 
AbstractRdfRepositoryIntegrationTestBase {
+    private WikibaseRepository wikibaseRepository = new 
WikibaseRepository("https", "www.wikidata.org");
+    private final Munger munger = new Munger(EntityData.WIKIDATA, 
Entity.WIKIDATA).removeSiteLinks();
+    /**
+     * Update all ids from {@code from} to {@code to}.
+     */
+    public void update(int from, int to) {
+        Change.Source<?> source = IdChangeSource.forItems(from, to, 30);
+        ExecutorService executorService = new ThreadPoolExecutor(0, 10, 0, 
TimeUnit.SECONDS,
+                new LinkedBlockingQueue<Runnable>());
+        Update<?> update = new Update<>(source, wikibaseRepository, 
rdfRepository, munger, executorService);
+        update.run();
+        executorService.shutdown();
+    }
+
+    /**
+     * Update the specified id.
+     */
+    public void update(int id) {
+        update(id, id);
+    }
+}
diff --git 
a/tools/src/test/java/org/wikidata/query/rdf/tool/WikibaseDateExtensionIntegrationTest.java
 
b/tools/src/test/java/org/wikidata/query/rdf/tool/WikibaseDateExtensionIntegrationTest.java
new file mode 100644
index 0000000..e760e20
--- /dev/null
+++ 
b/tools/src/test/java/org/wikidata/query/rdf/tool/WikibaseDateExtensionIntegrationTest.java
@@ -0,0 +1,58 @@
+package org.wikidata.query.rdf.tool;
+
+import static org.wikidata.query.rdf.tool.Matchers.binds;
+import static org.wikidata.query.rdf.tool.StatementHelper.statement;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.Test;
+import org.openrdf.model.Statement;
+import org.openrdf.model.impl.LiteralImpl;
+import org.openrdf.model.vocabulary.XMLSchema;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.TupleQueryResult;
+
+/**
+ * Validates the WikibaseDateExtension over the Blazegraph API.
+ */
+public class WikibaseDateExtensionIntegrationTest extends 
AbstractUpdateIntegrationTestBase {
+    /**
+     * Loads Q1 (universe) and validates that it can find it by searching for
+     * things before some date very far in the past. Without our date extension
+     * the start time doesn't properly parse in Blazegraph and doesn't allow
+     * less than operations.
+     */
+    @Test
+    public void bigBang() throws QueryEvaluationException {
+        update(1, 1);
+        StringBuilder query = new StringBuilder();
+        query.append("PREFIX assert: 
<http://www.wikidata.org/entity/assert/>\n");
+        query.append("SELECT * WHERE {\n");
+        query.append("?s assert:P580 ?startTime .\n");
+        query.append("FILTER (?startTime < \"-04540000000-01-01");
+        if (randomBoolean()) {
+            query.append("T00:00:00Z");
+        }
+        query.append("\"^^xsd:dateTime)\n");
+        query.append("}");
+        TupleQueryResult results = rdfRepository.query(query.toString());
+        assertTrue(results.hasNext());
+        BindingSet result = results.next();
+        assertThat(result, binds("s", "Q1"));
+        assertThat(result, binds("startTime", new 
LiteralImpl("-13798000000-01-01T00:00:00Z", XMLSchema.DATETIME)));
+    }
+
+    @Test
+    public void date() throws QueryEvaluationException {
+        List<Statement> statements = new ArrayList<>();
+        statements.add(statement("Q23", "P569", new LiteralImpl("1732-02-22", 
XMLSchema.DATE)));
+        rdfRepository.sync("Q23", statements);
+        TupleQueryResult results = rdfRepository.query("SELECT * WHERE {?s ?p 
?o}");
+        BindingSet result = results.next();
+        assertThat(result, binds("s", "Q23"));
+        assertThat(result, binds("p", "P569"));
+        assertThat(result, binds("o", new LiteralImpl("1732-02-22", 
XMLSchema.DATE)));
+    }
+}
diff --git 
a/tools/src/test/java/org/wikidata/query/rdf/tool/rdf/RdfRepositoryIntegrationTest.java
 
b/tools/src/test/java/org/wikidata/query/rdf/tool/rdf/RdfRepositoryIntegrationTest.java
index 5a39928..250feab 100644
--- 
a/tools/src/test/java/org/wikidata/query/rdf/tool/rdf/RdfRepositoryIntegrationTest.java
+++ 
b/tools/src/test/java/org/wikidata/query/rdf/tool/rdf/RdfRepositoryIntegrationTest.java
@@ -1,55 +1,37 @@
 package org.wikidata.query.rdf.tool.rdf;
 
 import static org.hamcrest.Matchers.allOf;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
 import static org.wikidata.query.rdf.tool.Matchers.binds;
 import static org.wikidata.query.rdf.tool.StatementHelper.siteLink;
 import static org.wikidata.query.rdf.tool.StatementHelper.statement;
 
 import java.math.BigInteger;
-import java.net.URI;
-import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 
-import org.junit.Before;
 import org.junit.Test;
 import org.openrdf.model.Statement;
 import org.openrdf.model.impl.IntegerLiteralImpl;
 import org.openrdf.model.impl.LiteralImpl;
 import org.openrdf.query.QueryEvaluationException;
 import org.openrdf.query.TupleQueryResult;
-import org.wikidata.query.rdf.common.uri.Entity;
 import org.wikidata.query.rdf.common.uri.RDF;
 import org.wikidata.query.rdf.common.uri.RDFS;
 import org.wikidata.query.rdf.common.uri.SchemaDotOrg;
+import org.wikidata.query.rdf.tool.AbstractRdfRepositoryIntegrationTestBase;
 
 import com.google.common.collect.ImmutableList;
 
 /**
  * Tests RdfRepository against a live RDF repository.
  */
-public class RdfRepositoryIntegrationTest {
-    private final RdfRepositoryForTesting repository;
-
-    public RdfRepositoryIntegrationTest() throws URISyntaxException {
-        repository = new RdfRepositoryForTesting(new URI("http://localhost:9999/bigdata/namespace/kb/sparql"),
-                Entity.WIKIDATA);
-    }
-
-    @Before
-    public void clear() {
-        repository.clear();
-    }
-
+public class RdfRepositoryIntegrationTest extends 
AbstractRdfRepositoryIntegrationTestBase {
     @Test
     public void newSiteLink() throws QueryEvaluationException {
-        repository.sync("Q23", siteLink("Q23", "http://en.wikipedia.org/wiki/George_Washington", "en"));
-        TupleQueryResult r = repository.query("SELECT * WHERE {?s <http://schema.org/about> ?o}");
+        rdfRepository.sync("Q23", siteLink("Q23", "http://en.wikipedia.org/wiki/George_Washington", "en"));
+        TupleQueryResult r = rdfRepository.query("SELECT * WHERE {?s <http://schema.org/about> ?o}");
         assertTrue(r.hasNext());
         assertThat(r.next(), allOf(//
                 binds("s", "http://en.wikipedia.org/wiki/George_Washington"),//
@@ -60,8 +42,8 @@
     @Test
     public void moveSiteLink() throws QueryEvaluationException {
         newSiteLink();
-        repository.sync("Q23", siteLink("Q23", "http://en.wikipedia.org/wiki/George_Washingmoved", "en"));
-        TupleQueryResult r = repository.query("SELECT * WHERE {?s <http://schema.org/about> ?o}");
+        rdfRepository.sync("Q23", siteLink("Q23", "http://en.wikipedia.org/wiki/George_Washingmoved", "en"));
+        TupleQueryResult r = rdfRepository.query("SELECT * WHERE {?s <http://schema.org/about> ?o}");
         assertTrue(r.hasNext());
         assertThat(r.next(), allOf(//
                 binds("s", "http://en.wikipedia.org/wiki/George_Washingmoved"),//
@@ -71,9 +53,9 @@
 
     @Test
     public void newLabel() throws QueryEvaluationException {
-        repository.sync("Q23", ImmutableList.of(//
+        rdfRepository.sync("Q23", ImmutableList.of(//
                 statement("Q23", RDFS.LABEL, new LiteralImpl("George 
Washington", "en"))));
-        TupleQueryResult r = repository.query("SELECT * WHERE {?s ?p ?o}");
+        TupleQueryResult r = rdfRepository.query("SELECT * WHERE {?s ?p ?o}");
         assertTrue(r.hasNext());
         assertThat(r.next(), allOf(//
                 binds("s", "Q23"),//
@@ -85,9 +67,9 @@
     @Test
     public void changedLabel() throws QueryEvaluationException {
         newLabel();
-        repository.sync("Q23", ImmutableList.of(//
+        rdfRepository.sync("Q23", ImmutableList.of(//
                 statement("Q23", RDFS.LABEL, new LiteralImpl("George 
Washingmoved", "en"))));
-        TupleQueryResult r = repository.query("SELECT * WHERE {?s ?p ?o}");
+        TupleQueryResult r = rdfRepository.query("SELECT * WHERE {?s ?p ?o}");
         assertTrue(r.hasNext());
         assertThat(r.next(), allOf(//
                 binds("s", "Q23"),//
@@ -98,9 +80,9 @@
 
     @Test
     public void newLabelWithQuotes() throws QueryEvaluationException {
-        repository.sync("Q23", ImmutableList.of(//
+        rdfRepository.sync("Q23", ImmutableList.of(//
                 statement("Q23", RDFS.LABEL, new LiteralImpl("George \"Cherry 
Tree\" Washington", "en"))));
-        TupleQueryResult r = repository.query("SELECT * WHERE {?s ?p ?o}");
+        TupleQueryResult r = rdfRepository.query("SELECT * WHERE {?s ?p ?o}");
         assertTrue(r.hasNext());
         assertThat(r.next(), allOf(//
                 binds("s", "Q23"),//
@@ -111,9 +93,9 @@
 
     @Test
     public void statementWithBackslash() throws QueryEvaluationException {
-        repository.sync("Q42", ImmutableList.of(//
+        rdfRepository.sync("Q42", ImmutableList.of(//
                 statement("Q42", "P396", new 
LiteralImpl("IT\\ICCU\\RAVV\\034417"))));
-        TupleQueryResult r = repository.query("SELECT * WHERE {?s ?p ?o}");
+        TupleQueryResult r = rdfRepository.query("SELECT * WHERE {?s ?p ?o}");
         assertTrue(r.hasNext());
         assertThat(r.next(), allOf(//
                 binds("s", "Q42"),//
@@ -125,10 +107,10 @@
     @Test
     public void newLabelLanguage() throws QueryEvaluationException {
         newLabel();
-        repository.sync("Q23", ImmutableList.of(//
+        rdfRepository.sync("Q23", ImmutableList.of(//
                 statement("Q23", RDFS.LABEL, new LiteralImpl("George 
Washington", "en")),//
                 statement("Q23", RDFS.LABEL, new LiteralImpl("George 
Washington", "de"))));
-        TupleQueryResult r = repository.query("SELECT * WHERE {?s ?p ?o} ORDER 
BY ?o");
+        TupleQueryResult r = rdfRepository.query("SELECT * WHERE {?s ?p ?o} 
ORDER BY ?o");
         assertTrue(r.hasNext());
         assertThat(r.next(), allOf(//
                 binds("s", "Q23"),//
@@ -144,25 +126,25 @@
 
     @Test
     public void hasRevisionFalseIfNotPresent() {
-        assertFalse(repository.hasRevision("Q23", 10));
+        assertFalse(rdfRepository.hasRevision("Q23", 10));
     }
 
     @Test
     public void hasRevisionFalseIfTooEarly() {
         syncJustVersion("Q23", 1);
-        assertFalse(repository.hasRevision("Q23", 10));
+        assertFalse(rdfRepository.hasRevision("Q23", 10));
     }
 
     @Test
     public void hasRevisionTrueIfMatch() {
         syncJustVersion("Q23", 10);
-        assertTrue(repository.hasRevision("Q23", 10));
+        assertTrue(rdfRepository.hasRevision("Q23", 10));
     }
 
     @Test
     public void hasRevisionTrueIfAfter() {
         syncJustVersion("Q23", 10);
-        assertTrue(repository.hasRevision("Q23", 9));
+        assertTrue(rdfRepository.hasRevision("Q23", 9));
     }
 
     /**
@@ -177,9 +159,9 @@
             statements.add(statement(link, SchemaDotOrg.IN_LANGUAGE, new 
LiteralImpl(Integer.toString(i))));
             statements.add(statement(link, SchemaDotOrg.ABOUT, "Q80"));
         }
-        repository.sync("Q80", statements);
-        repository.sync("Q80", statements);
-        TupleQueryResult r = repository
+        rdfRepository.sync("Q80", statements);
+        rdfRepository.sync("Q80", statements);
+        TupleQueryResult r = rdfRepository
                 .query("PREFIX entity: 
<http://www.wikidata.org/entity/>\nSELECT (COUNT(?s) as ?sc) WHERE {?s ?p 
entity:Q80}");
         assertTrue(r.hasNext());
         assertThat(r.next(), binds("sc", new 
IntegerLiteralImpl(BigInteger.valueOf(800))));
@@ -195,9 +177,9 @@
         for (int i = 0; i < 1000; i++) {
             statements.add(statement("Q80", "P" + i, new 
IntegerLiteralImpl(BigInteger.valueOf(i))));
         }
-        repository.sync("Q80", statements);
-        repository.sync("Q80", statements);
-        TupleQueryResult r = repository
+        rdfRepository.sync("Q80", statements);
+        rdfRepository.sync("Q80", statements);
+        TupleQueryResult r = rdfRepository
                 .query("PREFIX entity: 
<http://www.wikidata.org/entity/>\nSELECT (COUNT(?p) as ?sc) WHERE {entity:Q80 
?p ?o}");
         assertTrue(r.hasNext());
         assertThat(r.next(), binds("sc", new 
IntegerLiteralImpl(BigInteger.valueOf(1000))));
@@ -207,34 +189,14 @@
     @Test
     public void delete() throws QueryEvaluationException {
         newSiteLink();
-        repository.sync("Q23", Collections.<Statement> emptyList());
-        TupleQueryResult r = repository.query("SELECT * WHERE {?s ?p ?o}");
+        rdfRepository.sync("Q23", Collections.<Statement> emptyList());
+        TupleQueryResult r = rdfRepository.query("SELECT * WHERE {?s ?p ?o}");
         assertFalse(r.hasNext());
     }
 
     private void syncJustVersion(String entityId, int version) {
         Statement statement = statement(entityId, SchemaDotOrg.VERSION,
                 new IntegerLiteralImpl(new 
BigInteger(Integer.toString(version))));
-        repository.sync(entityId, ImmutableList.of(statement));
-    }
-
-    /**
-     * RdfRepository extension used for testing. We don't want to anyone to
-     * accidentally use clear() so we don't put it in the repository.
-     */
-    public static class RdfRepositoryForTesting extends RdfRepository {
-        public RdfRepositoryForTesting(URI uri, Entity entityUris) {
-            super(uri, entityUris);
-        }
-
-        /**
-         * Clear's the whole repository.
-         */
-        public void clear() {
-            UpdateBuilder b = new UpdateBuilder();
-            b.where("?s", "?p", "?o");
-            b.delete("?s", "?p", "?o");
-            execute("update", RdfRepository.IGNORE_RESPONSE, b.toString());
-        }
+        rdfRepository.sync(entityId, ImmutableList.of(statement));
     }
 }
diff --git a/tools/src/test/resources/blazegraph/RWStore.properties 
b/tools/src/test/resources/blazegraph/RWStore.properties
index 0bf8332..3a59411 100644
--- a/tools/src/test/resources/blazegraph/RWStore.properties
+++ b/tools/src/test/resources/blazegraph/RWStore.properties
@@ -1,3 +1,7 @@
+# NOTE NOTE NOTE NOTE NOTE
+# When you change this file you must remove target/blazegraph/blazegraph.jnl and restart Blazegraph.  Or run a clean build.
+# NOTE NOTE NOTE NOTE NOTE
+
 # Dump data in target.
 com.bigdata.journal.AbstractJournal.file=target/blazegraph/blazegraph.jnl
 com.bigdata.journal.AbstractJournal.bufferMode=DiskRW
@@ -20,6 +24,7 @@
 
 # Use the default vocabulary for now.
 
com.bigdata.rdf.store.AbstractTripleStore.vocabularyClass=com.bigdata.rdf.vocab.DefaultBigdataVocabulary
+com.bigdata.rdf.store.AbstractTripleStore.extensionFactoryClass=org.wikidata.query.rdf.blazegraph.WikibaseExtensionFactory
 
 # These seem to be ubiquitous overwrites.  Not sure why they aren't the 
default but it works.
 com.bigdata.namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor=400

-- 
To view, visit https://gerrit.wikimedia.org/r/201242
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I083e4feefd752253c141de896e36c38a229639d8
Gerrit-PatchSet: 5
Gerrit-Project: wikidata/query/rdf
Gerrit-Branch: master
Gerrit-Owner: Manybubbles <never...@wikimedia.org>
Gerrit-Reviewer: Jdouglas <jdoug...@wikimedia.org>
Gerrit-Reviewer: Manybubbles <never...@wikimedia.org>
Gerrit-Reviewer: Smalyshev <smalys...@wikimedia.org>
Gerrit-Reviewer: jenkins-bot <>

_______________________________________________
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits
