jenkins-bot has submitted this change and it was merged.

Change subject: Override date functions to process WikibaseDate
......................................................................


Override date functions to process WikibaseDate

Bug: T109803
Change-Id: I9435af7160b5af02cc8959789ab629d7a002329b
---
M 
blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseContextListener.java
A 
blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/constraints/WikibaseDateBOp.java
A 
blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/constraints/WikibaseNowBOp.java
M pom.xml
A testTools/src/config/web.xml
M tools/pom.xml
M 
tools/src/test/java/org/wikidata/query/rdf/tool/WikibaseDateExtensionIntegrationTest.java
7 files changed, 558 insertions(+), 1 deletion(-)

Approvals:
  Smalyshev: Looks good to me, approved
  jenkins-bot: Verified



diff --git 
a/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseContextListener.java
 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseContextListener.java
index 61a3eb9..d212716 100644
--- 
a/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseContextListener.java
+++ 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/WikibaseContextListener.java
@@ -1,10 +1,25 @@
 package org.wikidata.query.rdf.blazegraph;
 
+import java.util.Map;
+
 import javax.servlet.ServletContextEvent;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.wikidata.query.rdf.blazegraph.constraints.WikibaseDateBOp;
+import org.wikidata.query.rdf.blazegraph.constraints.WikibaseNowBOp;
 import org.wikidata.query.rdf.blazegraph.label.LabelService;
 
+import com.bigdata.bop.BOpContextBase;
+import com.bigdata.bop.IValueExpression;
+import com.bigdata.rdf.internal.IV;
+import com.bigdata.rdf.internal.constraints.DateBOp.DateOp;
 import com.bigdata.rdf.sail.webapp.BigdataRDFServletContextListener;
+import com.bigdata.rdf.sparql.ast.FunctionRegistry;
+import com.bigdata.rdf.sparql.ast.GlobalAnnotations;
+import com.bigdata.rdf.sparql.ast.ValueExpressionNode;
+import com.bigdata.rdf.sparql.ast.FunctionRegistry.Factory;
+import com.bigdata.rdf.sparql.ast.eval.AST2BOpUtility;
 import com.bigdata.rdf.sparql.ast.eval.AbstractServiceFactoryBase;
 import com.bigdata.rdf.sparql.ast.service.IServiceOptions;
 import com.bigdata.rdf.sparql.ast.service.ServiceCall;
@@ -15,6 +30,9 @@
  * Context listener to enact configurations we need on initialization.
  */
 public class WikibaseContextListener extends BigdataRDFServletContextListener {
+
+    private static final transient Logger log = 
LoggerFactory.getLogger(WikibaseContextListener.class);
+
     /**
      * Replaces the default Blazegraph services with ones that do not allow
      * remote services and a label resolution service.
@@ -22,12 +40,68 @@
     public static void initializeServices() {
         ServiceRegistry.getInstance().setDefaultServiceFactory(new 
DisableRemotesServiceFactory());
         LabelService.register();
+
+        // Override date functions so that we can handle them
+        // via WikibaseDate
+        FunctionRegistry.remove(FunctionRegistry.YEAR);
+        FunctionRegistry.add(FunctionRegistry.YEAR, 
getWikibaseDateBOpFactory(DateOp.YEAR));
+
+        FunctionRegistry.remove(FunctionRegistry.MONTH);
+        FunctionRegistry.add(FunctionRegistry.MONTH, 
getWikibaseDateBOpFactory(DateOp.MONTH));
+
+        FunctionRegistry.remove(FunctionRegistry.DAY);
+        FunctionRegistry.add(FunctionRegistry.DAY, 
getWikibaseDateBOpFactory(DateOp.DAY));
+
+        FunctionRegistry.remove(FunctionRegistry.HOURS);
+        FunctionRegistry.add(FunctionRegistry.HOURS, 
getWikibaseDateBOpFactory(DateOp.HOURS));
+
+        FunctionRegistry.remove(FunctionRegistry.MINUTES);
+        FunctionRegistry.add(FunctionRegistry.MINUTES, 
getWikibaseDateBOpFactory(DateOp.MINUTES));
+
+        FunctionRegistry.remove(FunctionRegistry.SECONDS);
+        FunctionRegistry.add(FunctionRegistry.SECONDS, 
getWikibaseDateBOpFactory(DateOp.SECONDS));
+
+        FunctionRegistry.remove(FunctionRegistry.NOW);
+        FunctionRegistry.add(FunctionRegistry.NOW, new Factory() {
+            public IValueExpression<? extends IV> create(final BOpContextBase 
context, final GlobalAnnotations globals,
+                    Map<String, Object> scalarValues, final 
ValueExpressionNode... args) {
+
+                if (args != null && args.length > 0)
+                    throw new IllegalArgumentException("no args for NOW()");
+
+                return new WikibaseNowBOp(globals);
+            }
+        });
+
+
+        log.warn("Wikibase services initialized.");
     }
 
    @Override
    public void contextInitialized(final ServletContextEvent e) {
        super.contextInitialized(e);
        // Register Wikibase-specific services only after Blazegraph's own
        // context initialization has completed.
        initializeServices();
    }
+
+    /**
+     * Create factory for specific WikibaseDateOp operation.
+     * @param dateop
+     * @return Factory object to create WikibaseDateBOp
+     */
+    private static Factory getWikibaseDateBOpFactory(final DateOp dateop) {
+        return new Factory() {
+            public IValueExpression<? extends IV> create(final BOpContextBase 
context,
+                    final GlobalAnnotations globals, Map<String, Object> 
scalarValues, final ValueExpressionNode... args) {
+
+                FunctionRegistry.checkArgs(args,
+                        ValueExpressionNode.class);
+
+                final IValueExpression<? extends IV> left =
+                    AST2BOpUtility.toVE(context, globals, args[0]);
+
+                return new WikibaseDateBOp(left, dateop, globals);
+            }
+        };
     }
 
     /**
@@ -46,4 +120,5 @@
         }
 
     }
+
 }
diff --git 
a/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/constraints/WikibaseDateBOp.java
 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/constraints/WikibaseDateBOp.java
new file mode 100644
index 0000000..7ac76d7
--- /dev/null
+++ 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/constraints/WikibaseDateBOp.java
@@ -0,0 +1,179 @@
+package org.wikidata.query.rdf.blazegraph.constraints;
+
+import java.math.BigInteger;
+import java.util.Map;
+
+import org.wikidata.query.rdf.common.WikibaseDate;
+
+import com.bigdata.rdf.internal.impl.literal.LiteralExtensionIV;
+import com.bigdata.bop.BOp;
+import com.bigdata.bop.IBindingSet;
+import com.bigdata.bop.IValueExpression;
+import com.bigdata.bop.NV;
+import com.bigdata.rdf.error.SparqlTypeErrorException;
+import com.bigdata.rdf.internal.IV;
+import com.bigdata.rdf.internal.XSD;
+import com.bigdata.rdf.internal.constraints.DateBOp;
+import com.bigdata.rdf.internal.constraints.DateBOp.DateOp;
+import com.bigdata.rdf.internal.constraints.INeedsMaterialization;
+import com.bigdata.rdf.internal.constraints.IVValueExpression;
+import com.bigdata.rdf.internal.impl.literal.XSDIntegerIV;
+import com.bigdata.rdf.model.BigdataLiteral;
+import com.bigdata.rdf.sparql.ast.GlobalAnnotations;
+
+/**
+ * A date expression involving a left IValueExpression operand.
+ * The operation to be applied to the operands is specified by the {@link 
Annotations#OP}
+ * annotation.
+ * @see com.bigdata.rdf.internal.constraints.DateBOp
+ * We are not extending com.bigdata.rdf.internal.constraints since get() is 
final there.
+ */
+public class WikibaseDateBOp extends IVValueExpression<IV> implements 
INeedsMaterialization{
+
+    /**
+        *
+        */
+    private static final long serialVersionUID = 9136864442064392445L;
+
+    /**
+     * Backup DateBOp for dates that aren't ours.
+     */
+    private final DateBOp originalOp;
+
+    /**
+     *
+     * @param left  The left operand.
+     * @param op    The annotation specifying the operation to be performed on 
those operands.
+     */
+    public WikibaseDateBOp(final IValueExpression<? extends IV> left,
+            final DateOp op, final GlobalAnnotations globals) {
+
+        this(new BOp[] {left}, anns(globals, new NV(DateBOp.Annotations.OP, 
op)));
+
+    }
+
+    /**
+     * Required shallow copy constructor.
+     *
+     * @param args
+     *            The operands.
+     * @param op
+     *            The operation.
+     */
+    public WikibaseDateBOp(final BOp[] args, Map<String, Object> anns) {
+
+        super(args, anns);
+
+        if (args.length != 1 || args[0] == null || 
getProperty(DateBOp.Annotations.OP) == null) {
+
+            throw new IllegalArgumentException();
+
+        }
+        originalOp = new DateBOp(args, anns);
+    }
+
+    /**
+     * Constructor required for {@link 
com.bigdata.bop.BOpUtility#deepCopy(FilterNode)}.
+     *
+     * @param op
+     */
+    public WikibaseDateBOp(final WikibaseDateBOp op) {
+
+        super(op);
+        originalOp = new DateBOp(op.originalOp);
+    }
+
+    /**
+     * Get Wikibase date from IV.
+     * @param iv
+     * @return Wikibase date object
+     */
+    private WikibaseDate getWikibaseDate(IV iv) {
+
+        return 
WikibaseDate.fromSecondsSinceEpoch(((LiteralExtensionIV)iv).getDelegate().longValue());
+
+    }
+
+    /**
+     * Get expression value.
+     */
+    @SuppressWarnings("checkstyle:cyclomaticcomplexity")
+    public IV get(final IBindingSet bs) {
+
+        final IV left = left().get(bs);
+
+        // not yet bound?
+        if (left == null) {
+            throw new SparqlTypeErrorException.UnboundVarException();
+        }
+
+        if (!(left instanceof LiteralExtensionIV)) {
+            return originalOp.get(bs);
+        }
+
+        if (left.isLiteral()) {
+
+            BigdataLiteral bl = (BigdataLiteral) left.getValue();
+            if (XSD.DATETIME.equals(bl.getDatatype())) {
+                WikibaseDate date = getWikibaseDate(left);
+
+                switch (op()) {
+                case YEAR:
+                    return new XSDIntegerIV(BigInteger.valueOf(date.year()));
+                case MONTH:
+                    return new XSDIntegerIV(BigInteger.valueOf(date.month()));
+                case DAY:
+                    return new XSDIntegerIV(BigInteger.valueOf(date.day()));
+                case HOURS:
+                    return new XSDIntegerIV(BigInteger.valueOf(date.hour()));
+                case MINUTES:
+                    return new XSDIntegerIV(BigInteger.valueOf(date.minute()));
+                case SECONDS:
+                    return new XSDIntegerIV(BigInteger.valueOf(date.second()));
+                default:
+                    throw new UnsupportedOperationException();
+                }
+            } else {
+                return originalOp.get(bs);
+            }
+        }
+        throw new SparqlTypeErrorException();
+    }
+
+    /**
+     * Get left operand.
+     * @return
+     */
+    public IValueExpression<? extends IV> left() {
+        return get(0);
+    }
+
+    /**
+     * Get annotated operation.
+     * @return
+     */
+    public DateOp op() {
+        return (DateOp) getRequiredProperty(DateBOp.Annotations.OP);
+    }
+
+    /**
+     * Convert to string.
+     */
+    public String toString() {
+
+        final StringBuilder sb = new StringBuilder();
+        sb.append(op());
+        sb.append("(").append(left()).append(")");
+        return sb.toString();
+
+    }
+
+    /**
+     * Materialization requirements.
+     */
+    public Requirement getRequirement() {
+        return Requirement.SOMETIMES;
+    }
+
+}
+
diff --git 
a/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/constraints/WikibaseNowBOp.java
 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/constraints/WikibaseNowBOp.java
new file mode 100644
index 0000000..41646ed
--- /dev/null
+++ 
b/blazegraph/src/main/java/org/wikidata/query/rdf/blazegraph/constraints/WikibaseNowBOp.java
@@ -0,0 +1,94 @@
+package org.wikidata.query.rdf.blazegraph.constraints;
+
+import java.util.Calendar;
+import java.util.Locale;
+import java.util.Map;
+import java.util.TimeZone;
+
+import org.openrdf.model.vocabulary.XMLSchema;
+import org.wikidata.query.rdf.common.WikibaseDate;
+import org.wikidata.query.rdf.common.WikibaseDate.ToStringFormat;
+
+import com.bigdata.bop.BOp;
+import com.bigdata.bop.IBindingSet;
+import com.bigdata.rdf.internal.IV;
+import com.bigdata.rdf.internal.constraints.INeedsMaterialization;
+import com.bigdata.rdf.internal.constraints.IVValueExpression;
+import com.bigdata.rdf.sparql.ast.GlobalAnnotations;
+
+/**
+ * Implements the now() operator.
+ */
+public class WikibaseNowBOp extends IVValueExpression<IV> implements 
INeedsMaterialization{
+
+    /**
+        *
+        */
+    private static final long serialVersionUID = 9136864442064392445L;
+
+    /**
+     * Ctor.
+     * @param globals
+     */
+    public WikibaseNowBOp(final GlobalAnnotations globals) {
+
+        this(BOp.NOARGS, anns(globals));
+
+    }
+
+    /**
+     * Required shallow copy constructor.
+     *
+     * @param args
+     *            The operands.
+     * @param op
+     *            The operation.
+     */
+    public WikibaseNowBOp(final BOp[] args, Map<String, Object> anns) {
+
+        super(args, anns);
+
+    }
+
+    /**
+     * Constructor required for {@link 
com.bigdata.bop.BOpUtility#deepCopy(FilterNode)}.
+     *
+     * @param op
+     */
+    public WikibaseNowBOp(final WikibaseNowBOp op) {
+
+        super(op);
+
+    }
+
+    /**
+     * Get expression value.
+     */
+    public IV get(final IBindingSet bs) {
+
+        final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"), 
Locale.ROOT);
+        WikibaseDate wd = 
WikibaseDate.fromSecondsSinceEpoch(cal.getTimeInMillis() / 1000);
+        return super.asIV(getValueFactory().createLiteral(
+                wd.toString(ToStringFormat.DATE_TIME),
+                XMLSchema.DATETIME
+               ), bs);
+    }
+
+     /**
+      * Convert operation to string now().
+      */
+    public String toString() {
+
+        return "now()";
+
+    }
+
+    /**
+     * Never needs materialization.
+     */
+    public Requirement getRequirement() {
+        return Requirement.NEVER;
+    }
+
+}
+
diff --git a/pom.xml b/pom.xml
index 50f4e67..c5820b2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -57,7 +57,7 @@
 
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <blazegraph.version>1.5.2</blazegraph.version>
+    <blazegraph.version>1.5.3-wmf-1</blazegraph.version>
     <!-- This Blazegraph version has backports onto the 1.5.1 branch and is 
hosted at WMF. Documentation on how to do that
       is in the backport_blazegraph.txt file. -->
     <sesame.version>2.8.1</sesame.version>
diff --git a/testTools/src/config/web.xml b/testTools/src/config/web.xml
new file mode 100644
index 0000000..6451204
--- /dev/null
+++ b/testTools/src/config/web.xml
@@ -0,0 +1,140 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<web-app xmlns="http://java.sun.com/xml/ns/javaee";
+      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+      xsi:schemaLocation="http://java.sun.com/xml/ns/javaee 
http://java.sun.com/xml/ns/javaee/web-app_3_1.xsd";
+      version="3.1">
+  <display-name>Bigdata</display-name>
+  <description>Bigdata</description>
+  <context-param>
+   <description>The property file (for a standalone database instance) or the
+   jini configuration file (for a federation).  The file MUST end with either
+   ".properties" or ".config".  This path is relative to the directory from
+   which you start the servlet container so you may have to edit it for your
+   installation, e.g., by specifying an absolute path.  Also, it is a good
+   idea to review the RWStore.properties file as well and specify the location
+   of the database file on which it will persist your data.
+
+   Note: You MAY override this parameter using
+   "-Dcom.bigdata.rdf.sail.webapp.ConfigParams.propertyFile=FILE"
+   when starting the servlet container.
+   </description>
+   <!-- Note: This path is relative to the directory in which you start -->
+   <!-- the servlet container. For the IDE, this is generally the root  -->
+   <!-- of the bigdata project. For the WAR, it depends where you start -->
+   <!-- the servlet container. The "ant war" target rewrites this to be -->
+   <!-- relative to the root of the servlet container by default.       -->
+   <param-name>propertyFile</param-name>
+   <param-value>RWStore.properties</param-value>
+  </context-param>
+  <context-param>
+   <description>The default bigdata namespace of for the triple or quad store
+   instance to be exposed.</description>
+   <param-name>namespace</param-name>
+   <param-value>kb</param-value>
+  </context-param>
+  <context-param>
+   <description>When true a new triple or quads store instance will be created
+   if none is found at that namespace.</description>
+   <param-name>create</param-name>
+   <param-value>true</param-value>
+  </context-param>
+  <context-param>
+   <description>The size of the thread pool used to service SPARQL queries -OR-
+    ZERO (0) for an unbounded thread pool.</description>
+   <param-name>queryThreadPoolSize</param-name>
+   <param-value>32</param-value>
+  </context-param>
+  <context-param>
+   <description>When true, the REST API will not permit mutation 
operations.</description>
+   <param-name>readOnly</param-name>
+   <param-value>false</param-value>
+  </context-param>
+  <context-param>
+   <description>When non-zero, the timeout for queries 
(milliseconds).</description>
+   <param-name>queryTimeout</param-name>
+   <param-value>30000</param-value>
+  </context-param>
+  <!-- We can't use the builtin whitelist because it breaks label resolution. 
But we enable our own whitelist so it's all good. -->
+  <!--
+  <context-param>
+   <description>List of allowed services.</description>
+   <param-name>serviceWhitelist</param-name>
+   <param-value>http://www.bigdata.com/rdf#describe</param-value>
+  </context-param>
+  -->
+  <listener>
+   
<listener-class>org.wikidata.query.rdf.blazegraph.WikibaseContextListener</listener-class>
+  </listener>
+  <servlet>
+   <servlet-name>REST API</servlet-name>
+   <display-name>REST API</display-name>
+   <description>The REST API, including a SPARQL end point, as described at
+   
https://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer
+   </description>
+   <servlet-class>com.bigdata.rdf.sail.webapp.RESTServlet</servlet-class>
+   <load-on-startup>0</load-on-startup>
+   <async-supported>true</async-supported>
+  </servlet>
+  <servlet>
+   <servlet-name>Multi-Tenancy API</servlet-name>
+   <display-name>Multi-Tenancy API</display-name>
+   <description>The REST API for managing multiple KBs in a single Journal
+   or Federation.
+   </description>
+   
<servlet-class>com.bigdata.rdf.sail.webapp.MultiTenancyServlet</servlet-class>
+   <async-supported>true</async-supported>
+  </servlet>
+  <servlet>
+   <servlet-name>Status</servlet-name>
+   <display-name>Status</display-name>
+   <description>A status page.</description>
+   <servlet-class>com.bigdata.rdf.sail.webapp.StatusServlet</servlet-class>
+   <async-supported>true</async-supported>
+  </servlet>
+  <servlet>
+   <servlet-name>Counters</servlet-name>
+   <display-name>Performance counters</display-name>
+   <description>Performance counters.</description>
+   <servlet-class>com.bigdata.rdf.sail.webapp.CountersServlet</servlet-class>
+   <async-supported>true</async-supported>
+  </servlet>
+  <!-- Note: The HALoadBalancerServlet is deployed from override-web.xml -->
+  <!-- Serve anything under /html/* as a simple file. -->
+  <servlet-mapping>
+    <servlet-name>default</servlet-name>
+    <url-pattern>/html/*</url-pattern>
+  </servlet-mapping>
+  <!-- Mapping for the default KB namespace (as configured above). -->
+  <servlet-mapping>
+  <servlet-name>REST API</servlet-name>
+  <url-pattern>/sparql</url-pattern>
+  </servlet-mapping>
+  <!-- Mapping for access to non-default KB namespaces.
+  <servlet-mapping>
+  <servlet-name>REST API</servlet-name>
+  <url-pattern>/sparql/*</url-pattern>
+  </servlet-mapping> -->
+  <!-- Mappings for the multi-tenancy API. -->
+  <servlet-mapping>
+  <servlet-name>Multi-Tenancy API</servlet-name>
+  <url-pattern>/namespace</url-pattern>
+  </servlet-mapping>
+  <servlet-mapping>
+  <servlet-name>Multi-Tenancy API</servlet-name>
+  <url-pattern>/namespace/*</url-pattern>
+  </servlet-mapping>
+  <!-- Mapping for the status page. -->
+  <servlet-mapping>
+  <servlet-name>Status</servlet-name>
+  <url-pattern>/status</url-pattern>
+  </servlet-mapping>
+  <!-- Mapping for the performance counters page. -->
+  <servlet-mapping>
+  <servlet-name>Counters</servlet-name>
+  <url-pattern>/counters</url-pattern>
+  </servlet-mapping>
+  <!-- Map the initial request into the UI. -->
+  <welcome-file-list>
+    <welcome-file>html/index.html</welcome-file>
+  </welcome-file-list>
+</web-app>
diff --git a/tools/pom.xml b/tools/pom.xml
index 414eb84..f098a49 100644
--- a/tools/pom.xml
+++ b/tools/pom.xml
@@ -201,6 +201,28 @@
         </executions>
       </plugin>
       <plugin>
+        <artifactId>maven-resources-plugin</artifactId>
+        <!-- Copy web.xml from testTools to blazegraph webapp -->
+        <executions>
+          <execution>
+            <id>copy-resources</id>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>copy-resources</goal>
+            </goals>
+            <configuration>
+              
<outputDirectory>${basedir}/target/blazegraph/WEB-INF</outputDirectory>
+              <resources>
+                <resource>
+                  <directory>${basedir}/../testTools/src/config</directory>
+                  <filtering>false</filtering>
+                </resource>
+              </resources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
         <groupId>org.eclipse.jetty</groupId>
         <artifactId>jetty-maven-plugin</artifactId>
         <version>9.2.10.v20150310</version>
diff --git 
a/tools/src/test/java/org/wikidata/query/rdf/tool/WikibaseDateExtensionIntegrationTest.java
 
b/tools/src/test/java/org/wikidata/query/rdf/tool/WikibaseDateExtensionIntegrationTest.java
index a976c2a..72378af 100644
--- 
a/tools/src/test/java/org/wikidata/query/rdf/tool/WikibaseDateExtensionIntegrationTest.java
+++ 
b/tools/src/test/java/org/wikidata/query/rdf/tool/WikibaseDateExtensionIntegrationTest.java
@@ -4,7 +4,10 @@
 import static org.wikidata.query.rdf.test.StatementHelper.statement;
 
 import java.util.ArrayList;
+import java.util.Calendar;
 import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
 
 import org.junit.Test;
 import org.openrdf.model.Statement;
@@ -92,4 +95,48 @@
         result = results.next();
         assertThat(result, binds("diff", new LiteralImpl("-5.039616015E12", 
XMLSchema.DOUBLE)));
     }
+
    @Test
    public void dateFunctions() throws QueryEvaluationException {
        // Store a date in year 0000 and check that the overridden year()/day()
        // accessors decode it.
        List<Statement> statements = new ArrayList<>();
        statements.add(statement("Q23", "P569", new LiteralImpl("0000-01-01T00:00:00Z", XMLSchema.DATETIME)));
        rdfRepository().sync("Q23", statements);
        TupleQueryResult results = rdfRepository().query("SELECT (year(?date) as ?year) WHERE { ?s ?p ?date }");
        BindingSet result = results.next();
        assertThat(result, binds("year", new LiteralImpl("0", XMLSchema.INTEGER)));
        // The FILTER also exercises now(): year 0 can never equal the current
        // year, so the row must pass the filter.
        results = rdfRepository().query("SELECT (day(?date) as ?day) WHERE { ?s ?p ?date FILTER (year(?date) != year(now())) }");
        result = results.next();
        assertThat(result, binds("day", new LiteralImpl("1", XMLSchema.INTEGER)));
    }
+
    @Test
    public void dateFunctionsMore() throws QueryEvaluationException {
        // Checks every overridden accessor (year/month/day/hours/minutes/
        // seconds) against a date whose components are all distinct.
        List<Statement> statements = new ArrayList<>();
        statements.add(statement("Q23", "P569", new LiteralImpl("0000-01-02T03:04:05Z", XMLSchema.DATETIME)));
        rdfRepository().sync("Q23", statements);
        TupleQueryResult results = rdfRepository().query("SELECT " +
            "(year(?date) as ?year) " +
            "(month(?date) as ?month) " +
            "(day(?date) as ?day) " +
            "(hours(?date) as ?hour) " +
            "(minutes(?date) as ?min) " +
            "(seconds(?date) as ?sec) " +
            " WHERE { ?s ?p ?date }");
        BindingSet result = results.next();
        assertThat(result, binds("year", new LiteralImpl("0", XMLSchema.INTEGER)));
        assertThat(result, binds("month", new LiteralImpl("1", XMLSchema.INTEGER)));
        assertThat(result, binds("day", new LiteralImpl("2", XMLSchema.INTEGER)));
        assertThat(result, binds("hour", new LiteralImpl("3", XMLSchema.INTEGER)));
        assertThat(result, binds("min", new LiteralImpl("4", XMLSchema.INTEGER)));
        assertThat(result, binds("sec", new LiteralImpl("5", XMLSchema.INTEGER)));
    }
+
    @Test
    public void dateNow() throws QueryEvaluationException {
        // year(now()) evaluated by the server should agree with the JVM's
        // current UTC year.
        // NOTE(review): could flake if the query and the local Calendar read
        // straddle a year boundary.
        TupleQueryResult results = rdfRepository().query("SELECT (year(now()) as ?year) WHERE {  }");
        BindingSet result = results.next();
        int year = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT).get(Calendar.YEAR);
        assertThat(result, binds("year", new LiteralImpl(String.valueOf(year), XMLSchema.INTEGER)));
    }
+
 }

-- 
To view, visit https://gerrit.wikimedia.org/r/232892
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I9435af7160b5af02cc8959789ab629d7a002329b
Gerrit-PatchSet: 8
Gerrit-Project: wikidata/query/rdf
Gerrit-Branch: master
Gerrit-Owner: Smalyshev <[email protected]>
Gerrit-Reviewer: DCausse <[email protected]>
Gerrit-Reviewer: Smalyshev <[email protected]>
Gerrit-Reviewer: jenkins-bot <>

_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits

Reply via email to