Gehel has uploaded a new change for review. (
https://gerrit.wikimedia.org/r/402395 )
Change subject: Extract collaborators of WikibaseRepository
......................................................................
Extract collaborators of WikibaseRepository
This is a preliminary step that will allow sharing HttpClient between
WikibaseRepository and RdfRepository.
Change-Id: I2bb98d5b1c9ff8996a219f398641e9b228fc85aa
---
M tools/src/main/java/org/wikidata/query/rdf/tool/HttpClientUtils.java
M tools/src/main/java/org/wikidata/query/rdf/tool/Update.java
M
tools/src/main/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepository.java
M
tools/src/test/java/org/wikidata/query/rdf/tool/AbstractUpdaterIntegrationTestBase.java
M
tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryIntegrationTest.java
M
tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryWireIntegrationTest.java
6 files changed, 180 insertions(+), 181 deletions(-)
git pull ssh://gerrit.wikimedia.org:29418/wikidata/query/rdf
refs/changes/95/402395/5
diff --git
a/tools/src/main/java/org/wikidata/query/rdf/tool/HttpClientUtils.java
b/tools/src/main/java/org/wikidata/query/rdf/tool/HttpClientUtils.java
index 805a379..eaec48d 100644
--- a/tools/src/main/java/org/wikidata/query/rdf/tool/HttpClientUtils.java
+++ b/tools/src/main/java/org/wikidata/query/rdf/tool/HttpClientUtils.java
@@ -1,13 +1,44 @@
package org.wikidata.query.rdf.tool;
+import java.io.InterruptedIOException;
+import java.net.UnknownHostException;
+
+import javax.net.ssl.SSLException;
+
+import org.apache.http.HttpEntityEnclosingRequest;
+import org.apache.http.HttpRequest;
+import org.apache.http.HttpResponse;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.HttpRequestRetryHandler;
+import org.apache.http.client.ServiceUnavailableRetryStrategy;
import org.apache.http.client.config.CookieSpecs;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.conn.ConnectTimeoutException;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.DefaultServiceUnavailableRetryStrategy;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.protocol.HttpContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Utilities for dealing with HttpClient.
*/
public final class HttpClientUtils {
+
+ private static final Logger log =
LoggerFactory.getLogger(HttpClientUtils.class);
+
+ /**
+ * How many retries allowed on error.
+ */
+ public static final int RETRIES = 3;
+ /**
+ * Retry interval, in ms.
+ */
+ public static final int RETRY_INTERVAL = 500;
+
/**
* Configure request to ignore cookies.
*/
@@ -19,4 +50,82 @@
private HttpClientUtils() {
// Uncallable utility constructor
}
+
+ public static CloseableHttpClient createHttpClient() {
+ return HttpClients.custom()
+ .setMaxConnPerRoute(100).setMaxConnTotal(100)
+ .setRetryHandler(getRetryHandler(RETRIES))
+ .setServiceUnavailableRetryStrategy(getRetryStrategy(RETRIES,
RETRY_INTERVAL))
+ .disableCookieManagement()
+ .setUserAgent("Wikidata Query Service Updater")
+ .build();
+ }
+
+ /**
+ * Return retry strategy for "service unavailable".
+ * This one handles 503 and 429 by retrying it after a fixed period.
+ * TODO: 429 may contain header that we may want to use for retrying?
+ * @param max Maximum number of retries.
+ * @param interval Interval between retries, ms.
+ * @see DefaultServiceUnavailableRetryStrategy
+     * @return a retry strategy that retries 503 and 429 responses at a fixed interval
+ */
+ public static ServiceUnavailableRetryStrategy getRetryStrategy(final int
max, final int interval) {
+ // This is the same as DefaultServiceUnavailableRetryStrategy but also
handles 429
+ return new ServiceUnavailableRetryStrategy() {
+ @Override
+ public boolean retryRequest(final HttpResponse response, final int
executionCount, final HttpContext context) {
+ return executionCount <= max &&
+ (response.getStatusLine().getStatusCode() ==
HttpStatus.SC_SERVICE_UNAVAILABLE ||
+ response.getStatusLine().getStatusCode() == 429);
+ }
+
+ @Override
+ public long getRetryInterval() {
+ return interval;
+ }
+ };
+ }
+
+ /**
+ * Create retry handler.
+ * Note: this is for retrying I/O exceptions.
+ * @param max Maximum retries number.
+     * @return a handler deciding whether an I/O exception warrants a retry
+ */
+ public static HttpRequestRetryHandler getRetryHandler(final int max) {
+ return (exception, executionCount, context) -> {
+ log.debug("Exception in attempt {}", executionCount, exception);
+ if (executionCount >= max) {
+ // Do not retry if over max retry count
+ return false;
+ }
+ if (exception instanceof InterruptedIOException) {
+ // Timeout
+ return true;
+ }
+ if (exception instanceof UnknownHostException) {
+ // Unknown host
+ return false;
+ }
+ if (exception instanceof ConnectTimeoutException) {
+            // Connection establishment timed out (not "refused", which raises ConnectException)
+ return true;
+ }
+ if (exception instanceof SSLException) {
+ // SSL handshake exception
+ return false;
+ }
+
+ HttpClientContext clientContext = HttpClientContext.adapt(context);
+ HttpRequest request = clientContext.getRequest();
+ boolean idempotent = !(request instanceof
HttpEntityEnclosingRequest);
+ if (idempotent) {
+ // Retry if the request is considered idempotent
+ return true;
+ }
+
+ return false;
+ };
+ }
}
diff --git a/tools/src/main/java/org/wikidata/query/rdf/tool/Update.java
b/tools/src/main/java/org/wikidata/query/rdf/tool/Update.java
index 5d768ac..3ed90cd 100644
--- a/tools/src/main/java/org/wikidata/query/rdf/tool/Update.java
+++ b/tools/src/main/java/org/wikidata/query/rdf/tool/Update.java
@@ -16,6 +16,7 @@
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+import org.apache.http.impl.client.CloseableHttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.query.rdf.common.uri.WikibaseUris;
@@ -27,6 +28,7 @@
import org.wikidata.query.rdf.tool.rdf.Munger;
import org.wikidata.query.rdf.tool.rdf.RdfRepository;
import org.wikidata.query.rdf.tool.wikibase.WikibaseRepository;
+import org.wikidata.query.rdf.tool.wikibase.WikibaseRepository.Uris;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -44,39 +46,30 @@
* Run updates configured from the command line.
* @throws Exception on error
*/
- // Catching exception is OK in a main exception handler, more so since the
- // exception is rethrown
+ // Catching exception is OK in a main exception handler
@SuppressWarnings("checkstyle:IllegalCatch")
- public static void main(String[] args) throws Exception {
- RdfRepository rdfRepository = null;
- Updater<? extends Change.Batch> updater;
- WikibaseRepository wikibaseRepository;
+ public static void main(String[] args) {
+ UpdateOptions options = handleOptions(UpdateOptions.class, args);
+ URI sparqlUri = sparqlUri(options);
+ WikibaseUris uris = new WikibaseUris(options.wikibaseHost());
- try {
- UpdateOptions options = handleOptions(UpdateOptions.class, args);
- wikibaseRepository = buildWikibaseRepository(options);
- URI sparqlUri = sparqlUri(options);
- WikibaseUris uris = new WikibaseUris(options.wikibaseHost());
- rdfRepository = new RdfRepository(sparqlUri, uris);
+ try (CloseableHttpClient httpClient =
HttpClientUtils.createHttpClient();
+ RdfRepository rdfRepository = new RdfRepository(sparqlUri, uris)
+ ) {
+ WikibaseRepository wikibaseRepository = new
WikibaseRepository(getUris(options), httpClient);
Change.Source<? extends Change.Batch> changeSource =
buildChangeSource(options, rdfRepository,
wikibaseRepository);
- updater = createUpdater(options, wikibaseRepository, uris,
rdfRepository, changeSource);
+
+ try (Updater<? extends Change.Batch> updater =
createUpdater(options, wikibaseRepository, uris, rdfRepository, changeSource)) {
+ updater.run();
+ } catch (Exception e) {
+ log.error("Error during updater run.", e);
+ System.exit(-1);
+ }
+
} catch (Exception e) {
log.error("Error during initialization.", e);
- if (rdfRepository != null) {
- rdfRepository.close();
- }
- throw e;
- }
- try (
- WikibaseRepository w = wikibaseRepository;
- RdfRepository r = rdfRepository;
- Updater u = updater
- ) {
- updater.run();
- } catch (Exception e) {
- log.error("Error during updater run.", e);
- throw e;
+ System.exit(-1);
}
}
@@ -195,15 +188,9 @@
return new RecentChangesPoller(wikibaseRepository, new
Date(startTime), options.batchSize(), options.tailPollerOffset());
}
- /**
- * Build WikibaseRepository object.
- *
- * @return null if non can be built - its ok to just exit - errors have
been
- * logged to the user
- */
- private static WikibaseRepository buildWikibaseRepository(UpdateOptions
options) {
+ private static Uris getUris(UpdateOptions options) {
if (options.entityNamespaces() == null) {
- return new WikibaseRepository(options.wikibaseScheme(),
options.wikibaseHost());
+ return new Uris(options.wikibaseScheme(), options.wikibaseHost());
}
String[] strEntityNamespaces = options.entityNamespaces().split(",");
// FIXME use OptionsUtils.splitByComma(options.entityNamespaces())
@@ -215,6 +202,6 @@
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Invalid value for
--entityNamespaces. Namespace index should be an integer.", e);
}
- return new WikibaseRepository(options.wikibaseScheme(),
options.wikibaseHost(), 0, longEntityNamespaces);
+ return new Uris(options.wikibaseScheme(), options.wikibaseHost(), 0,
longEntityNamespaces);
}
}
diff --git
a/tools/src/main/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepository.java
b/tools/src/main/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepository.java
index b7d3e16..abacf47 100644
---
a/tools/src/main/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepository.java
+++
b/tools/src/main/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepository.java
@@ -1,9 +1,7 @@
package org.wikidata.query.rdf.tool.wikibase;
-import java.io.Closeable;
import java.io.IOException;
import java.io.InputStreamReader;
-import java.io.InterruptedIOException;
import java.net.SocketException;
import java.net.URI;
import java.net.URISyntaxException;
@@ -18,31 +16,19 @@
import java.util.Locale;
import java.util.TimeZone;
-import javax.net.ssl.SSLException;
import javax.net.ssl.SSLHandshakeException;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.http.Consts;
-import org.apache.http.HttpEntityEnclosingRequest;
-import org.apache.http.HttpRequest;
-import org.apache.http.HttpResponse;
-import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
-import org.apache.http.client.HttpRequestRetryHandler;
-import org.apache.http.client.ServiceUnavailableRetryStrategy;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
-import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.URIBuilder;
-import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.DefaultServiceUnavailableRetryStrategy;
-import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
-import org.apache.http.protocol.HttpContext;
import org.openrdf.model.Statement;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
@@ -75,18 +61,9 @@
*/
// TODO fan out complexity
@SuppressWarnings("checkstyle:classfanoutcomplexity")
-public class WikibaseRepository implements Closeable {
+public class WikibaseRepository {
private static final Logger log =
LoggerFactory.getLogger(WikibaseRepository.class);
- /**
- * How many retries allowed on error.
- */
- private static final int RETRIES = 3;
-
- /**
- * Retry interval, in ms.
- */
- private static final int RETRY_INTERVAL = 500;
/**
* Standard representation of dates in Mediawiki API (ISO 8601).
@@ -96,13 +73,7 @@
/**
* HTTP client for wikibase.
*/
- private final CloseableHttpClient client = HttpClients.custom()
- .setMaxConnPerRoute(100).setMaxConnTotal(100)
- .setRetryHandler(getRetryHandler(RETRIES))
- .setServiceUnavailableRetryStrategy(getRetryStrategy(RETRIES,
RETRY_INTERVAL))
- .disableCookieManagement()
- .setUserAgent("Wikidata Query Service Updater")
- .build();
+ private final CloseableHttpClient client;
/**
* Builds uris to get stuff from wikibase.
@@ -116,91 +87,14 @@
*/
private final ObjectMapper mapper = new ObjectMapper();
- public WikibaseRepository(String scheme, String host) {
- uris = new Uris(scheme, host);
+ public WikibaseRepository(Uris uris, CloseableHttpClient httpClient) {
+ this.uris = uris;
configureObjectMapper(mapper);
- }
-
- public WikibaseRepository(String scheme, String host, int port) {
- uris = new Uris(scheme, host, port);
- configureObjectMapper(mapper);
- }
-
- public WikibaseRepository(String scheme, String host, int port, long[]
entityNamespaces) {
- uris = new Uris(scheme, host, port, entityNamespaces);
- configureObjectMapper(mapper);
+ client = httpClient;
}
private void configureObjectMapper(ObjectMapper mapper) {
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
- }
-
- /**
- * Return retry strategy for "service unavailable".
- * This one handles 503 and 429 by retrying it after a fixed period.
- * TODO: 429 may contain header that we may want to use for retrying?
- * @param max Maximum number of retries.
- * @param interval Interval between retries, ms.
- * @see DefaultServiceUnavailableRetryStrategy
- * @return
- */
- private static ServiceUnavailableRetryStrategy getRetryStrategy(final int
max, final int interval) {
- // This is the same as DefaultServiceUnavailableRetryStrategy but also
handles 429
- return new ServiceUnavailableRetryStrategy() {
- @Override
- public boolean retryRequest(final HttpResponse response, final int
executionCount, final HttpContext context) {
- return executionCount <= max &&
- (response.getStatusLine().getStatusCode() ==
HttpStatus.SC_SERVICE_UNAVAILABLE ||
- response.getStatusLine().getStatusCode() == 429);
- }
-
- @Override
- public long getRetryInterval() {
- return interval;
- }
- };
- }
-
- /**
- * Create retry handler.
- * Note: this is for retrying I/O exceptions.
- * @param max Maximum retries number.
- * @return
- */
- private static HttpRequestRetryHandler getRetryHandler(final int max) {
- return (exception, executionCount, context) -> {
- log.debug("Exception in attempt {}", executionCount, exception);
- if (executionCount >= max) {
- // Do not retry if over max retry count
- return false;
- }
- if (exception instanceof InterruptedIOException) {
- // Timeout
- return true;
- }
- if (exception instanceof UnknownHostException) {
- // Unknown host
- return false;
- }
- if (exception instanceof ConnectTimeoutException) {
- // Connection refused
- return true;
- }
- if (exception instanceof SSLException) {
- // SSL handshake exception
- return false;
- }
-
- HttpClientContext clientContext = HttpClientContext.adapt(context);
- HttpRequest request = clientContext.getRequest();
- boolean idempotent = !(request instanceof
HttpEntityEnclosingRequest);
- if (idempotent) {
- // Retry if the request is considered idempotent
- return true;
- }
-
- return false;
- };
}
/**
@@ -431,11 +325,6 @@
*/
public boolean isValidEntity(String name) {
return name.matches("^[A-Za-z0-9:]+$");
- }
-
- @Override
- public void close() throws IOException {
- client.close();
}
/**
diff --git
a/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractUpdaterIntegrationTestBase.java
b/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractUpdaterIntegrationTestBase.java
index 7851a48..3b3a36e 100644
---
a/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractUpdaterIntegrationTestBase.java
+++
b/tools/src/test/java/org/wikidata/query/rdf/tool/AbstractUpdaterIntegrationTestBase.java
@@ -7,6 +7,7 @@
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+import org.apache.http.impl.client.CloseableHttpClient;
import org.junit.Rule;
import org.junit.runner.RunWith;
import org.wikidata.query.rdf.common.uri.WikibaseUris;
@@ -15,6 +16,7 @@
import org.wikidata.query.rdf.tool.change.IdRangeChangeSource;
import org.wikidata.query.rdf.tool.rdf.Munger;
import org.wikidata.query.rdf.tool.wikibase.WikibaseRepository;
+import org.wikidata.query.rdf.tool.wikibase.WikibaseRepository.Uris;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
import com.carrotsearch.randomizedtesting.RandomizedTest;
@@ -24,11 +26,15 @@
*/
@RunWith(RandomizedRunner.class)
public class AbstractUpdaterIntegrationTestBase extends RandomizedTest {
+
+ @Rule
+ public final CloseableRule<CloseableHttpClient> httpClient =
autoClose(HttpClientUtils.createHttpClient());
/**
* Wikibase test against.
*/
- @Rule
- public final CloseableRule<WikibaseRepository> wikibaseRepository =
autoClose(new WikibaseRepository("https", "www.wikidata.org"));
+ public final WikibaseRepository wikibaseRepository =
+ new WikibaseRepository(new Uris("https", "www.wikidata.org"),
httpClient.get());
+
/**
* Munger to test against.
*/
@@ -48,7 +54,7 @@
Change.Source<?> source = IdRangeChangeSource.forItems(from, to, 30);
ExecutorService executorService = new ThreadPoolExecutor(0, 10, 0,
TimeUnit.SECONDS, new LinkedBlockingQueue<>());
WikibaseUris uris = new WikibaseUris("www.wikidata.org");
- try (Updater<?> updater = new Updater<>(source,
wikibaseRepository.get(), rdfRepository, munger, executorService, 0, uris,
false)) {
+ try (Updater<?> updater = new Updater<>(source, wikibaseRepository,
rdfRepository, munger, executorService, 0, uris, false)) {
updater.run();
}
}
diff --git
a/tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryIntegrationTest.java
b/tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryIntegrationTest.java
index 973c8bf..0d4e258 100644
---
a/tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryIntegrationTest.java
+++
b/tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryIntegrationTest.java
@@ -13,15 +13,18 @@
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.time.DateUtils;
+import org.apache.http.impl.client.CloseableHttpClient;
import org.junit.Rule;
import org.junit.Test;
import org.openrdf.model.Statement;
import org.wikidata.query.rdf.common.uri.WikibaseUris;
import org.wikidata.query.rdf.test.CloseableRule;
+import org.wikidata.query.rdf.tool.HttpClientUtils;
import org.wikidata.query.rdf.tool.change.Change;
import org.wikidata.query.rdf.tool.exception.ContainedException;
import org.wikidata.query.rdf.tool.exception.RetryableException;
import org.wikidata.query.rdf.tool.wikibase.RecentChangeResponse.RecentChange;
+import org.wikidata.query.rdf.tool.wikibase.WikibaseRepository.Uris;
import com.carrotsearch.randomizedtesting.RandomizedTest;
@@ -32,8 +35,9 @@
public class WikibaseRepositoryIntegrationTest extends RandomizedTest {
private static final String HOST = "test.wikidata.org";
@Rule
- public final CloseableRule<WikibaseRepository> repo = autoClose(new
WikibaseRepository("https", HOST));
- private final CloseableRule<WikibaseRepository> proxyRepo = autoClose(new
WikibaseRepository("http", "localhost", 8812));
+ public final CloseableRule<CloseableHttpClient> httpClient =
autoClose(HttpClientUtils.createHttpClient());
+ private final WikibaseRepository repo = new WikibaseRepository(new
Uris("https", HOST), httpClient.get());
+ private final WikibaseRepository proxyRepo = new WikibaseRepository(new
Uris("http", "localhost", 8812), httpClient.get());
private final WikibaseUris uris = new WikibaseUris(HOST);
@Test
@@ -44,7 +48,7 @@
* is probably ok.
*/
int batchSize = randomIntBetween(3, 30);
- RecentChangeResponse changes = repo.get().fetchRecentChanges(new
Date(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(30)),
+ RecentChangeResponse changes = repo.fetchRecentChanges(new
Date(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(30)),
null, batchSize);
assertNotNull(changes.getContinue());
assertNotNull(changes.getContinue());
@@ -59,8 +63,8 @@
assertNotNull(rc.getTimestamp());
assertNotNull(rc.getRevId());
}
- final Date nextDate =
repo.get().getChangeFromContinue(changes.getContinue()).timestamp();
- changes = repo.get().fetchRecentChanges(nextDate, null, batchSize);
+ final Date nextDate =
repo.getChangeFromContinue(changes.getContinue()).timestamp();
+ changes = repo.fetchRecentChanges(nextDate, null, batchSize);
assertNotNull(changes.getQuery());
assertNotNull(changes.getQuery().getRecentChanges());
}
@@ -72,7 +76,7 @@
* This relies on there being very few changes in the current
* second.
*/
- RecentChangeResponse changes = repo.get().fetchRecentChanges(new
Date(System.currentTimeMillis()), null, 500);
+ RecentChangeResponse changes = repo.fetchRecentChanges(new
Date(System.currentTimeMillis()), null, 500);
assertNull(changes.getContinue());
assertNotNull(changes.getQuery());
assertNotNull(changes.getQuery().getRecentChanges());
@@ -96,7 +100,7 @@
} catch (InterruptedException e) {
// nothing to do here, sorry. I know it looks bad.
}
- RecentChangeResponse result = repo.get().fetchRecentChanges(date,
null, batchSize);
+ RecentChangeResponse result = repo.fetchRecentChanges(date, null,
batchSize);
return result.getQuery().getRecentChanges();
}
@@ -104,8 +108,8 @@
private void editShowsUpInRecentChangesTestCase(String label, String type)
throws RetryableException,
ContainedException {
long now = System.currentTimeMillis();
- String entityId = repo.get().firstEntityIdForLabelStartingWith(label,
"en", type);
- repo.get().setLabel(entityId, type, label + now, "en");
+ String entityId = repo.firstEntityIdForLabelStartingWith(label, "en",
type);
+ repo.setLabel(entityId, type, label + now, "en");
List<RecentChange> changes = getRecentChanges(new Date(now - 10000),
10);
boolean found = false;
String title = entityId;
@@ -120,7 +124,7 @@
}
}
assertTrue("Didn't find new page in recent changes", found);
- Collection<Statement> statements =
repo.get().fetchRdfForEntity(entityId);
+ Collection<Statement> statements = repo.fetchRdfForEntity(entityId);
found = false;
for (Statement statement : statements) {
if (statement.getSubject().stringValue().equals(uris.entity() +
entityId)) {
@@ -134,9 +138,10 @@
@Test
public void fetchIsNormalized() throws RetryableException,
ContainedException, IOException {
long now = System.currentTimeMillis();
- try (WikibaseRepository proxyRepo = new WikibaseRepository("http",
"localhost", 8812)) {
- String entityId =
repo.get().firstEntityIdForLabelStartingWith("QueryTestItem", "en", "item");
- repo.get().setLabel(entityId, "item", "QueryTestItem" + now, "en");
+ try (CloseableHttpClient httpClient =
HttpClientUtils.createHttpClient()) {
+ WikibaseRepository proxyRepo = new WikibaseRepository(new
Uris("http", "localhost", 8812), httpClient);
+ String entityId =
repo.firstEntityIdForLabelStartingWith("QueryTestItem", "en", "item");
+ repo.setLabel(entityId, "item", "QueryTestItem" + now, "en");
Collection<Statement> statements =
proxyRepo.fetchRdfForEntity(entityId);
boolean foundBad = false;
boolean foundGood = false;
@@ -162,8 +167,8 @@
@Test
public void continueWorks() throws RetryableException, ContainedException,
ParseException, InterruptedException {
long now = System.currentTimeMillis();
- String entityId =
repo.get().firstEntityIdForLabelStartingWith("QueryTestItem", "en", "item");
- repo.get().setLabel(entityId, "item", "QueryTestItem" + now, "en");
+ String entityId =
repo.firstEntityIdForLabelStartingWith("QueryTestItem", "en", "item");
+ repo.setLabel(entityId, "item", "QueryTestItem" + now, "en");
List<RecentChange> changes = getRecentChanges(new Date(now - 10000),
10);
Change change = null;
Long oldRevid = 0L;
@@ -181,7 +186,7 @@
// Ensure this change is in different second
Thread.sleep(1000);
// make new edit now
- repo.get().setLabel(entityId, "item", "QueryTestItem" + now +
"updated", "en");
+ repo.setLabel(entityId, "item", "QueryTestItem" + now + "updated",
"en");
changes = getRecentChanges(DateUtils.addSeconds(change.timestamp(),
1), 10);
// check that new result does not contain old edit but contains new
edit
boolean found = false;
@@ -198,7 +203,7 @@
@Test
@SuppressWarnings({ "unchecked", "rawtypes" })
public void recentChangesWithErrors() throws RetryableException,
ContainedException {
- RecentChangeResponse changes = proxyRepo.get().fetchRecentChanges(new
Date(System.currentTimeMillis()), null, 500);
+ RecentChangeResponse changes = proxyRepo.fetchRecentChanges(new
Date(System.currentTimeMillis()), null, 500);
assertNull(changes.getContinue());
assertNotNull(changes.getQuery());
assertNotNull(changes.getQuery().getRecentChanges());
diff --git
a/tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryWireIntegrationTest.java
b/tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryWireIntegrationTest.java
index fe31c65..9a9abf4 100644
---
a/tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryWireIntegrationTest.java
+++
b/tools/src/test/java/org/wikidata/query/rdf/tool/wikibase/WikibaseRepositoryWireIntegrationTest.java
@@ -10,37 +10,40 @@
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
+import static org.wikidata.query.rdf.test.CloseableRule.autoClose;
import static
org.wikidata.query.rdf.tool.wikibase.WikibaseRepository.inputDateFormat;
import java.io.IOException;
import java.text.ParseException;
import java.util.Date;
-import org.junit.After;
+import org.apache.http.impl.client.CloseableHttpClient;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
+import org.wikidata.query.rdf.test.CloseableRule;
+import org.wikidata.query.rdf.tool.HttpClientUtils;
import org.wikidata.query.rdf.tool.exception.RetryableException;
+import org.wikidata.query.rdf.tool.wikibase.WikibaseRepository.Uris;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
import com.google.common.io.Resources;
public class WikibaseRepositoryWireIntegrationTest {
+ @Rule
+ public final CloseableRule<CloseableHttpClient> httpClient =
autoClose(HttpClientUtils.createHttpClient());
+
@Rule public WireMockRule wireMockRule = new WireMockRule(wireMockConfig()
.dynamicPort()
.dynamicHttpsPort());
+
+
private WikibaseRepository repository;
-
@Before
- public void createWikibaseRepository() {
- repository = new WikibaseRepository("http", "localhost",
wireMockRule.port());
- }
-
- @After
- public void shutdownWikibaseRepository() throws IOException {
- repository.close();
+ public void createRepository() {
+ repository = new WikibaseRepository(new Uris("http", "localhost",
wireMockRule.port()), httpClient.get());
}
@Test
--
To view, visit https://gerrit.wikimedia.org/r/402395
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: newchange
Gerrit-Change-Id: I2bb98d5b1c9ff8996a219f398641e9b228fc85aa
Gerrit-PatchSet: 5
Gerrit-Project: wikidata/query/rdf
Gerrit-Branch: master
Gerrit-Owner: Gehel <[email protected]>
Gerrit-Reviewer: Gehel <[email protected]>
Gerrit-Reviewer: jenkins-bot <>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits