Mock HTTP calls in EntityCache

Antonin Delpeuch 2019-12-21 19:09:03 +01:00
parent 909d347650
commit 1355a373c9
12 changed files with 103 additions and 9 deletions
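
In outline, the change makes the HTTP-backed WikibaseDataFetcher injectable into EntityCache (and EntityCache injectable into WikidataConstraintFetcher), so tests can substitute a Mockito mock or a JSON-backed stub for the live Wikidata API. A condensed sketch of the resulting test-side pattern; this assumes the caller lives in the same package as EntityCache, since its new constructors are protected (see EntityCacheTests below for the real test):

    // Sketch only: replace the HTTP fetcher with a mock, so no network I/O happens
    WikibaseDataFetcher fetcher = mock(WikibaseDataFetcher.class);
    when(fetcher.getEntityDocument("P42")).thenReturn(someDocument); // someDocument: any EntityDocument
    EntityCache cache = new EntityCache(fetcher);
    cache.get(Datamodel.makeWikidataPropertyIdValue("P42")); // served by the mock, then cached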

org/openrefine/wikidata/qa/EditInspector.java

@@ -46,6 +46,7 @@ import org.openrefine.wikidata.qa.scrutinizers.UnsourcedScrutinizer;
 import org.openrefine.wikidata.qa.scrutinizers.WhitespaceScrutinizer;
 import org.openrefine.wikidata.updates.ItemUpdate;
 import org.openrefine.wikidata.updates.scheduler.WikibaseAPIUpdateScheduler;
+import org.openrefine.wikidata.utils.EntityCache;
 import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;

 /**
@@ -62,7 +63,7 @@ public class EditInspector {

     public EditInspector(QAWarningStore warningStore) {
         this.scrutinizers = new HashMap<>();
-        this.fetcher = new WikidataConstraintFetcher();
+        this.fetcher = new WikidataConstraintFetcher(EntityCache.getEntityCache());
         this.warningStore = warningStore;

         // Register all known scrutinizers here

org/openrefine/wikidata/qa/WikidataConstraintFetcher.java

@@ -102,6 +102,12 @@ public class WikidataConstraintFetcher implements ConstraintFetcher {

     public static String TYPE_CONSTRAINT_QID = "Q21503250";

+    protected EntityCache entityCache;
+
+    public WikidataConstraintFetcher(EntityCache cache) {
+        entityCache = cache;
+    }
+
     @Override
     public String getFormatRegex(PropertyIdValue pid) {
         List<SnakGroup> specs = getSingleConstraint(pid, FORMAT_CONSTRAINT_QID);
@@ -304,7 +310,7 @@ public class WikidataConstraintFetcher implements ConstraintFetcher {
      * @return the list of constraint statements
      */
     protected List<Statement> getConstraintStatements(PropertyIdValue pid) {
-        PropertyDocument doc = (PropertyDocument) EntityCache.getEntityDocument(pid);
+        PropertyDocument doc = (PropertyDocument) entityCache.get(pid);
         StatementGroup group = doc.findStatementGroup(WIKIDATA_CONSTRAINT_PID);
         if (group != null) {
             return group.getStatements().stream()

org/openrefine/wikidata/utils/EntityCache.java

@@ -39,14 +39,17 @@ import com.google.common.cache.LoadingCache;

 public class EntityCache {

-    private static EntityCache _entityCache = new EntityCache();
+    private static EntityCache _entityCache = new EntityCache(BasicApiConnection.getWikidataApiConnection());

     private LoadingCache<String, EntityDocument> _cache = null;
     private WikibaseDataFetcher _fetcher;

-    private EntityCache() {
-        ApiConnection connection = BasicApiConnection.getWikidataApiConnection();
-        _fetcher = new WikibaseDataFetcher(connection, Datamodel.SITE_WIKIDATA);
+    protected EntityCache(ApiConnection connection) {
+        this(new WikibaseDataFetcher(connection, Datamodel.SITE_WIKIDATA));
+    }
+
+    protected EntityCache(WikibaseDataFetcher fetcher) {
+        _fetcher = fetcher;
         _cache = CacheBuilder.newBuilder().maximumSize(4096).expireAfterWrite(1, TimeUnit.HOURS)
                 .build(new CacheLoader<String, EntityDocument>() {
@@ -69,7 +72,7 @@ public class EntityCache {

     public static EntityCache getEntityCache() {
         if (_entityCache == null) {
-            _entityCache = new EntityCache();
+            _entityCache = new EntityCache(BasicApiConnection.getWikidataApiConnection());
         }
         return _entityCache;
     }
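
The first hunk above cuts off inside the CacheBuilder call, and the instance accessor get(EntityIdValue) used by WikidataConstraintFetcher is not shown. A plausible reconstruction of both, assuming the loader simply delegates to the injected fetcher:

    // Assumed continuation (not part of the diff): the loader performs the actual fetch
    protected EntityCache(WikibaseDataFetcher fetcher) {
        _fetcher = fetcher;
        _cache = CacheBuilder.newBuilder().maximumSize(4096).expireAfterWrite(1, TimeUnit.HOURS)
                .build(new CacheLoader<String, EntityDocument>() {
                    @Override
                    public EntityDocument load(String entityId) throws Exception {
                        // one HTTP request per cache miss, through the injected fetcher
                        return _fetcher.getEntityDocument(entityId);
                    }
                });
    }

    // Assumed shape of the accessor seen as entityCache.get(pid) above:
    public EntityDocument get(EntityIdValue id) {
        return _cache.getUnchecked(id.getId());
    }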

6 file diffs suppressed because one or more lines are too long

org/openrefine/wikidata/qa/WikidataConstraintFetcherTests.java

@@ -25,6 +25,7 @@ package org.openrefine.wikidata.qa;

 import java.util.regex.Pattern;

+import org.openrefine.wikidata.utils.EntityCacheStub;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@@ -44,7 +45,7 @@ public class WikidataConstraintFetcherTests {
     private PropertyIdValue child;

     public WikidataConstraintFetcherTests() {
-        fetcher = new WikidataConstraintFetcher();
+        fetcher = new WikidataConstraintFetcher(new EntityCacheStub());
         headOfGovernment = Datamodel.makeWikidataPropertyIdValue("P6");
         startTime = Datamodel.makeWikidataPropertyIdValue("P580");
         endTime = Datamodel.makeWikidataPropertyIdValue("P582");
@@ -89,7 +90,7 @@ public class WikidataConstraintFetcherTests {
     @Test
     public void testSingleValue() {
         Assert.assertFalse(fetcher.hasSingleValue(headOfGovernment));
-        Assert.assertTrue(fetcher.hasSingleValue(gridId));
+        Assert.assertTrue(fetcher.hasSingleValue(mother));
     }

     @Test

org/openrefine/wikidata/utils/EntityCacheStub.java

@@ -0,0 +1,41 @@
+package org.openrefine.wikidata.utils;
+
+import java.io.InputStream;
+
+import org.wikidata.wdtk.datamodel.helpers.Datamodel;
+import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper;
+import org.wikidata.wdtk.datamodel.implementation.EntityDocumentImpl;
+import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
+import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
+import org.wikidata.wdtk.wikibaseapi.BasicApiConnection;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * Stub of the EntityCache class, which fetches entities from a local cache
+ * instead of the live site.
+ *
+ * @author Antonin Delpeuch
+ */
+public class EntityCacheStub extends EntityCache {
+
+    private ObjectMapper mapper = new DatamodelMapper(Datamodel.SITE_WIKIDATA);
+
+    public EntityCacheStub() {
+        super(BasicApiConnection.getWikidataApiConnection());
+    }
+
+    @Override
+    public EntityDocument get(EntityIdValue id) {
+        String filename = "entitycache/entitycache-" + id.getId() + ".json";
+        InputStream stream = EntityCacheStub.class.getClassLoader().getResourceAsStream(filename);
+        try {
+            // TODO This should ideally be hidden in a helper:
+            // https://github.com/Wikidata/Wikidata-Toolkit/issues/471
+            return mapper.readValue(stream, EntityDocumentImpl.class);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+        return null;
+    }
+}
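
A minimal usage sketch; it assumes a fixture such as entitycache/entitycache-P6.json on the test classpath (presumably among the suppressed resource files above):

    // Illustrative: no HTTP request is made, the document comes from a local JSON file
    EntityCacheStub stub = new EntityCacheStub();
    PropertyIdValue pid = Datamodel.makeWikidataPropertyIdValue("P6");
    EntityDocument doc = stub.get(pid); // deserialized from entitycache/entitycache-P6.json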

org/openrefine/wikidata/utils/EntityCacheTests.java

@@ -0,0 +1,36 @@
+package org.openrefine.wikidata.utils;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import java.io.IOException;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+import org.wikidata.wdtk.datamodel.helpers.Datamodel;
+import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
+import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument;
+import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
+import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;
+import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException;
+
+public class EntityCacheTests {
+
+    @Test
+    public void testGet() throws MediaWikiApiErrorException, IOException {
+        WikibaseDataFetcher fetcher = mock(WikibaseDataFetcher.class);
+        PropertyIdValue id = Datamodel.makeWikidataPropertyIdValue("P42");
+        PropertyDocument doc = Datamodel.makePropertyDocument(id,
+                Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_GEO_SHAPE));
+        when(fetcher.getEntityDocument(id.getId())).thenReturn(doc);
+        EntityCache SUT = new EntityCache(fetcher);
+
+        Assert.assertEquals(SUT.get(id), doc);
+        // try another time: it is now cached
+        Assert.assertEquals(SUT.get(id), doc);
+        // the fetcher was only called once, thanks to caching
+        verify(fetcher, times(1)).getEntityDocument(id.getId());
+    }
+}