Make Wikibase extension compatible with other entity types than Items (#4289)

* Document clone command, and Java requirements

* Make extension compatible with other types

Fixes #4268: Refactor Wikibase extension methods for editing other entity
types than Items.
Add test case for property updates on existing entities.
This commit is contained in:
Joey 2021-11-16 17:35:01 +01:00 committed by GitHub
parent bd23966b09
commit 21fc4375bd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
60 changed files with 474 additions and 341 deletions

View File

@ -4,7 +4,7 @@ import java.util.List;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.qa.QAWarning.Severity;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
@ -16,7 +16,7 @@ public class PreviewResults {
protected Severity maxSeverity;
protected int nbWarnings;
protected int editCount;
protected List<ItemUpdate> editsPreview;
protected List<TermedStatementEntityUpdate> editsPreview;
@JsonProperty("warnings")
public List<QAWarning> getWarnings() {
@ -39,7 +39,7 @@ public class PreviewResults {
}
@JsonProperty("edits_preview")
public List<ItemUpdate> getEditsPreview() {
public List<TermedStatementEntityUpdate> getEditsPreview() {
return editsPreview;
}
@ -48,7 +48,7 @@ public class PreviewResults {
Severity maxSeverity,
int nbWarnings,
int editCount,
List<ItemUpdate> editsPreview) {
List<TermedStatementEntityUpdate> editsPreview) {
this.warnings = warnings;
this.maxSeverity = maxSeverity;
this.nbWarnings = nbWarnings;

View File

@ -40,7 +40,7 @@ import org.openrefine.wikidata.manifests.ManifestParser;
import org.openrefine.wikidata.qa.EditInspector;
import org.openrefine.wikidata.qa.QAWarningStore;
import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.scheduler.WikibaseAPIUpdateScheduler;
import com.google.refine.browsing.Engine;
@ -103,7 +103,7 @@ public class PreviewWikibaseSchemaCommand extends Command {
// Evaluate project
Engine engine = getEngine(request, project);
List<ItemUpdate> editBatch = schema.evaluate(project, engine, warningStore);
List<TermedStatementEntityUpdate> editBatch = schema.evaluate(project, engine, warningStore);
// Inspect the edits and generate warnings
EditInspector inspector = new EditInspector(warningStore, manifest);
@ -111,10 +111,10 @@ public class PreviewWikibaseSchemaCommand extends Command {
// Dump the first 10 edits, scheduled with the default scheduler
WikibaseAPIUpdateScheduler scheduler = new WikibaseAPIUpdateScheduler();
List<ItemUpdate> nonNullEdits = scheduler.schedule(editBatch).stream()
List<TermedStatementEntityUpdate> nonNullEdits = scheduler.schedule(editBatch).stream()
.filter(e -> !e.isNull())
.collect(Collectors.toList());
List<ItemUpdate> firstEdits = nonNullEdits.stream()
List<TermedStatementEntityUpdate> firstEdits = nonNullEdits.stream()
.limit(10)
.collect(Collectors.toList());

View File

@ -32,13 +32,14 @@ import java.util.stream.Collectors;
import org.openrefine.wikidata.schema.entityvalues.ReconEntityIdValue;
import org.openrefine.wikidata.schema.exceptions.NewItemNotCreatedYetException;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.scheduler.WikibaseAPIUpdateScheduler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataEditor;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;
@ -58,12 +59,12 @@ public class EditBatchProcessor {
private WikibaseDataFetcher fetcher;
private WikibaseDataEditor editor;
private NewItemLibrary library;
private List<ItemUpdate> scheduled;
private List<TermedStatementEntityUpdate> scheduled;
private String summary;
private List<String> tags;
private List<ItemUpdate> remainingUpdates;
private List<ItemUpdate> currentBatch;
private List<TermedStatementEntityUpdate> remainingUpdates;
private List<TermedStatementEntityUpdate> currentBatch;
private int batchCursor;
private int globalCursor;
private Map<String, EntityDocument> currentDocs;
@ -90,7 +91,7 @@ public class EditBatchProcessor {
* the number of items that should be retrieved in one go from the
* API
*/
public EditBatchProcessor(WikibaseDataFetcher fetcher, WikibaseDataEditor editor, List<ItemUpdate> updates,
public EditBatchProcessor(WikibaseDataFetcher fetcher, WikibaseDataEditor editor, List<TermedStatementEntityUpdate> updates,
NewItemLibrary library, String summary, int maxLag, List<String> tags, int batchSize) {
this.fetcher = fetcher;
this.editor = editor;
@ -132,7 +133,7 @@ public class EditBatchProcessor {
if (batchCursor == currentBatch.size()) {
prepareNewBatch();
}
ItemUpdate update = currentBatch.get(batchCursor);
TermedStatementEntityUpdate update = currentBatch.get(batchCursor);
// Rewrite mentions to new items
ReconEntityRewriter rewriter = new ReconEntityRewriter(library, update.getItemId());
@ -150,7 +151,7 @@ public class EditBatchProcessor {
ReconEntityIdValue newCell = (ReconEntityIdValue) update.getItemId();
update = update.normalizeLabelsAndAliases();
ItemDocument itemDocument = Datamodel.makeItemDocument(update.getItemId(),
ItemDocument itemDocument = Datamodel.makeItemDocument((ItemIdValue) update.getItemId(),
update.getLabels().stream().collect(Collectors.toList()),
update.getDescriptions().stream().collect(Collectors.toList()),
update.getAliases().stream().collect(Collectors.toList()), update.getAddedStatementGroups(),

View File

@ -27,19 +27,24 @@ import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.openrefine.wikidata.schema.entityvalues.ReconEntityIdValue;
import org.openrefine.wikidata.schema.entityvalues.ReconItemIdValue;
import org.openrefine.wikidata.schema.entityvalues.ReconMediaInfoIdValue;
import org.openrefine.wikidata.schema.entityvalues.ReconPropertyIdValue;
import org.openrefine.wikidata.schema.exceptions.NewItemNotCreatedYetException;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.helpers.DatamodelConverter;
import org.wikidata.wdtk.datamodel.implementation.DataObjectFactoryImpl;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
/**
* A class that rewrites an {@link ItemUpdate}, replacing reconciled entity id
* A class that rewrites a {@link TermedStatementEntityUpdate}, replacing reconciled entity id
* values by their concrete values after creation of all the new items involved.
*
* If an item has not been created yet, an {@link IllegalArgumentException} will
@ -56,7 +61,7 @@ import org.wikidata.wdtk.datamodel.interfaces.Statement;
public class ReconEntityRewriter extends DatamodelConverter {
private final NewItemLibrary library;
private final ItemIdValue subject;
private final EntityIdValue subject;
protected static final String notCreatedYetMessage = "Trying to rewrite an update where a new item was not created yet.";
@ -69,7 +74,7 @@ public class ReconEntityRewriter extends DatamodelConverter {
* @param subject
* the subject id of the entity to rewrite
*/
public ReconEntityRewriter(NewItemLibrary library, ItemIdValue subject) {
public ReconEntityRewriter(NewItemLibrary library, EntityIdValue subject) {
super(new DataObjectFactoryImpl());
this.library = library;
this.subject = subject;
@ -83,7 +88,7 @@ public class ReconEntityRewriter extends DatamodelConverter {
String newId = library.getQid(recon.getReconInternalId());
if (newId == null) {
if (subject.equals(recon)) {
return subject;
return (ItemIdValue) subject;
} else {
throw new MissingEntityIdFound(recon);
}
@ -94,6 +99,43 @@ public class ReconEntityRewriter extends DatamodelConverter {
return super.copy(value);
}
/**
 * Rewrites a media-info id value, replacing a reconciled ("new") id by the
 * concrete id allocated when the corresponding entity was created.
 * Values that are not reconciled, or reconciled to an existing entity, are
 * copied as-is.
 *
 * @param value
 *      the media-info id to rewrite
 * @return the rewritten id
 * @throws MissingEntityIdFound
 *      if the value refers to a new entity (other than the subject of this
 *      rewriter) which has not been created yet
 */
public MediaInfoIdValue copy(MediaInfoIdValue value) {
    if (value instanceof ReconMediaInfoIdValue) {
        ReconMediaInfoIdValue recon = (ReconMediaInfoIdValue) value;
        if (recon.isNew()) {
            String newId = library.getQid(recon.getReconInternalId());
            if (newId == null) {
                if (subject.equals(recon)) {
                    // the subject itself may legitimately not be created yet
                    return (MediaInfoIdValue) subject;
                } else {
                    throw new MissingEntityIdFound(recon);
                }
            }
            return Datamodel.makeMediaInfoIdValue(newId, recon.getRecon().identifierSpace);
        }
    }
    // Copy the id directly. The previous fallback cast the value to
    // ItemIdValue before calling super.copy, which would throw a
    // ClassCastException at runtime: MediaInfoIdValue is not an ItemIdValue.
    return Datamodel.makeMediaInfoIdValue(value.getId(), value.getSiteIri());
}
/**
 * Rewrites a property id value, replacing a reconciled ("new") id by the
 * concrete id allocated when the corresponding entity was created.
 * Values that are not reconciled, or reconciled to an existing entity, are
 * delegated to the parent converter.
 *
 * @param value
 *      the property id to rewrite
 * @return the rewritten id
 * @throws MissingEntityIdFound
 *      if the value refers to a new entity (other than the subject of this
 *      rewriter) which has not been created yet
 */
@Override
public PropertyIdValue copy(PropertyIdValue value) {
    if (value instanceof ReconPropertyIdValue) {
        ReconPropertyIdValue recon = (ReconPropertyIdValue) value;
        if (recon.isNew()) {
            String newId = library.getQid(recon.getReconInternalId());
            if (newId == null) {
                if (subject.equals(recon)) {
                    // the subject itself may legitimately not be created yet
                    return (PropertyIdValue) subject;
                } else {
                    throw new MissingEntityIdFound(recon);
                }
            }
            return Datamodel.makePropertyIdValue(newId, recon.getRecon().identifierSpace);
        }
    }
    // value is statically a PropertyIdValue already; the cast that was here
    // before was redundant.
    return super.copy(value);
}
/**
* Rewrite an update, replacing references to all entities already
* created by their fresh identifiers. The subject id might not have been
@ -107,9 +149,9 @@ public class ReconEntityRewriter extends DatamodelConverter {
* @throws NewItemNotCreatedYetException
* if any non-subject entity had not been created yet
*/
public ItemUpdate rewrite(ItemUpdate update) throws NewItemNotCreatedYetException {
public TermedStatementEntityUpdate rewrite(TermedStatementEntityUpdate update) throws NewItemNotCreatedYetException {
try {
ItemIdValue subject = copy(update.getItemId());
EntityIdValue subject = (EntityIdValue) copyValue(update.getItemId());
Set<MonolingualTextValue> labels = update.getLabels().stream().map(l -> copy(l)).collect(Collectors.toSet());
Set<MonolingualTextValue> labelsIfNew = update.getLabelsIfNew().stream().map(l -> copy(l)).collect(Collectors.toSet());
Set<MonolingualTextValue> descriptions = update.getDescriptions().stream().map(l -> copy(l))
@ -121,7 +163,7 @@ public class ReconEntityRewriter extends DatamodelConverter {
.collect(Collectors.toList());
Set<Statement> deletedStatements = update.getDeletedStatements().stream().map(l -> copy(l))
.collect(Collectors.toSet());
return new ItemUpdate(subject, addedStatements, deletedStatements, labels, labelsIfNew, descriptions, descriptionsIfNew, aliases);
return new TermedStatementEntityUpdate(subject, addedStatements, deletedStatements, labels, labelsIfNew, descriptions, descriptionsIfNew, aliases);
} catch(MissingEntityIdFound e) {
throw new NewItemNotCreatedYetException(e.value);
}

View File

@ -30,13 +30,13 @@ import java.util.Properties;
import java.util.Set;
import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.scheduler.ImpossibleSchedulingException;
import org.openrefine.wikidata.updates.scheduler.QuickStatementsUpdateScheduler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.datamodel.interfaces.Claim;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.Reference;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
@ -93,16 +93,16 @@ public class QuickStatementsExporter implements WriterExporter {
*/
public void translateSchema(Project project, Engine engine, WikibaseSchema schema, Writer writer)
throws IOException {
List<ItemUpdate> items = schema.evaluate(project, engine);
List<TermedStatementEntityUpdate> items = schema.evaluate(project, engine);
translateItemList(items, writer);
}
public void translateItemList(List<ItemUpdate> updates, Writer writer)
public void translateItemList(List<TermedStatementEntityUpdate> updates, Writer writer)
throws IOException {
QuickStatementsUpdateScheduler scheduler = new QuickStatementsUpdateScheduler();
try {
List<ItemUpdate> scheduled = scheduler.schedule(updates);
for (ItemUpdate item : scheduled) {
List<TermedStatementEntityUpdate> scheduled = scheduler.schedule(updates);
for (TermedStatementEntityUpdate item : scheduled) {
translateItem(item, writer);
}
} catch (ImpossibleSchedulingException e) {
@ -111,7 +111,7 @@ public class QuickStatementsExporter implements WriterExporter {
}
protected void translateNameDescr(String qid, Set<MonolingualTextValue> values, String prefix, ItemIdValue id,
protected void translateNameDescr(String qid, Set<MonolingualTextValue> values, String prefix, EntityIdValue id,
Writer writer)
throws IOException {
for (MonolingualTextValue value : values) {
@ -124,7 +124,7 @@ public class QuickStatementsExporter implements WriterExporter {
}
}
protected void translateItem(ItemUpdate item, Writer writer)
protected void translateItem(TermedStatementEntityUpdate item, Writer writer)
throws IOException {
String qid = item.getItemId().getId();
if (item.isNew()) {

View File

@ -39,7 +39,7 @@ import org.openrefine.wikidata.commands.ConnectionManager;
import org.openrefine.wikidata.editing.EditBatchProcessor;
import org.openrefine.wikidata.editing.NewItemLibrary;
import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.util.WebResourceFetcherImpl;
@ -231,7 +231,7 @@ public class PerformWikibaseEditsOperation extends EngineDependentOperation {
}
// Evaluate the schema
List<ItemUpdate> itemDocuments = _schema.evaluate(_project, _engine);
List<TermedStatementEntityUpdate> itemDocuments = _schema.evaluate(_project, _engine);
// Prepare the edits
NewItemLibrary newItemLibrary = new NewItemLibrary();

View File

@ -26,7 +26,7 @@ package org.openrefine.wikidata.qa;
import org.openrefine.wikidata.manifests.Manifest;
import org.openrefine.wikidata.qa.scrutinizers.*;
import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.scheduler.WikibaseAPIUpdateScheduler;
import org.openrefine.wikidata.utils.EntityCache;
import org.slf4j.Logger;
@ -119,7 +119,7 @@ public class EditInspector {
*
* @param editBatch
*/
public void inspect(List<ItemUpdate> editBatch, WikibaseSchema schema) throws ExecutionException {
public void inspect(List<TermedStatementEntityUpdate> editBatch, WikibaseSchema schema) throws ExecutionException {
// First, schedule them with some scheduler,
// so that all newly created entities appear in the batch
SchemaPropertyExtractor fetcher = new SchemaPropertyExtractor();
@ -131,14 +131,14 @@ public class EditInspector {
WikibaseAPIUpdateScheduler scheduler = new WikibaseAPIUpdateScheduler();
editBatch = scheduler.schedule(editBatch);
Map<EntityIdValue, ItemUpdate> updates = ItemUpdate.groupBySubject(editBatch);
List<ItemUpdate> mergedUpdates = updates.values().stream().collect(Collectors.toList());
Map<EntityIdValue, TermedStatementEntityUpdate> updates = TermedStatementEntityUpdate.groupBySubject(editBatch);
List<TermedStatementEntityUpdate> mergedUpdates = updates.values().stream().collect(Collectors.toList());
for (EditScrutinizer scrutinizer : scrutinizers.values()) {
scrutinizer.batchIsBeginning();
}
for(ItemUpdate update : mergedUpdates) {
for(TermedStatementEntityUpdate update : mergedUpdates) {
if(!update.isNull()) {
for (EditScrutinizer scrutinizer : scrutinizers.values()) {
scrutinizer.scrutinize(update);

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import java.util.Set;
@ -15,14 +15,14 @@ public class CommonDescriptionScrutinizer extends DescriptionScrutinizer {
public static final String descIdenticalWithLabel = "item-description-identical-with-label";
@Override
public void scrutinize(ItemUpdate update, String descText, String lang) {
public void scrutinize(TermedStatementEntityUpdate update, String descText, String lang) {
checkLength(update, descText, lang);
checkLabel(update, descText, lang);
}
// Descriptions are not full sentences, but small bits of information.
// In most cases, the proper length is between two and twelve words.
protected void checkLength(ItemUpdate update, String descText, String lang) {
protected void checkLength(TermedStatementEntityUpdate update, String descText, String lang) {
final int maxLength = 250;
if (descText.length() > maxLength) {
QAWarning issue = new QAWarning(descTooLongType, null, QAWarning.Severity.CRITICAL, 1);
@ -36,7 +36,7 @@ public class CommonDescriptionScrutinizer extends DescriptionScrutinizer {
}
// Description are expected to be more specific than labels.
protected void checkLabel(ItemUpdate update, String descText, String lang) {
protected void checkLabel(TermedStatementEntityUpdate update, String descText, String lang) {
Set<MonolingualTextValue> labels = update.getLabels();
labels.addAll(update.getLabelsIfNew()); // merge
for (MonolingualTextValue label : labels) {

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
import org.wikidata.wdtk.datamodel.interfaces.SnakGroup;
@ -56,7 +56,7 @@ public class ConflictsWithScrutinizer extends EditScrutinizer {
}
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
Map<PropertyIdValue, Set<Value>> propertyIdValueValueMap = new HashMap<>();
for (Statement statement : update.getAddedStatements()){
PropertyIdValue pid = statement.getClaim().getMainSnak().getPropertyId();

View File

@ -1,6 +1,6 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import java.util.Set;
@ -11,7 +11,7 @@ import java.util.Set;
public abstract class DescriptionScrutinizer extends EditScrutinizer {
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
Set<MonolingualTextValue> descriptions = update.getDescriptions();
descriptions.addAll(update.getDescriptionsIfNew()); // merge
for (MonolingualTextValue description : descriptions) {
@ -28,6 +28,6 @@ public abstract class DescriptionScrutinizer extends EditScrutinizer {
}
}
public abstract void scrutinize(ItemUpdate update, String descText, String lang);
public abstract void scrutinize(TermedStatementEntityUpdate update, String descText, String lang);
}

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.*;
import java.util.HashMap;
@ -50,7 +50,7 @@ public class DifferenceWithinRangeScrutinizer extends EditScrutinizer {
}
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
Map<PropertyIdValue, Value> propertyIdValueValueMap = new HashMap<>();
for (Statement statement : update.getAddedStatements()){
Snak mainSnak = statement.getClaim().getMainSnak();

View File

@ -28,7 +28,7 @@ import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.qa.QAWarning.Severity;
import org.openrefine.wikidata.qa.QAWarningStore;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
import org.wikidata.wdtk.datamodel.interfaces.SnakGroup;
import org.wikidata.wdtk.datamodel.interfaces.Value;
@ -89,7 +89,7 @@ public abstract class EditScrutinizer {
* @param edit:
*            the list of TermedStatementEntityUpdate objects to scrutinize
*/
public abstract void scrutinize(ItemUpdate edit);
public abstract void scrutinize(TermedStatementEntityUpdate edit);
/**
* Method called once the edit batch has been read entirely

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
/**
* @author Lu Liu
@ -15,7 +15,7 @@ public class EnglishDescriptionScrutinizer extends DescriptionScrutinizer {
private static final String LANG = "en";
@Override
public void scrutinize(ItemUpdate update, String descText, String lang) {
public void scrutinize(TermedStatementEntityUpdate update, String descText, String lang) {
if (!LANG.equalsIgnoreCase(lang)) {
return;
}
@ -26,7 +26,7 @@ public class EnglishDescriptionScrutinizer extends DescriptionScrutinizer {
}
// Description are not sentences, so the punctuation sign at the end should be avoided.
protected void checkPunctuationSign(ItemUpdate update, String descText) {
protected void checkPunctuationSign(TermedStatementEntityUpdate update, String descText) {
assert descText.length() > 0;
final String punctuationSigns = ".?!;:,'\"";
@ -42,7 +42,7 @@ public class EnglishDescriptionScrutinizer extends DescriptionScrutinizer {
}
// Descriptions begin with a lowercase letter except when uppercase would normally be required or expected.
protected void checkUppercase(ItemUpdate update, String descText) {
protected void checkUppercase(TermedStatementEntityUpdate update, String descText) {
assert descText.length() > 0;
char first = descText.charAt(0);
@ -57,7 +57,7 @@ public class EnglishDescriptionScrutinizer extends DescriptionScrutinizer {
}
// Descriptions should not normally begin with initial articles ("a", "an", "the").
protected void checkArticle(ItemUpdate update, String descText) {
protected void checkArticle(TermedStatementEntityUpdate update, String descText) {
assert descText.length() > 0;
String firstWord = descText.split("\\s")[0].toLowerCase();

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
import org.wikidata.wdtk.datamodel.interfaces.SnakGroup;
@ -61,7 +61,7 @@ public class ItemRequiresScrutinizer extends EditScrutinizer {
}
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
Map<PropertyIdValue, Set<Value>> propertyIdValueValueMap = new HashMap<>();
for (Statement statement : update.getAddedStatements()) {
Snak mainSnak = statement.getClaim().getMainSnak();

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -22,7 +22,7 @@ public class MultiValueScrutinizer extends EditScrutinizer {
}
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
Map<PropertyIdValue, Integer> propertyCount = new HashMap<>();
for (Statement statement : update.getAddedStatements()) {

View File

@ -24,7 +24,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.StatementGroup;
/**
@ -46,7 +46,7 @@ public class NewItemScrutinizer extends EditScrutinizer {
}
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
if (update.isNew()) {
info(newItemType);

View File

@ -23,7 +23,7 @@
******************************************************************************/
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
public class NoEditsMadeScrutinizer extends EditScrutinizer {
@ -42,7 +42,7 @@ public class NoEditsMadeScrutinizer extends EditScrutinizer {
}
@Override
public void scrutinize(ItemUpdate edit) {
public void scrutinize(TermedStatementEntityUpdate edit) {
nonNullUpdateSeen = true;
}

View File

@ -24,7 +24,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -56,7 +56,7 @@ public class SingleValueScrutinizer extends EditScrutinizer {
}
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
Set<PropertyIdValue> seenSingleProperties = new HashSet<>();
for (Statement statement : update.getAddedStatements()) {

View File

@ -23,14 +23,14 @@
******************************************************************************/
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
public abstract class StatementScrutinizer extends EditScrutinizer {
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
EntityIdValue currentEntityId = update.getItemId();
for (Statement statement : update.getAddedStatements()) {
scrutinize(statement, currentEntityId, true);

View File

@ -24,7 +24,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Reference;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -44,7 +44,7 @@ public class UnsourcedScrutinizer extends EditScrutinizer {
public static final String constraintItemType = "no-references-provided";
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
for (Statement statement : update.getAddedStatements()) {
PropertyIdValue pid = statement.getClaim().getMainSnak().getPropertyId();
List<Statement> constraintDefinitions = _fetcher.getConstraintsByType(pid, citationNeededConstraintQid);

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
@ -55,7 +55,7 @@ public class UseAsQualifierScrutinizer extends EditScrutinizer {
}
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
for (Statement statement : update.getAddedStatements()) {
PropertyIdValue pid = statement.getClaim().getMainSnak().getPropertyId();
Map<PropertyIdValue, List<Value>> qualifiersMap = new HashMap<>();

View File

@ -23,7 +23,7 @@
******************************************************************************/
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
@ -39,7 +39,7 @@ import org.wikidata.wdtk.datamodel.interfaces.ValueSnak;
public abstract class ValueScrutinizer extends SnakScrutinizer {
@Override
public void scrutinize(ItemUpdate update) {
public void scrutinize(TermedStatementEntityUpdate update) {
super.scrutinize(update);
for (MonolingualTextValue label : update.getLabels()) {

View File

@ -28,7 +28,7 @@ import java.util.List;
import org.jsoup.helper.Validate;
import org.openrefine.wikidata.schema.exceptions.SkipSchemaExpressionException;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -47,7 +47,7 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonTypeInfo(use = JsonTypeInfo.Id.NONE)
public class WbItemDocumentExpr implements WbExpression<ItemUpdate> {
public class WbItemDocumentExpr implements WbExpression<TermedStatementEntityUpdate> {
private WbExpression<? extends ItemIdValue> subject;
private List<WbNameDescExpr> nameDescs;
@ -70,7 +70,7 @@ public class WbItemDocumentExpr implements WbExpression<ItemUpdate> {
}
@Override
public ItemUpdate evaluate(ExpressionContext ctxt)
public TermedStatementEntityUpdate evaluate(ExpressionContext ctxt)
throws SkipSchemaExpressionException {
ItemIdValue subjectId = getSubject().evaluate(ctxt);
ItemUpdateBuilder update = new ItemUpdateBuilder(subjectId);

View File

@ -30,7 +30,7 @@ import java.util.List;
import org.openrefine.wikidata.qa.QAWarningStore;
import org.openrefine.wikidata.schema.exceptions.SkipSchemaExpressionException;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.wikibaseapi.ApiConnection;
@ -116,8 +116,8 @@ public class WikibaseSchema implements OverlayModel {
* the context in which the schema should be evaluated.
* @return
*/
public List<ItemUpdate> evaluateItemDocuments(ExpressionContext ctxt) {
List<ItemUpdate> result = new ArrayList<>();
public List<TermedStatementEntityUpdate> evaluateItemDocuments(ExpressionContext ctxt) {
List<TermedStatementEntityUpdate> result = new ArrayList<>();
for (WbItemDocumentExpr expr : itemDocumentExprs) {
try {
@ -146,8 +146,8 @@ public class WikibaseSchema implements OverlayModel {
* a store in which issues will be emitted
* @return item updates are stored in their generating order (not merged yet).
*/
public List<ItemUpdate> evaluate(Project project, Engine engine, QAWarningStore warningStore) {
List<ItemUpdate> result = new ArrayList<>();
public List<TermedStatementEntityUpdate> evaluate(Project project, Engine engine, QAWarningStore warningStore) {
List<TermedStatementEntityUpdate> result = new ArrayList<>();
FilteredRows filteredRows = engine.getAllFilteredRows();
filteredRows.accept(project, new EvaluatingRowVisitor(result, warningStore));
return result;
@ -156,16 +156,16 @@ public class WikibaseSchema implements OverlayModel {
/**
* Same as above, ignoring any warnings.
*/
public List<ItemUpdate> evaluate(Project project, Engine engine) {
public List<TermedStatementEntityUpdate> evaluate(Project project, Engine engine) {
return evaluate(project, engine, null);
}
protected class EvaluatingRowVisitor implements RowVisitor {
private List<ItemUpdate> result;
private List<TermedStatementEntityUpdate> result;
private QAWarningStore warningStore;
public EvaluatingRowVisitor(List<ItemUpdate> result, QAWarningStore warningStore) {
public EvaluatingRowVisitor(List<TermedStatementEntityUpdate> result, QAWarningStore warningStore) {
this.result = result;
this.warningStore = warningStore;
}

View File

@ -0,0 +1,41 @@
/*******************************************************************************
* MIT License
*
* Copyright (c) 2018 Antonin Delpeuch
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package org.openrefine.wikidata.schema.entityvalues;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue;
import com.google.refine.model.Recon;
public class ReconMediaInfoIdValue extends ReconEntityIdValue implements MediaInfoIdValue {
public ReconMediaInfoIdValue(Recon recon, String cellValue) {
super(recon, cellValue);
}
@Override
public String getEntityType() {
return EntityIdValue.ET_MEDIA_INFO;
}
}

View File

@ -29,6 +29,7 @@ import java.util.List;
import java.util.Set;
import org.jsoup.helper.Validate;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -41,7 +42,7 @@ import org.wikidata.wdtk.datamodel.interfaces.Statement;
*/
public class ItemUpdateBuilder {
private ItemIdValue qid;
private EntityIdValue qid;
private List<Statement> addedStatements;
private Set<Statement> deletedStatements;
private Set<MonolingualTextValue> labels;
@ -58,7 +59,7 @@ public class ItemUpdateBuilder {
* the subject of the document. It can be a reconciled item value for
* new items.
*/
public ItemUpdateBuilder(ItemIdValue qid) {
public ItemUpdateBuilder(EntityIdValue qid) {
Validate.notNull(qid);
this.qid = qid;
this.addedStatements = new ArrayList<>();
@ -224,9 +225,9 @@ public class ItemUpdateBuilder {
*
* @return
*/
public ItemUpdate build() {
public TermedStatementEntityUpdate build() {
built = true;
return new ItemUpdate(qid, addedStatements, deletedStatements, labels, labelsIfNew, descriptions, descriptionsIfNew, aliases);
return new TermedStatementEntityUpdate(qid, addedStatements, deletedStatements, labels, labelsIfNew, descriptions, descriptionsIfNew, aliases);
}
}

View File

@ -55,9 +55,9 @@ import com.fasterxml.jackson.annotation.JsonProperty;
*
* @author Antonin Delpeuch
*/
public class ItemUpdate {
public class TermedStatementEntityUpdate {
private final ItemIdValue qid;
private final EntityIdValue qid;
private final List<Statement> addedStatements;
private final Set<Statement> deletedStatements;
private final Map<String, MonolingualTextValue> labels;
@ -70,7 +70,7 @@ public class ItemUpdate {
* Constructor.
*
* @param qid
* the subject of the document. It can be a reconciled item value for
* the subject of the document. It can be a reconciled entity value for
* new items.
* @param addedStatements
* the statements to add on the item. They should be distinct. They
@ -91,7 +91,7 @@ public class ItemUpdate {
* so this is just kept as a set for simplicity.
*/
@JsonCreator
public ItemUpdate(@JsonProperty("subject") ItemIdValue qid,
public TermedStatementEntityUpdate(@JsonProperty("subject") EntityIdValue qid,
@JsonProperty("addedStatements") List<Statement> addedStatements,
@JsonProperty("deletedStatements") Set<Statement> deletedStatements,
@JsonProperty("labels") Set<MonolingualTextValue> labels,
@ -143,8 +143,8 @@ public class ItemUpdate {
* @param aliases
* the aliases to add
*/
private ItemUpdate(
ItemIdValue qid,
private TermedStatementEntityUpdate(
EntityIdValue qid,
List<Statement> addedStatements,
Set<Statement> deletedStatements,
Map<String, MonolingualTextValue> labels,
@ -166,7 +166,7 @@ public class ItemUpdate {
* @return the subject of the item
*/
@JsonProperty("subject")
public ItemIdValue getItemId() {
public EntityIdValue getItemId() {
return qid;
}
@ -260,7 +260,7 @@ public class ItemUpdate {
* @param other
* the other change that should be merged
*/
public ItemUpdate merge(ItemUpdate other) {
public TermedStatementEntityUpdate merge(TermedStatementEntityUpdate other) {
Validate.isTrue(qid.equals(other.getItemId()));
List<Statement> newAddedStatements = new ArrayList<>(addedStatements);
for (Statement statement : other.getAddedStatements()) {
@ -287,7 +287,7 @@ public class ItemUpdate {
aliases.add(alias);
}
}
return new ItemUpdate(qid, newAddedStatements, newDeletedStatements, newLabels, newLabelsIfNew, newDescriptions, newDescriptionsIfNew, newAliases);
return new TermedStatementEntityUpdate(qid, newAddedStatements, newDeletedStatements, newLabels, newLabelsIfNew, newDescriptions, newDescriptionsIfNew, newAliases);
}
/**
@ -325,22 +325,22 @@ public class ItemUpdate {
}
/**
* Group a list of ItemUpdates by subject: this is useful to make one single
* Group a list of TermedStatementEntityUpdates by subject: this is useful to make one single
* edit per item.
*
* @param itemDocuments
* @return a map from item ids to merged ItemUpdate for that id
* @return a map from item ids to merged TermedStatementEntityUpdate for that id
*/
public static Map<EntityIdValue, ItemUpdate> groupBySubject(List<ItemUpdate> itemDocuments) {
Map<EntityIdValue, ItemUpdate> map = new HashMap<>();
for (ItemUpdate update : itemDocuments) {
public static Map<EntityIdValue, TermedStatementEntityUpdate> groupBySubject(List<TermedStatementEntityUpdate> itemDocuments) {
Map<EntityIdValue, TermedStatementEntityUpdate> map = new HashMap<>();
for (TermedStatementEntityUpdate update : itemDocuments) {
if (update.isNull()) {
continue;
}
ItemIdValue qid = update.getItemId();
EntityIdValue qid = update.getItemId();
if (map.containsKey(qid)) {
ItemUpdate oldUpdate = map.get(qid);
TermedStatementEntityUpdate oldUpdate = map.get(qid);
map.put(qid, oldUpdate.merge(update));
} else {
map.put(qid, update);
@ -361,7 +361,7 @@ public class ItemUpdate {
* This should only be used when creating a new item. This ensures that we never
* add an alias without adding a label in the same language.
*/
public ItemUpdate normalizeLabelsAndAliases() {
public TermedStatementEntityUpdate normalizeLabelsAndAliases() {
// Ensure that we are only adding aliases with labels
Set<MonolingualTextValue> filteredAliases = new HashSet<>();
Map<String, MonolingualTextValue> newLabels = new HashMap<>(labelsIfNew);
@ -375,17 +375,17 @@ public class ItemUpdate {
}
Map<String, MonolingualTextValue> newDescriptions = new HashMap<>(descriptionsIfNew);
newDescriptions.putAll(descriptions);
return new ItemUpdate(qid, addedStatements, deletedStatements,
return new TermedStatementEntityUpdate(qid, addedStatements, deletedStatements,
newLabels, Collections.emptyMap(), newDescriptions, Collections.emptyMap(),
constructTermListMap(filteredAliases));
}
@Override
public boolean equals(Object other) {
if (other == null || !ItemUpdate.class.isInstance(other)) {
if (other == null || !TermedStatementEntityUpdate.class.isInstance(other)) {
return false;
}
ItemUpdate otherUpdate = (ItemUpdate) other;
TermedStatementEntityUpdate otherUpdate = (TermedStatementEntityUpdate) other;
return qid.equals(otherUpdate.getItemId()) && addedStatements.equals(otherUpdate.getAddedStatements())
&& deletedStatements.equals(otherUpdate.getDeletedStatements())
&& getLabels().equals(otherUpdate.getLabels())

View File

@ -32,7 +32,7 @@ import java.util.Map.Entry;
import java.util.Set;
import org.openrefine.wikidata.schema.entityvalues.ReconItemIdValue;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -62,7 +62,7 @@ public class QuickStatementsUpdateScheduler implements UpdateScheduler {
* @throws ImpossibleSchedulingException
* if two new item ids are referred to in the same statement
*/
protected void splitUpdate(ItemUpdate update)
protected void splitUpdate(TermedStatementEntityUpdate update)
throws ImpossibleSchedulingException {
ItemUpdateBuilder remainingUpdateBuilder = new ItemUpdateBuilder(update.getItemId())
.addLabels(update.getLabels(), true)
@ -93,13 +93,13 @@ public class QuickStatementsUpdateScheduler implements UpdateScheduler {
}
// Add the update that is not referring to anything to the schedule
ItemUpdate pointerFree = remainingUpdateBuilder.build();
TermedStatementEntityUpdate pointerFree = remainingUpdateBuilder.build();
if (!pointerFree.isNull()) {
pointerFreeUpdates.add(pointerFree);
}
// Add the other updates to the map
for (Entry<ItemIdValue, ItemUpdateBuilder> entry : referencingUpdates.entrySet()) {
ItemUpdate pointerUpdate = entry.getValue().build();
TermedStatementEntityUpdate pointerUpdate = entry.getValue().build();
UpdateSequence pointerUpdatesForKey = pointerUpdates.get(entry.getKey());
if (pointerUpdatesForKey == null) {
pointerUpdatesForKey = new UpdateSequence();
@ -110,19 +110,19 @@ public class QuickStatementsUpdateScheduler implements UpdateScheduler {
}
@Override
public List<ItemUpdate> schedule(List<ItemUpdate> updates)
public List<TermedStatementEntityUpdate> schedule(List<TermedStatementEntityUpdate> updates)
throws ImpossibleSchedulingException {
pointerUpdates = new HashMap<>();
pointerFreeUpdates = new UpdateSequence();
for (ItemUpdate update : updates) {
for (TermedStatementEntityUpdate update : updates) {
splitUpdate(update);
}
// Reconstruct
List<ItemUpdate> fullSchedule = new ArrayList<>();
List<TermedStatementEntityUpdate> fullSchedule = new ArrayList<>();
Set<ItemIdValue> mentionedNewEntities = new HashSet<>(pointerUpdates.keySet());
for (ItemUpdate update : pointerFreeUpdates.getUpdates()) {
for (TermedStatementEntityUpdate update : pointerFreeUpdates.getUpdates()) {
fullSchedule.add(update);
UpdateSequence backPointers = pointerUpdates.get(update.getItemId());
if (backPointers != null) {

View File

@ -25,7 +25,7 @@ package org.openrefine.wikidata.updates.scheduler;
import java.util.List;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
/**
* A scheduling strategy for item updates. Given a list of initial updates, the
@ -48,6 +48,6 @@ public interface UpdateScheduler {
* @throws ImpossibleSchedulingException
* when the scheduler cannot cope with a particular edit plan.
*/
public List<ItemUpdate> schedule(List<ItemUpdate> updates)
public List<TermedStatementEntityUpdate> schedule(List<TermedStatementEntityUpdate> updates)
throws ImpossibleSchedulingException;
}

View File

@ -29,7 +29,8 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
/**
@ -43,7 +44,7 @@ public class UpdateSequence {
/**
* The list of updates stored by this container
*/
private List<ItemUpdate> updates = new ArrayList<>();
private List<TermedStatementEntityUpdate> updates = new ArrayList<>();
/**
* An index to keep track of where each item is touched in the sequence
*/
@ -55,14 +56,14 @@ public class UpdateSequence {
*
* @param update
*/
public void add(ItemUpdate update) {
ItemIdValue subject = update.getItemId();
public void add(TermedStatementEntityUpdate update) {
EntityIdValue subject = update.getItemId();
if (index.containsKey(subject)) {
int i = index.get(subject);
ItemUpdate oldUpdate = updates.get(i);
TermedStatementEntityUpdate oldUpdate = updates.get(i);
updates.set(i, oldUpdate.merge(update));
} else {
index.put(subject, updates.size());
index.put((ItemIdValue) subject, updates.size());
updates.add(update);
}
}
@ -70,7 +71,7 @@ public class UpdateSequence {
/**
* @return the list of merged updates
*/
public List<ItemUpdate> getUpdates() {
public List<TermedStatementEntityUpdate> getUpdates() {
return updates;
}

View File

@ -30,7 +30,7 @@ import java.util.Set;
import java.util.stream.Collectors;
import org.openrefine.wikidata.schema.entityvalues.ReconItemIdValue;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -67,13 +67,13 @@ public class WikibaseAPIUpdateScheduler implements UpdateScheduler {
private PointerExtractor extractor = new PointerExtractor();
@Override
public List<ItemUpdate> schedule(List<ItemUpdate> updates) {
List<ItemUpdate> result = new ArrayList<>();
public List<TermedStatementEntityUpdate> schedule(List<TermedStatementEntityUpdate> updates) {
List<TermedStatementEntityUpdate> result = new ArrayList<>();
pointerFreeUpdates = new UpdateSequence();
pointerFullUpdates = new UpdateSequence();
allPointers = new HashSet<>();
for (ItemUpdate update : updates) {
for (TermedStatementEntityUpdate update : updates) {
splitUpdate(update);
}
@ -97,7 +97,7 @@ public class WikibaseAPIUpdateScheduler implements UpdateScheduler {
*
* @param update
*/
protected void splitUpdate(ItemUpdate update) {
protected void splitUpdate(TermedStatementEntityUpdate update) {
ItemUpdateBuilder pointerFreeBuilder = new ItemUpdateBuilder(update.getItemId())
.addLabels(update.getLabels(), true)
.addLabels(update.getLabelsIfNew(), false)
@ -120,11 +120,11 @@ public class WikibaseAPIUpdateScheduler implements UpdateScheduler {
if (update.isNew()) {
// If the update is new, we might need to split it
// in two (if it refers to any other new entity).
ItemUpdate pointerFree = pointerFreeBuilder.build();
TermedStatementEntityUpdate pointerFree = pointerFreeBuilder.build();
if (!pointerFree.isNull()) {
pointerFreeUpdates.add(pointerFree);
}
ItemUpdate pointerFull = pointerFullBuilder.build();
TermedStatementEntityUpdate pointerFull = pointerFullBuilder.build();
if (!pointerFull.isEmpty()) {
pointerFullUpdates.add(pointerFull);
}

View File

@ -39,7 +39,7 @@ import java.util.stream.Collectors;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.testing.WikidataRefineTest;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@ -74,7 +74,7 @@ public class EditBatchProcessorTest extends WikidataRefineTest {
@Test
public void testNewItem()
throws InterruptedException, MediaWikiApiErrorException, IOException {
List<ItemUpdate> batch = new ArrayList<>();
List<TermedStatementEntityUpdate> batch = new ArrayList<>();
batch.add(new ItemUpdateBuilder(TestingData.existingId)
.addAlias(Datamodel.makeMonolingualTextValue("my new alias", "en"))
.addStatement(TestingData.generateStatement(TestingData.existingId, TestingData.newIdA)).build());
@ -122,7 +122,7 @@ public class EditBatchProcessorTest extends WikidataRefineTest {
}
List<ItemIdValue> qids = ids.stream().map(e -> Datamodel.makeWikidataItemIdValue(e))
.collect(Collectors.toList());
List<ItemUpdate> batch = qids.stream()
List<TermedStatementEntityUpdate> batch = qids.stream()
.map(qid -> new ItemUpdateBuilder(qid).addDescription(description, true).build())
.collect(Collectors.toList());

View File

@ -27,18 +27,20 @@ import static org.testng.Assert.assertEquals;
import org.openrefine.wikidata.schema.exceptions.NewItemNotCreatedYetException;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
public class ReconEntityRewriterTest {
NewItemLibrary library = null;
ReconEntityRewriter rewriter = null;
ItemIdValue newlyCreated = Datamodel.makeWikidataItemIdValue("Q1234");
PropertyIdValue newlyCreatedProperty = Datamodel.makeWikidataPropertyIdValue("P1234");
@BeforeMethod
public void setUp() {
@ -84,14 +86,14 @@ public class ReconEntityRewriterTest {
ItemIdValue subject = TestingData.newIdA;
rewriter = new ReconEntityRewriter(library, subject);
library.setQid(4567L, "Q1234");
ItemUpdate update = new ItemUpdateBuilder(subject)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(subject)
.addStatement(TestingData.generateStatement(subject, TestingData.newIdB))
.deleteStatement(TestingData.generateStatement(subject, TestingData.existingId))
.addLabel(Datamodel.makeMonolingualTextValue("label", "de"), true)
.addDescription(Datamodel.makeMonolingualTextValue("beschreibung", "de"), false)
.addAlias(Datamodel.makeMonolingualTextValue("darstellung", "de")).build();
ItemUpdate rewritten = rewriter.rewrite(update);
ItemUpdate expected = new ItemUpdateBuilder(subject)
TermedStatementEntityUpdate rewritten = rewriter.rewrite(update);
TermedStatementEntityUpdate expected = new ItemUpdateBuilder(subject)
.addStatement(TestingData.generateStatement(subject, newlyCreated))
.deleteStatement(TestingData.generateStatement(subject, TestingData.existingId))
.addLabel(Datamodel.makeMonolingualTextValue("label", "de"), true)
@ -105,11 +107,11 @@ public class ReconEntityRewriterTest {
ItemIdValue subject = TestingData.newIdA;
rewriter = new ReconEntityRewriter(library, subject);
library.setQid(4567L, "Q1234");
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdB)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdB)
.addDescription(Datamodel.makeMonolingualTextValue("beschreibung", "de"), false)
.addAlias(Datamodel.makeMonolingualTextValue("darstellung", "de")).build();
ItemUpdate rewritten = rewriter.rewrite(update);
ItemUpdate expected = new ItemUpdateBuilder(newlyCreated)
TermedStatementEntityUpdate rewritten = rewriter.rewrite(update);
TermedStatementEntityUpdate expected = new ItemUpdateBuilder(newlyCreated)
.addDescription(Datamodel.makeMonolingualTextValue("beschreibung", "de"), false)
.addAlias(Datamodel.makeMonolingualTextValue("darstellung", "de")).build();
assertEquals(rewritten, expected);
@ -120,14 +122,14 @@ public class ReconEntityRewriterTest {
ItemIdValue subject = TestingData.matchedId;
rewriter = new ReconEntityRewriter(library, subject);
library.setQid(4567L, "Q1234");
ItemUpdate update = new ItemUpdateBuilder(subject)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(subject)
.addStatement(TestingData.generateStatement(subject, TestingData.newIdB))
.deleteStatement(TestingData.generateStatement(subject, TestingData.existingId))
.addLabel(Datamodel.makeMonolingualTextValue("label", "de"), true)
.addDescription(Datamodel.makeMonolingualTextValue("beschreibung", "de"), false)
.addAlias(Datamodel.makeMonolingualTextValue("darstellung", "de")).build();
ItemUpdate rewritten = rewriter.rewrite(update);
ItemUpdate expected = new ItemUpdateBuilder(subject)
TermedStatementEntityUpdate rewritten = rewriter.rewrite(update);
TermedStatementEntityUpdate expected = new ItemUpdateBuilder(subject)
.addStatement(TestingData.generateStatement(subject, newlyCreated))
.deleteStatement(TestingData.generateStatement(subject, TestingData.existingId))
.addLabel(Datamodel.makeMonolingualTextValue("label", "de"), true)
@ -135,4 +137,25 @@ public class ReconEntityRewriterTest {
.addAlias(Datamodel.makeMonolingualTextValue("darstellung", "de")).build();
assertEquals(rewritten, expected);
}
@Test
public void testRewritePropertyUpdateOnExistingEntity() throws NewItemNotCreatedYetException {
PropertyIdValue subject = TestingData.matchedPropertyID;
rewriter = new ReconEntityRewriter(library, subject);
library.setQid(7654L, "P1234");
TermedStatementEntityUpdate update = new ItemUpdateBuilder(subject)
.addStatement(TestingData.generateStatement(subject, TestingData.newPropertyIdB))
.deleteStatement(TestingData.generateStatement(subject, TestingData.existingPropertyId))
.addLabel(Datamodel.makeMonolingualTextValue("label", "de"), true)
.addDescription(Datamodel.makeMonolingualTextValue("beschreibung", "de"), false)
.addAlias(Datamodel.makeMonolingualTextValue("darstellung", "de")).build();
TermedStatementEntityUpdate rewritten = rewriter.rewrite(update);
TermedStatementEntityUpdate expected = new ItemUpdateBuilder(subject)
.addStatement(TestingData.generateStatement(subject, newlyCreatedProperty))
.deleteStatement(TestingData.generateStatement(subject, TestingData.existingPropertyId))
.addLabel(Datamodel.makeMonolingualTextValue("label", "de"), true)
.addDescription(Datamodel.makeMonolingualTextValue("beschreibung", "de"), false)
.addAlias(Datamodel.makeMonolingualTextValue("darstellung", "de")).build();
assertEquals(rewritten, expected);
}
}

View File

@ -34,7 +34,7 @@ import java.util.Properties;
import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.testing.WikidataRefineTest;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -51,7 +51,7 @@ public class QuickStatementsExporterTest extends WikidataRefineTest {
private ItemIdValue qid1 = Datamodel.makeWikidataItemIdValue("Q1377");
private ItemIdValue qid2 = Datamodel.makeWikidataItemIdValue("Q865528");
private String export(ItemUpdate... itemUpdates)
private String export(TermedStatementEntityUpdate... itemUpdates)
throws IOException {
StringWriter writer = new StringWriter();
exporter.translateItemList(Arrays.asList(itemUpdates), writer);
@ -78,7 +78,7 @@ public class QuickStatementsExporterTest extends WikidataRefineTest {
public void testImpossibleScheduling()
throws IOException {
Statement sNewAtoNewB = TestingData.generateStatement(newIdA, newIdB);
ItemUpdate update = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoNewB).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoNewB).build();
assertEquals(QuickStatementsExporter.impossibleSchedulingErrorMessage, export(update));
}
@ -90,7 +90,7 @@ public class QuickStatementsExporterTest extends WikidataRefineTest {
* Adding labels and description without overriding is not supported by QS, so
* we fall back on adding them with overriding.
*/
ItemUpdate update = new ItemUpdateBuilder(qid1)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(qid1)
.addLabel(Datamodel.makeMonolingualTextValue("some label", "en"), true)
.addDescription(Datamodel.makeMonolingualTextValue("some description", "en"), true)
.build();
@ -101,7 +101,7 @@ public class QuickStatementsExporterTest extends WikidataRefineTest {
@Test
public void testOptionalNameDesc()
throws IOException {
ItemUpdate update = new ItemUpdateBuilder(newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(newIdA)
.addLabel(Datamodel.makeMonolingualTextValue("my new item", "en"), false)
.addDescription(Datamodel.makeMonolingualTextValue("isn't it awesome?", "en"), false)
.addAlias(Datamodel.makeMonolingualTextValue("fabitem", "en")).build();
@ -113,7 +113,7 @@ public class QuickStatementsExporterTest extends WikidataRefineTest {
@Test
public void testDeleteStatement()
throws IOException {
ItemUpdate update = new ItemUpdateBuilder(qid1).deleteStatement(TestingData.generateStatement(qid1, qid2))
TermedStatementEntityUpdate update = new ItemUpdateBuilder(qid1).deleteStatement(TestingData.generateStatement(qid1, qid2))
.build();
assertEquals("- Q1377\tP38\tQ865528\n", export(update));
@ -129,7 +129,7 @@ public class QuickStatementsExporterTest extends WikidataRefineTest {
Claim claim = Datamodel.makeClaim(qid1, baseStatement.getClaim().getMainSnak(),
Collections.singletonList(group));
Statement statement = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
ItemUpdate update = new ItemUpdateBuilder(qid1).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(qid1).addStatement(statement).build();
assertEquals("Q1377\tP38\tQ865528\tP38\tQ1377\n", export(update));
}
@ -141,7 +141,7 @@ public class QuickStatementsExporterTest extends WikidataRefineTest {
Claim claim = Datamodel.makeClaim(qid1, Datamodel.makeSomeValueSnak(pid), Collections.emptyList());
Statement statement = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
ItemUpdate update = new ItemUpdateBuilder(qid1).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(qid1).addStatement(statement).build();
assertEquals("Q1377\tP123\tsomevalue\n", export(update));
}
@ -153,7 +153,7 @@ public class QuickStatementsExporterTest extends WikidataRefineTest {
Claim claim = Datamodel.makeClaim(qid1, Datamodel.makeNoValueSnak(pid), Collections.emptyList());
Statement statement = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
ItemUpdate update = new ItemUpdateBuilder(qid1).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(qid1).addStatement(statement).build();
assertEquals("Q1377\tP123\tnovalue\n", export(update));
}
@ -181,7 +181,7 @@ public class QuickStatementsExporterTest extends WikidataRefineTest {
Reference reference2 = Datamodel.makeReference(Collections.singletonList(group2));
Statement statement = Datamodel.makeStatement(claim, Arrays.asList(reference1, reference2), StatementRank.NORMAL, "");
ItemUpdate update = new ItemUpdateBuilder(qid1).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(qid1).addStatement(statement).build();
assertEquals("Q1377\tP38\tQ865528\tP38\tQ1377\tS38\tQ865528\n" +
"Q1377\tP38\tQ865528\tP38\tQ1377\tS38\tQ1377\n", export(update));

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -16,7 +16,7 @@ public class CommonDescriptionScrutinizerTest extends ScrutinizerTest {
@Test
public void testGoodDesc() {
String description = "good description";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), true)
.build();
scrutinize(update);
@ -29,7 +29,7 @@ public class CommonDescriptionScrutinizerTest extends ScrutinizerTest {
+ "long description long description long description long description "
+ "long description long description long description long description "
+ "long description long description long description long description";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), true)
.build();
scrutinize(update);
@ -39,7 +39,7 @@ public class CommonDescriptionScrutinizerTest extends ScrutinizerTest {
@Test
public void testIdenticalWithLabel() {
String description = "identical with label";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), true)
.addLabel(Datamodel.makeMonolingualTextValue(description, "en"), true)
.build();
@ -50,7 +50,7 @@ public class CommonDescriptionScrutinizerTest extends ScrutinizerTest {
@Test
public void testIdenticalWithLabel1() {
String description = "identical with label";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), true)
.addLabel(Datamodel.makeMonolingualTextValue("bonjour", "fr"), true)
.build();
@ -64,7 +64,7 @@ public class CommonDescriptionScrutinizerTest extends ScrutinizerTest {
+ "long description long description long description long description "
+ "long description long description long description long description "
+ "long description long description long description long description";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), true)
.addLabel(Datamodel.makeMonolingualTextValue(description, "en"), true)
.build();

View File

@ -2,7 +2,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -60,7 +60,7 @@ public class ConflictsWithScrutinizerTest extends ScrutinizerTest {
Statement statement1 = new StatementImpl("P2002", value1,idA);
Statement statement2 = new StatementImpl("P31", value2,idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
Snak snak1 = Datamodel.makeValueSnak(propertyParameterPID, conflictingPropertyValue1);
Snak snak2 = Datamodel.makeValueSnak(itemParameterPID, conflictingItemValue1);
@ -87,7 +87,7 @@ public class ConflictsWithScrutinizerTest extends ScrutinizerTest {
Statement statement = new StatementImpl("P2002", value,id);
ItemUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
Snak snak1 = Datamodel.makeValueSnak(propertyParameterPID, conflictingPropertyValue1);
Snak snak2 = Datamodel.makeValueSnak(itemParameterPID, conflictingItemValue1);
@ -116,7 +116,7 @@ public class ConflictsWithScrutinizerTest extends ScrutinizerTest {
Statement statement1 = new StatementImpl("P2002", value1,idA);
Statement statement2 = new StatementImpl("P31", value2,idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
Snak snak1 = Datamodel.makeValueSnak(propertyParameterPID, conflictingPropertyValue1);
Snak snak2 = Datamodel.makeValueSnak(itemParameterPID, conflictingItemValue1);
@ -143,7 +143,7 @@ public class ConflictsWithScrutinizerTest extends ScrutinizerTest {
Statement statement = new StatementImpl("P31", valueSnak,idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = new ArrayList<>();
@ -167,7 +167,7 @@ public class ConflictsWithScrutinizerTest extends ScrutinizerTest {
Statement statement2 = new StatementImpl("P31", value2, idA);
Statement statement3 = new StatementImpl("P553", value3, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).addStatement(statement3).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).addStatement(statement3).build();
Snak propertySnak1 = Datamodel.makeValueSnak(propertyParameterPID, conflictingPropertyValue1);
Snak itemSnak1 = Datamodel.makeValueSnak(itemParameterPID, conflictingItemValue1);

View File

@ -2,7 +2,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -54,7 +54,7 @@ public class DifferenceWithinScrutinizerTest extends ScrutinizerTest{
ValueSnak value2 = Datamodel.makeValueSnak(upperBoundPid, upperYear);
Statement statement1 = new StatementImpl("P569", value1,idA);
Statement statement2 = new StatementImpl("P570", value2,idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
Snak propertyQualifier = Datamodel.makeValueSnak(propertyParameterPID, lowerBoundPid);
Snak minValueQualifier = Datamodel.makeValueSnak(minimumValuePID, minValue);
@ -79,7 +79,7 @@ public class DifferenceWithinScrutinizerTest extends ScrutinizerTest{
ValueSnak value2 = Datamodel.makeValueSnak(upperBoundPid, upperYear);
Statement statement1 = new StatementImpl("P569", value1,idA);
Statement statement2 = new StatementImpl("P570", value2,idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
Snak propertyQualifier = Datamodel.makeValueSnak(propertyParameterPID, lowerBoundPid);
Snak minValueQualifier = Datamodel.makeValueSnak(minimumValuePID, minValue);

View File

@ -25,7 +25,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -65,7 +65,7 @@ public class DistinctValuesScrutinizerTest extends StatementScrutinizerTest {
Statement statement1 = new StatementImpl("P163", mainSnak, idA);
Statement statement2 = new StatementImpl("P163", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
List<SnakGroup> constraintQualifiers = new ArrayList<>();
List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, constraintQualifiers);
@ -86,7 +86,7 @@ public class DistinctValuesScrutinizerTest extends StatementScrutinizerTest {
Statement statement1 = new StatementImpl("P163", snak1, idA);
Statement statement2 = new StatementImpl("P163", snak2, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
List<SnakGroup> constraintQualifiers = new ArrayList<>();
List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, constraintQualifiers);

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -16,7 +16,7 @@ public class EnglishDescriptionScrutinizerTest extends ScrutinizerTest {
@Test
public void testGoodDesc() {
String description = "good description";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), true)
.build();
scrutinize(update);
@ -26,7 +26,7 @@ public class EnglishDescriptionScrutinizerTest extends ScrutinizerTest {
@Test
public void testEndWithPunctuationSign() {
String description = "description with punctuationSign.";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), false)
.build();
scrutinize(update);
@ -36,7 +36,7 @@ public class EnglishDescriptionScrutinizerTest extends ScrutinizerTest {
@Test
public void testBeginWithUppercase() {
String description = "Begin with uppercase";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), true)
.build();
scrutinize(update);
@ -46,7 +46,7 @@ public class EnglishDescriptionScrutinizerTest extends ScrutinizerTest {
@Test
public void testBeginWithArticle() {
String description = "an article test";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), false)
.build();
scrutinize(update);
@ -56,7 +56,7 @@ public class EnglishDescriptionScrutinizerTest extends ScrutinizerTest {
@Test
public void testAwfulDesc() {
String description = "An awful description.";
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addDescription(Datamodel.makeMonolingualTextValue(description, "en"), true)
.addLabel(Datamodel.makeMonolingualTextValue(description, "en"), true)
.build();

View File

@ -2,7 +2,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -49,7 +49,7 @@ public class EntityTypeScrutinizerTest extends StatementScrutinizerTest {
ValueSnak mainValueSnak = Datamodel.makeValueSnak(propertyIdValue, propertyValue);
Statement statement = new StatementImpl("P2302", mainValueSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(itemParameterPID, allowedValue);
List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
@ -72,7 +72,7 @@ public class EntityTypeScrutinizerTest extends StatementScrutinizerTest {
ValueSnak mainValueSnak = Datamodel.makeValueSnak(propertyIdValue, propertyValue);
Statement statement = new StatementImpl("P2302", mainValueSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(itemParameterPID, itemValue);
List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);

View File

@ -25,7 +25,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -71,7 +71,7 @@ public class FormatScrutinizerTest extends ScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
ValueSnak value = Datamodel.makeValueSnak(propertyIdValue, noMatchValue);
Statement statement = new StatementImpl("P18", value, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = generateFormatConstraint(regularExpression);
@ -87,7 +87,7 @@ public class FormatScrutinizerTest extends ScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
ValueSnak value = Datamodel.makeValueSnak(propertyIdValue, completeMatchValue);
Statement statement = new StatementImpl("P18", value, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = generateFormatConstraint(regularExpression);
@ -103,7 +103,7 @@ public class FormatScrutinizerTest extends ScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
ValueSnak value = Datamodel.makeValueSnak(propertyIdValue, incompleteMatchValue);
Statement statement = new StatementImpl("P18", value, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = generateFormatConstraint(regularExpression);
@ -119,7 +119,7 @@ public class FormatScrutinizerTest extends ScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
ValueSnak value = Datamodel.makeValueSnak(propertyIdValue, incompleteMatchValue);
Statement statement = new StatementImpl("P18", value, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = generateFormatConstraint(invalidRegularExpression);

View File

@ -25,7 +25,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -68,7 +68,7 @@ public class InverseConstraintScrutinizerTest extends StatementScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyId, propertyValue);
Statement statement = new StatementImpl("P25", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(propertyParameter, inversePropertyID);
List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
@ -88,7 +88,7 @@ public class InverseConstraintScrutinizerTest extends StatementScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(symmetricPropertyID, symmetricPropertyValue);
Statement statement = new StatementImpl("P3373", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(symmetricPropertyID, symmetricEntityIdValue);
List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
@ -108,7 +108,7 @@ public class InverseConstraintScrutinizerTest extends StatementScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeSomeValueSnak(propertyId);
Statement statement = new StatementImpl("P25", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(propertyParameter, inverseEntityIdValue);
List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);

View File

@ -2,7 +2,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -44,7 +44,7 @@ public class ItemRequiresScrutinizerTest extends ScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, itemValue);
Statement statement = new StatementImpl("P157", mainSnak, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak1 = Datamodel.makeValueSnak(propertyParameterPID, propertyParameterValue);
Snak qualifierSnak2 = Datamodel.makeValueSnak(itemParameterPID, requiredValue);
@ -66,7 +66,7 @@ public class ItemRequiresScrutinizerTest extends ScrutinizerTest {
Statement statement = new StatementImpl("P157", mainSnak, idA);
Snak requiredPropertySnak = Datamodel.makeValueSnak(propertyParameterValue, itemValue);
Statement requiredStatement = new StatementImpl("P1196", requiredPropertySnak, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).addStatement(requiredStatement).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).addStatement(requiredStatement).build();
Snak qualifierSnak1 = Datamodel.makeValueSnak(propertyParameterPID, propertyParameterValue);
Snak qualifierSnak2 = Datamodel.makeValueSnak(itemParameterPID, requiredValue);
@ -88,7 +88,7 @@ public class ItemRequiresScrutinizerTest extends ScrutinizerTest {
Statement statement = new StatementImpl("P157", mainSnak, idA);
Snak requiredPropertySnak = Datamodel.makeValueSnak(propertyParameterValue, requiredValue);
Statement requiredStatement = new StatementImpl("P1196", requiredPropertySnak, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).addStatement(requiredStatement).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).addStatement(requiredStatement).build();
Snak qualifierSnak1 = Datamodel.makeValueSnak(propertyParameterPID, propertyParameterValue);
Snak qualifierSnak2 = Datamodel.makeValueSnak(itemParameterPID, requiredValue);
@ -108,7 +108,7 @@ public class ItemRequiresScrutinizerTest extends ScrutinizerTest {
ItemIdValue idA = TestingData.newIdA;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, itemValue);
Statement statement = new StatementImpl("P157", mainSnak, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak1 = Datamodel.makeValueSnak(propertyParameterPID, propertyParameterValue);
Snak qualifierSnak2 = Datamodel.makeValueSnak(itemParameterPID, requiredValue);
@ -130,7 +130,7 @@ public class ItemRequiresScrutinizerTest extends ScrutinizerTest {
ValueSnak mainSnak2 = Datamodel.makeValueSnak(propertyParameterValue, requiredValue);
Statement statement1 = new StatementImpl("P157", mainSnak1,id);
Statement statement2 = new StatementImpl("P1196", mainSnak2,id);
ItemUpdate update = new ItemUpdateBuilder(id).addStatement(statement1).addStatement(statement2).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(id).addStatement(statement1).addStatement(statement2).build();
Snak qualifierSnak1 = Datamodel.makeValueSnak(propertyParameterPID, propertyParameterValue);
Snak qualifierSnak2 = Datamodel.makeValueSnak(itemParameterPID, requiredValue);

View File

@ -2,7 +2,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -40,7 +40,7 @@ public class MultiValueScrutinizerTest extends ScrutinizerTest {
Snak snakValue2 = Datamodel.makeSomeValueSnak(propertyIdValue);
Statement statement1 = new StatementImpl("P1963", snakValue1, idA);
Statement statement2 = new StatementImpl("P1963", snakValue2, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(TestingData.generateStatement(idA, idB))
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(TestingData.generateStatement(idA, idB))
.addStatement(TestingData.generateStatement(idA, idB)).addStatement(statement1).addStatement(statement2).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, new ArrayList<>());
@ -58,8 +58,8 @@ public class MultiValueScrutinizerTest extends ScrutinizerTest {
ItemIdValue idB = TestingData.newIdB;
Snak mainSnakValue = Datamodel.makeValueSnak(propertyIdValue, valueSnak);
Statement statement = new StatementImpl("P1963", mainSnakValue, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(TestingData.generateStatement(idA, idB)).addStatement(statement).build();
ItemUpdate updateB = new ItemUpdateBuilder(idB).addStatement(TestingData.generateStatement(idB, idB)).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(TestingData.generateStatement(idA, idB)).addStatement(statement).build();
TermedStatementEntityUpdate updateB = new ItemUpdateBuilder(idB).addStatement(TestingData.generateStatement(idB, idB)).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, new ArrayList<>());
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
@ -76,8 +76,8 @@ public class MultiValueScrutinizerTest extends ScrutinizerTest {
ItemIdValue idB = TestingData.matchedId;
Snak mainSnakValue = Datamodel.makeValueSnak(propertyIdValue, valueSnak);
Statement statement = new StatementImpl("P1963", mainSnakValue, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(TestingData.generateStatement(idA, idB)).addStatement(statement).build();
ItemUpdate updateB = new ItemUpdateBuilder(idB).addStatement(TestingData.generateStatement(idB, idB)).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(TestingData.generateStatement(idA, idB)).addStatement(statement).build();
TermedStatementEntityUpdate updateB = new ItemUpdateBuilder(idB).addStatement(TestingData.generateStatement(idB, idB)).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, new ArrayList<>());
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);

View File

@ -26,7 +26,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import java.util.Collections;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -48,7 +48,7 @@ public class NewItemScrutinizerTest extends ScrutinizerTest {
@Test
public void testTrigger() {
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA).build();
scrutinize(update);
assertWarningsRaised(NewItemScrutinizer.noDescType, NewItemScrutinizer.noLabelType,
NewItemScrutinizer.noTypeType, NewItemScrutinizer.newItemType);
@ -56,7 +56,7 @@ public class NewItemScrutinizerTest extends ScrutinizerTest {
@Test
public void testEmptyItem() {
ItemUpdate update = new ItemUpdateBuilder(TestingData.existingId).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.existingId).build();
scrutinize(update);
assertNoWarningRaised();
}
@ -64,7 +64,7 @@ public class NewItemScrutinizerTest extends ScrutinizerTest {
@Test
public void testGoodNewItem() {
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addLabel(Datamodel.makeMonolingualTextValue("bonjour", "fr"), false)
.addDescription(Datamodel.makeMonolingualTextValue("interesting item", "en"), true).addStatement(p31Statement)
.build();
@ -74,7 +74,7 @@ public class NewItemScrutinizerTest extends ScrutinizerTest {
@Test
public void testDeletedStatements() {
ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addLabel(Datamodel.makeMonolingualTextValue("bonjour", "fr"), false)
.addDescription(Datamodel.makeMonolingualTextValue("interesting item", "en"), true).addStatement(p31Statement)
.deleteStatement(TestingData.generateStatement(TestingData.newIdA, TestingData.matchedId)).build();

View File

@ -25,7 +25,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -72,7 +72,7 @@ public class QualifierCompatibilityScrutinizerTest extends StatementScrutinizerT
Snak mainSnak = Datamodel.makeSomeValueSnak(allowedPropertyIdValue);
Snak qualifierSnak = Datamodel.makeSomeValueSnak(disallowedQualifierProperty);
Statement statement = makeStatement(mainSnak, qualifierSnak);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak constraintQualifierSnak = Datamodel.makeValueSnak(propertyParameterPID, propertyParameterValue);
List<Snak> qualifierList = Collections.singletonList(constraintQualifierSnak);
@ -93,7 +93,7 @@ public class QualifierCompatibilityScrutinizerTest extends StatementScrutinizerT
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeSomeValueSnak(mandatoryPropertyIdValue);
Statement statement = makeStatement(mainSnak);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak constraintQualifierSnak = Datamodel.makeValueSnak(propertyParameterPID, propertyParameterValue);
List<Snak> qualifierList = Collections.singletonList(constraintQualifierSnak);
@ -115,7 +115,7 @@ public class QualifierCompatibilityScrutinizerTest extends StatementScrutinizerT
Snak mainSnak = Datamodel.makeSomeValueSnak(allowedPropertyIdValue);
Snak qualifierSnak = Datamodel.makeSomeValueSnak(qualifierProperty);
Statement statement = makeStatement(mainSnak, qualifierSnak);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak constraintQualifierSnak = Datamodel.makeValueSnak(propertyParameterPID, propertyParameterValue);
List<Snak> qualifierList = Collections.singletonList(constraintQualifierSnak);

View File

@ -2,7 +2,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -68,7 +68,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, valueWithBounds);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
when(fetcher.getConstraintsByType(propertyIdValue, NO_BOUNDS_CONSTRAINT_QID)).thenReturn(new ArrayList<>());
@ -83,7 +83,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, valueWithBounds);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(noBoundsEntity, new ArrayList<>());
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
@ -99,7 +99,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, exactValue);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
when(fetcher.getConstraintsByType(propertyIdValue, INTEGER_VALUED_CONSTRAINT_QID)).thenReturn(new ArrayList<>());
@ -114,7 +114,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, exactValue);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(integerValueEntity, new ArrayList<>());
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
@ -130,7 +130,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, trailingZeros);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(integerValueEntity, new ArrayList<>());
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
@ -146,7 +146,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, integerValue);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(integerValueEntity, new ArrayList<>());
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
@ -162,7 +162,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, integerValue);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(itemParameterPID, allowedUnit);
List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
@ -182,7 +182,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, wrongUnitValue);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(itemParameterPID, allowedUnit);
List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
@ -202,7 +202,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, goodUnitValue);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(itemParameterPID, allowedUnit);
List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
@ -222,7 +222,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, goodUnitValue);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(allowedUnitEntity, new ArrayList<>());
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
@ -238,7 +238,7 @@ public class QuantityScrutinizerTest extends ValueScrutinizerTest{
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, integerValue);
Statement statement = new StatementImpl("P1083", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
when(fetcher.getConstraintsByType(propertyIdValue, ALLOWED_UNITS_CONSTRAINT_QID)).thenReturn(new ArrayList<>());

View File

@ -25,7 +25,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -67,7 +67,7 @@ public class RestrictedPositionScrutinizerTest extends SnakScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeSomeValueSnak(propertyIdValue);
Statement statement = new StatementImpl("P22", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(propertyScopeParameter, asQualifier);
List<SnakGroup> constraintQualifiers = makeSnakGroupList(qualifierSnak);
@ -85,7 +85,7 @@ public class RestrictedPositionScrutinizerTest extends SnakScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeSomeValueSnak(propertyIdValue);
Statement statement = new StatementImpl("P22", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(propertyScopeParameter, asMainSnak);
List<SnakGroup> constraintQualifiers = makeSnakGroupList(qualifierSnak);
@ -103,7 +103,7 @@ public class RestrictedPositionScrutinizerTest extends SnakScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
Snak mainSnak = Datamodel.makeSomeValueSnak(propertyIdValue);
Statement statement = new StatementImpl("P22", mainSnak, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
when(fetcher.getConstraintsByType(propertyIdValue, SCOPE_CONSTRAINT_QID)).thenReturn(new ArrayList<>());
@ -122,7 +122,7 @@ public class RestrictedPositionScrutinizerTest extends SnakScrutinizerTest {
Statement statement = Datamodel.makeStatement(
TestingData.generateStatement(idA, propertyIdValue, idA).getClaim(),
Collections.singletonList(Datamodel.makeReference(snakGroups)), StatementRank.NORMAL, "");
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement).build();
Snak qualifierSnak = Datamodel.makeValueSnak(propertyScopeParameter, asMainSnak);
List<SnakGroup> constraintQualifiers = makeSnakGroupList(qualifierSnak);

View File

@ -30,7 +30,7 @@ import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.qa.QAWarningStore;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.testng.annotations.BeforeMethod;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.Claim;
@ -82,9 +82,9 @@ public abstract class ScrutinizerTest {
scrutinizer.prepareDependencies();
}
public void scrutinize(ItemUpdate... updates) {
public void scrutinize(TermedStatementEntityUpdate... updates) {
scrutinizer.batchIsBeginning();
for(ItemUpdate update : Arrays.asList(updates)) {
for(TermedStatementEntityUpdate update : Arrays.asList(updates)) {
if(!update.isNull()) {
scrutinizer.scrutinize(update);
}

View File

@ -25,7 +25,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -64,7 +64,7 @@ public class SingleValueScrutinizerTest extends ScrutinizerTest {
Snak snak2 = Datamodel.makeValueSnak(propertyIdValue, value2);
Statement statement1 = new StatementImpl("P21", snak1, idA);
Statement statement2 = new StatementImpl("P21", snak2, idA);
ItemUpdate update = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(idA).addStatement(statement1).addStatement(statement2).build();
List<Statement> statementList = constraintParameterStatementList(entityIdValue, new ArrayList<>());
@ -80,7 +80,7 @@ public class SingleValueScrutinizerTest extends ScrutinizerTest {
ItemIdValue idA = TestingData.existingId;
Snak snak1 = Datamodel.makeValueSnak(propertyIdValue, value1);
Statement statement1 = new StatementImpl("P21", snak1, idA);
ItemUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(idA).addStatement(statement1).build();
List<Statement> statementList = constraintParameterStatementList(entityIdValue, new ArrayList<>());

View File

@ -23,7 +23,7 @@
******************************************************************************/
package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -31,7 +31,7 @@ import org.wikidata.wdtk.datamodel.interfaces.Statement;
public abstract class StatementScrutinizerTest extends ScrutinizerTest {
public void scrutinize(Statement statement) {
ItemUpdate update = new ItemUpdateBuilder((ItemIdValue) statement.getClaim().getSubject())
TermedStatementEntityUpdate update = new ItemUpdateBuilder((ItemIdValue) statement.getClaim().getSubject())
.addStatement(statement).build();
scrutinize(update);
}

View File

@ -25,7 +25,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -72,7 +72,7 @@ public class UnsourcedScrutinizerTest extends StatementScrutinizerTest {
ItemIdValue id = TestingData.existingId;
Snak mainSnak = Datamodel.makeSomeValueSnak(propertyIdValue);
Statement statement = new StatementImpl("P172", mainSnak, id);
ItemUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, Collections.emptyList());
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
@ -90,7 +90,7 @@ public class UnsourcedScrutinizerTest extends StatementScrutinizerTest {
List<SnakGroup> constraintQualifiers = makeSnakGroupList(referenceSnak);
List<Statement> itemStatementList = constraintParameterStatementList(entityIdValue, constraintQualifiers);
Statement statement = itemStatementList.get(0);
ItemUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, Collections.emptyList());
ConstraintFetcher fetcher = mock(ConstraintFetcher.class);

View File

@ -2,7 +2,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.ConstraintFetcher;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -46,7 +46,7 @@ public class UseAsQualifierScrutinizerTest extends ScrutinizerTest {
List<SnakGroup> qualifierList = makeSnakGroupList(statementQualifier);
List<Statement> statementList = constraintParameterStatementList(useAsQualifierEntityId, qualifierList);
Statement statement = statementList.get(0);
ItemUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
Snak qualifierSnak1 = Datamodel.makeValueSnak(qualifierPID, qualifierPropertyValue);
Snak qualifierSnak2 = Datamodel.makeValueSnak(itemParameterPID, qualifierAllowedValue);
@ -67,7 +67,7 @@ public class UseAsQualifierScrutinizerTest extends ScrutinizerTest {
List<SnakGroup> qualifierList = makeSnakGroupList(statementQualifier);
List<Statement> statementList = constraintParameterStatementList(useAsQualifierEntityId, qualifierList);
Statement statement = statementList.get(0);
ItemUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
Snak qualifierSnak1 = Datamodel.makeValueSnak(qualifierPID, qualifierPropertyValue);
Snak qualifierSnak2 = Datamodel.makeValueSnak(itemParameterPID, qualifierAllowedValue);
@ -86,7 +86,7 @@ public class UseAsQualifierScrutinizerTest extends ScrutinizerTest {
ItemIdValue id = TestingData.existingId;
List<Statement> statementList = constraintParameterStatementList(useAsQualifierEntityId, new ArrayList<>());
Statement statement = statementList.get(0);
ItemUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(id).addStatement(statement).build();
Snak qualifierSnak1 = Datamodel.makeValueSnak(qualifierPID, qualifierPropertyValue);
Snak qualifierSnak2 = Datamodel.makeValueSnak(itemParameterPID, qualifierAllowedValue);

View File

@ -27,7 +27,7 @@ import java.util.Collections;
import java.util.List;
import org.openrefine.wikidata.testing.JacksonSerializationTest;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.Assert;
import org.testng.annotations.Test;
@ -36,7 +36,7 @@ import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
public class WbItemDocumentExprTest extends WbExpressionTest<ItemUpdate> {
public class WbItemDocumentExprTest extends WbExpressionTest<TermedStatementEntityUpdate> {
public WbItemDocumentExpr expr;
ItemIdValue subject = Datamodel.makeWikidataItemIdValue("Q23");
@ -63,7 +63,7 @@ public class WbItemDocumentExprTest extends WbExpressionTest<ItemUpdate> {
@Test
public void testEvaluate() {
setRow(recon("Q3434"), "2010-07-23", "3.898,4.389", "my alias", recon("Q23"));
ItemUpdate result = new ItemUpdateBuilder(subject).addAlias(alias).addStatement(fullStatement).build();
TermedStatementEntityUpdate result = new ItemUpdateBuilder(subject).addAlias(alias).addStatement(fullStatement).build();
evaluatesTo(result, expr);
}
@ -76,14 +76,14 @@ public class WbItemDocumentExprTest extends WbExpressionTest<ItemUpdate> {
@Test
public void testStatementSkipped() {
setRow(recon("Q3434"), "2010-07-23", "3.898,invalid4.389", "my alias", recon("Q23"));
ItemUpdate result = new ItemUpdateBuilder(subject).addAlias(alias).build();
TermedStatementEntityUpdate result = new ItemUpdateBuilder(subject).addAlias(alias).build();
evaluatesTo(result, expr);
}
@Test
public void testAliasSkipped() {
setRow(recon("Q3434"), "2010-07-23", "3.898,4.389", "", recon("Q23"));
ItemUpdate result = new ItemUpdateBuilder(subject).addStatement(fullStatement).build();
TermedStatementEntityUpdate result = new ItemUpdateBuilder(subject).addStatement(fullStatement).build();
evaluatesTo(result, expr);
}

View File

@ -33,7 +33,7 @@ import java.util.List;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.testing.WikidataRefineTest;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@ -117,11 +117,11 @@ public class WikibaseSchemaTest extends WikidataRefineTest {
String serialized = TestingData.jsonFromFile("schema/inception.json");
WikibaseSchema schema = WikibaseSchema.reconstruct(serialized);
Engine engine = new Engine(project);
List<ItemUpdate> updates = schema.evaluate(project, engine);
List<ItemUpdate> expected = new ArrayList<>();
ItemUpdate update1 = new ItemUpdateBuilder(qid1).addStatement(statement1).build();
List<TermedStatementEntityUpdate> updates = schema.evaluate(project, engine);
List<TermedStatementEntityUpdate> expected = new ArrayList<>();
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(qid1).addStatement(statement1).build();
expected.add(update1);
ItemUpdate update2 = new ItemUpdateBuilder(qid2).addStatement(statement2).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(qid2).addStatement(statement2).build();
expected.add(update2);
assertEquals(expected, updates);
}
@ -154,9 +154,9 @@ public class WikibaseSchemaTest extends WikidataRefineTest {
+ " ]\n"
+ " }");
engine.initializeFromConfig(engineConfig);
List<ItemUpdate> updates = schema.evaluate(project, engine);
List<ItemUpdate> expected = new ArrayList<>();
ItemUpdate update1 = new ItemUpdateBuilder(qid1).addStatement(statement1).build();
List<TermedStatementEntityUpdate> updates = schema.evaluate(project, engine);
List<TermedStatementEntityUpdate> expected = new ArrayList<>();
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(qid1).addStatement(statement1).build();
expected.add(update1);
assertEquals(expected, updates);
}

View File

@ -35,6 +35,7 @@ import org.openrefine.wikidata.schema.entityvalues.ReconItemIdValue;
import org.openrefine.wikidata.schema.entityvalues.ReconPropertyIdValue;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.Claim;
//import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -63,6 +64,11 @@ public class TestingData {
public static ItemIdValue matchedId = makeMatchedItemIdValue("Q89", "eist");
public static ItemIdValue existingId = Datamodel.makeWikidataItemIdValue("Q43");
public static PropertyIdValue newPropertyIdA = makeNewPropertyIdValue(4321L, "new Property A");
public static PropertyIdValue newPropertyIdB = makeNewPropertyIdValue(7654L, "new Property B");
public static PropertyIdValue matchedPropertyID = makeMatchedPropertyIdValue("P38", "currency");
public static PropertyIdValue existingPropertyId = Datamodel.makeWikidataPropertyIdValue("P43");
protected static PropertyIdValue pid = Datamodel.makeWikidataPropertyIdValue("P38");
public static class ReconStub extends Recon {
@ -118,7 +124,16 @@ public class TestingData {
public static WbMonolingualExpr getTestMonolingualExpr(String langCode, String langLabel, String text) {
return new WbMonolingualExpr(new WbLanguageConstant(langCode, langLabel), new WbStringConstant(text));
}
/*
public static Statement generateStatement(EntityIdValue from, PropertyIdValue pid, EntityIdValue to) {
Claim claim = Datamodel.makeClaim(from, Datamodel.makeValueSnak(pid, to), Collections.emptyList());
return Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
}
public static Statement generateStatement(EntityIdValue from, EntityIdValue to) {
return generateStatement(from, pid, to);
}
*/
public static Statement generateStatement(ItemIdValue from, PropertyIdValue pid, ItemIdValue to) {
Claim claim = Datamodel.makeClaim(from, Datamodel.makeValueSnak(pid, to), Collections.emptyList());
return Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
@ -128,6 +143,15 @@ public class TestingData {
return generateStatement(from, pid, to);
}
public static Statement generateStatement(PropertyIdValue from, PropertyIdValue pid, PropertyIdValue to) {
Claim claim = Datamodel.makeClaim(from, Datamodel.makeValueSnak(pid, to), Collections.emptyList());
return Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
}
public static Statement generateStatement(PropertyIdValue from, PropertyIdValue to) {
return generateStatement(from, pid, to);
}
public static String jsonFromFile(String filename)
throws IOException {
InputStream f = TestingData.class.getClassLoader().getResourceAsStream(filename);

View File

@ -83,31 +83,31 @@ public class ItemUpdateTest {
@Test
public void testIsNull() {
ItemUpdate update = new ItemUpdateBuilder(existingSubject).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(existingSubject).build();
assertTrue(update.isNull());
ItemUpdate update2 = new ItemUpdateBuilder(newSubject).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(newSubject).build();
assertFalse(update2.isNull());
}
@Test
public void testIsEmpty() {
ItemUpdate update = new ItemUpdateBuilder(existingSubject).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(existingSubject).build();
assertTrue(update.isEmpty());
ItemUpdate update2 = new ItemUpdateBuilder(newSubject).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(newSubject).build();
assertTrue(update2.isEmpty());
}
@Test
public void testIsNew() {
ItemUpdate newUpdate = new ItemUpdateBuilder(newSubject).build();
TermedStatementEntityUpdate newUpdate = new ItemUpdateBuilder(newSubject).build();
assertTrue(newUpdate.isNew());
ItemUpdate update = new ItemUpdateBuilder(existingSubject).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(existingSubject).build();
assertFalse(update.isNew());
}
@Test
public void testAddStatements() {
ItemUpdate update = new ItemUpdateBuilder(existingSubject).addStatement(statement1).addStatement(statement2)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(existingSubject).addStatement(statement1).addStatement(statement2)
.build();
assertFalse(update.isNull());
assertEquals(Arrays.asList(statement1, statement2), update.getAddedStatements());
@ -126,14 +126,14 @@ public class ItemUpdateTest {
*/
@Test(enabled=false)
public void testSerializeStatements() throws IOException {
ItemUpdate update = new ItemUpdateBuilder(existingSubject).addStatement(statement1).addStatement(statement2)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(existingSubject).addStatement(statement1).addStatement(statement2)
.build();
TestUtils.isSerializedTo(update, TestingData.jsonFromFile("updates/statement_groups.json"));
}
@Test
public void testDeleteStatements() {
ItemUpdate update = new ItemUpdateBuilder(existingSubject).deleteStatement(statement1)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(existingSubject).deleteStatement(statement1)
.deleteStatement(statement2).build();
assertEquals(Arrays.asList(statement1, statement2).stream().collect(Collectors.toSet()),
update.getDeletedStatements());
@ -141,24 +141,24 @@ public class ItemUpdateTest {
@Test
public void testMerge() {
ItemUpdate updateA = new ItemUpdateBuilder(existingSubject).addStatement(statement1).build();
ItemUpdate updateB = new ItemUpdateBuilder(existingSubject).addStatement(statement2).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(existingSubject).addStatement(statement1).build();
TermedStatementEntityUpdate updateB = new ItemUpdateBuilder(existingSubject).addStatement(statement2).build();
assertNotEquals(updateA, updateB);
ItemUpdate merged = updateA.merge(updateB);
TermedStatementEntityUpdate merged = updateA.merge(updateB);
assertEquals(statementGroups, merged.getAddedStatementGroups().stream().collect(Collectors.toSet()));
}
@Test
public void testGroupBySubject() {
ItemUpdate updateA = new ItemUpdateBuilder(newSubject).addStatement(statement1).build();
ItemUpdate updateB = new ItemUpdateBuilder(sameNewSubject).addStatement(statement2).build();
ItemUpdate updateC = new ItemUpdateBuilder(existingSubject).addLabel(label, true).build();
ItemUpdate updateD = new ItemUpdateBuilder(matchedSubject).build();
Map<EntityIdValue, ItemUpdate> grouped = ItemUpdate
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(newSubject).addStatement(statement1).build();
TermedStatementEntityUpdate updateB = new ItemUpdateBuilder(sameNewSubject).addStatement(statement2).build();
TermedStatementEntityUpdate updateC = new ItemUpdateBuilder(existingSubject).addLabel(label, true).build();
TermedStatementEntityUpdate updateD = new ItemUpdateBuilder(matchedSubject).build();
Map<EntityIdValue, TermedStatementEntityUpdate> grouped = TermedStatementEntityUpdate
.groupBySubject(Arrays.asList(updateA, updateB, updateC, updateD));
ItemUpdate mergedUpdate = new ItemUpdateBuilder(newSubject).addStatement(statement1).addStatement(statement2)
TermedStatementEntityUpdate mergedUpdate = new ItemUpdateBuilder(newSubject).addStatement(statement1).addStatement(statement2)
.build();
Map<EntityIdValue, ItemUpdate> expected = new HashMap<>();
Map<EntityIdValue, TermedStatementEntityUpdate> expected = new HashMap<>();
expected.put(newSubject, mergedUpdate);
expected.put(existingSubject, updateC);
assertEquals(expected, grouped);
@ -168,11 +168,11 @@ public class ItemUpdateTest {
public void testNormalizeTerms() {
MonolingualTextValue aliasEn = Datamodel.makeMonolingualTextValue("alias", "en");
MonolingualTextValue aliasFr = Datamodel.makeMonolingualTextValue("coucou", "fr");
ItemUpdate updateA = new ItemUpdateBuilder(newSubject).addLabel(label, true).addAlias(aliasEn).addAlias(aliasFr)
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(newSubject).addLabel(label, true).addAlias(aliasEn).addAlias(aliasFr)
.build();
assertFalse(updateA.isNull());
ItemUpdate normalized = updateA.normalizeLabelsAndAliases();
ItemUpdate expectedUpdate = new ItemUpdateBuilder(newSubject).addLabel(label, true).addAlias(aliasEn)
TermedStatementEntityUpdate normalized = updateA.normalizeLabelsAndAliases();
TermedStatementEntityUpdate expectedUpdate = new ItemUpdateBuilder(newSubject).addLabel(label, true).addAlias(aliasEn)
.addLabel(aliasFr, true).build();
assertEquals(expectedUpdate, normalized);
}
@ -181,9 +181,9 @@ public class ItemUpdateTest {
public void testMergeLabels() {
MonolingualTextValue label1 = Datamodel.makeMonolingualTextValue("first label", "en");
MonolingualTextValue label2 = Datamodel.makeMonolingualTextValue("second label", "en");
ItemUpdate update1 = new ItemUpdateBuilder(existingSubject).addLabel(label1, true).build();
ItemUpdate update2 = new ItemUpdateBuilder(existingSubject).addLabel(label2, true).build();
ItemUpdate merged = update1.merge(update2);
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(existingSubject).addLabel(label1, true).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(existingSubject).addLabel(label2, true).build();
TermedStatementEntityUpdate merged = update1.merge(update2);
assertEquals(Collections.singleton(label2), merged.getLabels());
}
@ -191,9 +191,9 @@ public class ItemUpdateTest {
public void testMergeLabelsIfNew() {
MonolingualTextValue label1 = Datamodel.makeMonolingualTextValue("first label", "en");
MonolingualTextValue label2 = Datamodel.makeMonolingualTextValue("second label", "en");
ItemUpdate update1 = new ItemUpdateBuilder(existingSubject).addLabel(label1, false).build();
ItemUpdate update2 = new ItemUpdateBuilder(existingSubject).addLabel(label2, false).build();
ItemUpdate merged = update1.merge(update2);
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(existingSubject).addLabel(label1, false).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(existingSubject).addLabel(label2, false).build();
TermedStatementEntityUpdate merged = update1.merge(update2);
assertEquals(Collections.singleton(label1), merged.getLabelsIfNew());
assertEquals(Collections.emptySet(), merged.getLabels());
}
@ -202,9 +202,9 @@ public class ItemUpdateTest {
public void testMergeLabelsIfNewOverriding() {
MonolingualTextValue label1 = Datamodel.makeMonolingualTextValue("first label", "en");
MonolingualTextValue label2 = Datamodel.makeMonolingualTextValue("second label", "en");
ItemUpdate update1 = new ItemUpdateBuilder(existingSubject).addLabel(label1, true).build();
ItemUpdate update2 = new ItemUpdateBuilder(existingSubject).addLabel(label2, false).build();
ItemUpdate merged = update1.merge(update2);
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(existingSubject).addLabel(label1, true).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(existingSubject).addLabel(label2, false).build();
TermedStatementEntityUpdate merged = update1.merge(update2);
assertEquals(Collections.singleton(label1), merged.getLabels());
assertEquals(Collections.emptySet(), merged.getLabelsIfNew());
}
@ -213,9 +213,9 @@ public class ItemUpdateTest {
public void testMergeLabelsIfNewOverriding2() {
MonolingualTextValue label1 = Datamodel.makeMonolingualTextValue("first label", "en");
MonolingualTextValue label2 = Datamodel.makeMonolingualTextValue("second label", "en");
ItemUpdate update1 = new ItemUpdateBuilder(existingSubject).addLabel(label1, false).build();
ItemUpdate update2 = new ItemUpdateBuilder(existingSubject).addLabel(label2, true).build();
ItemUpdate merged = update1.merge(update2);
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(existingSubject).addLabel(label1, false).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(existingSubject).addLabel(label2, true).build();
TermedStatementEntityUpdate merged = update1.merge(update2);
assertEquals(Collections.singleton(label2), merged.getLabels());
assertEquals(Collections.emptySet(), merged.getLabelsIfNew());
}
@ -224,9 +224,9 @@ public class ItemUpdateTest {
public void testMergeDescriptionsIfNew() {
MonolingualTextValue description1 = Datamodel.makeMonolingualTextValue("first description", "en");
MonolingualTextValue description2 = Datamodel.makeMonolingualTextValue("second description", "en");
ItemUpdate update1 = new ItemUpdateBuilder(existingSubject).addDescription(description1, false).build();
ItemUpdate update2 = new ItemUpdateBuilder(existingSubject).addDescription(description2, false).build();
ItemUpdate merged = update1.merge(update2);
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(existingSubject).addDescription(description1, false).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(existingSubject).addDescription(description2, false).build();
TermedStatementEntityUpdate merged = update1.merge(update2);
assertEquals(Collections.singleton(description1), merged.getDescriptionsIfNew());
assertEquals(Collections.emptySet(), merged.getDescriptions());
assertFalse(merged.isEmpty());
@ -236,9 +236,9 @@ public class ItemUpdateTest {
public void testMergeDescriptionsIfNewOverriding() {
MonolingualTextValue description1 = Datamodel.makeMonolingualTextValue("first description", "en");
MonolingualTextValue description2 = Datamodel.makeMonolingualTextValue("second description", "en");
ItemUpdate update1 = new ItemUpdateBuilder(existingSubject).addDescription(description1, true).build();
ItemUpdate update2 = new ItemUpdateBuilder(existingSubject).addDescription(description2, false).build();
ItemUpdate merged = update1.merge(update2);
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(existingSubject).addDescription(description1, true).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(existingSubject).addDescription(description2, false).build();
TermedStatementEntityUpdate merged = update1.merge(update2);
assertEquals(Collections.singleton(description1), merged.getDescriptions());
assertEquals(Collections.emptySet(), merged.getDescriptionsIfNew());
}
@ -247,9 +247,9 @@ public class ItemUpdateTest {
public void testMergeDescriptionsIfNewOverriding2() {
MonolingualTextValue description1 = Datamodel.makeMonolingualTextValue("first description", "en");
MonolingualTextValue description2 = Datamodel.makeMonolingualTextValue("second description", "en");
ItemUpdate update1 = new ItemUpdateBuilder(existingSubject).addDescription(description1, false).build();
ItemUpdate update2 = new ItemUpdateBuilder(existingSubject).addDescription(description2, true).build();
ItemUpdate merged = update1.merge(update2);
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(existingSubject).addDescription(description1, false).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(existingSubject).addDescription(description2, true).build();
TermedStatementEntityUpdate merged = update1.merge(update2);
assertEquals(Collections.singleton(description2), merged.getDescriptions());
assertEquals(Collections.emptySet(), merged.getDescriptionsIfNew());
}
@ -258,7 +258,7 @@ public class ItemUpdateTest {
public void testConstructOverridingLabels() {
MonolingualTextValue label1 = Datamodel.makeMonolingualTextValue("first label", "en");
MonolingualTextValue label2 = Datamodel.makeMonolingualTextValue("second label", "en");
ItemUpdate update = new ItemUpdateBuilder(existingSubject)
TermedStatementEntityUpdate update = new ItemUpdateBuilder(existingSubject)
.addLabel(label1, false)
.addLabel(label2, true)
.build();

View File

@ -28,7 +28,7 @@ import static org.testng.Assert.assertEquals;
import java.util.Arrays;
import java.util.List;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
@ -37,35 +37,35 @@ public class QuickStatementsUpdateSchedulerTest extends UpdateSchedulerTest {
@Test
public void testNoNewItem()
throws ImpossibleSchedulingException {
ItemUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoB).build();
ItemUpdate updateB = new ItemUpdateBuilder(existingIdB).addStatement(sBtoA).build();
List<ItemUpdate> scheduled = schedule(updateA, updateB);
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoB).build();
TermedStatementEntityUpdate updateB = new ItemUpdateBuilder(existingIdB).addStatement(sBtoA).build();
List<TermedStatementEntityUpdate> scheduled = schedule(updateA, updateB);
assertEquals(Arrays.asList(updateA, updateB), scheduled);
}
@Test
public void testSplitUpdate()
throws ImpossibleSchedulingException {
ItemUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).addStatement(sAtoNewB).build();
ItemUpdate newUpdateA = new ItemUpdateBuilder(newIdA).build();
ItemUpdate newUpdateB = new ItemUpdateBuilder(newIdB).build();
ItemUpdate splitUpdateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).build();
ItemUpdate splitUpdateB = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewB).build();
List<ItemUpdate> scheduled = schedule(updateA);
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).addStatement(sAtoNewB).build();
TermedStatementEntityUpdate newUpdateA = new ItemUpdateBuilder(newIdA).build();
TermedStatementEntityUpdate newUpdateB = new ItemUpdateBuilder(newIdB).build();
TermedStatementEntityUpdate splitUpdateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).build();
TermedStatementEntityUpdate splitUpdateB = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewB).build();
List<TermedStatementEntityUpdate> scheduled = schedule(updateA);
assertSetEquals(Arrays.asList(newUpdateA, splitUpdateA, newUpdateB, splitUpdateB), scheduled);
}
@Test(expectedExceptions = ImpossibleSchedulingException.class)
public void testImpossibleForQS()
throws ImpossibleSchedulingException {
ItemUpdate update = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoNewB).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoNewB).build();
schedule(update);
}
@Test
public void testSelfEditOnNewITem()
throws ImpossibleSchedulingException {
ItemUpdate update = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoNewA).build();
TermedStatementEntityUpdate update = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoNewA).build();
assertEquals(Arrays.asList(update), schedule(update));
}

View File

@ -31,7 +31,7 @@ import java.util.List;
import java.util.stream.Collectors;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -55,51 +55,51 @@ public abstract class UpdateSchedulerTest {
public abstract UpdateScheduler getScheduler();
protected List<ItemUpdate> schedule(ItemUpdate... itemUpdates)
protected List<TermedStatementEntityUpdate> schedule(TermedStatementEntityUpdate... itemUpdates)
throws ImpossibleSchedulingException {
return getScheduler().schedule(Arrays.asList(itemUpdates));
}
protected static void assertSetEquals(List<ItemUpdate> expected, List<ItemUpdate> actual) {
protected static void assertSetEquals(List<TermedStatementEntityUpdate> expected, List<TermedStatementEntityUpdate> actual) {
assertEquals(expected.stream().collect(Collectors.toSet()), actual.stream().collect(Collectors.toSet()));
}
@Test
public void testNewItemNotMentioned()
throws ImpossibleSchedulingException {
ItemUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).build();
List<ItemUpdate> scheduled = schedule(updateA);
ItemUpdate newUpdate = new ItemUpdateBuilder(newIdA).build();
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).build();
List<TermedStatementEntityUpdate> scheduled = schedule(updateA);
TermedStatementEntityUpdate newUpdate = new ItemUpdateBuilder(newIdA).build();
assertEquals(Arrays.asList(newUpdate, updateA), scheduled);
}
@Test
public void testNewItemMentioned()
throws ImpossibleSchedulingException {
ItemUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).build();
ItemUpdate newUpdate = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoB).build();
List<ItemUpdate> scheduled = schedule(updateA, newUpdate);
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).build();
TermedStatementEntityUpdate newUpdate = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoB).build();
List<TermedStatementEntityUpdate> scheduled = schedule(updateA, newUpdate);
assertEquals(Arrays.asList(newUpdate, updateA), scheduled);
}
@Test
public void testMerge()
throws ImpossibleSchedulingException {
ItemUpdate update1 = new ItemUpdateBuilder(existingIdA).addStatement(sAtoB).build();
ItemUpdate update2 = new ItemUpdateBuilder(existingIdA)
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(existingIdA).addStatement(sAtoB).build();
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(existingIdA)
.addLabel(Datamodel.makeMonolingualTextValue("hello", "fr"), true).addStatement(sAtoB).build();
ItemUpdate merged = update1.merge(update2);
TermedStatementEntityUpdate merged = update1.merge(update2);
assertEquals(Collections.singletonList(merged), schedule(update1, update2));
}
@Test
public void testMergeNew()
throws ImpossibleSchedulingException {
ItemUpdate update1 = new ItemUpdateBuilder(newIdA).addLabel(Datamodel.makeMonolingualTextValue("hello", "fr"), true)
TermedStatementEntityUpdate update1 = new ItemUpdateBuilder(newIdA).addLabel(Datamodel.makeMonolingualTextValue("hello", "fr"), true)
.addStatement(sNewAtoB).build();
ItemUpdate update2 = new ItemUpdateBuilder(newIdA).addLabel(Datamodel.makeMonolingualTextValue("hello", "fr"), true)
TermedStatementEntityUpdate update2 = new ItemUpdateBuilder(newIdA).addLabel(Datamodel.makeMonolingualTextValue("hello", "fr"), true)
.build();
ItemUpdate merged = update1.merge(update2);
TermedStatementEntityUpdate merged = update1.merge(update2);
assertEquals(Collections.singletonList(merged), schedule(update1, update2));
}
}

View File

@ -28,7 +28,7 @@ import static org.testng.Assert.assertEquals;
import java.util.Arrays;
import java.util.List;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.TermedStatementEntityUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test;
@ -37,30 +37,30 @@ public class WikibaseAPIUpdateSchedulerTest extends UpdateSchedulerTest {
@Test
public void testOrderPreserved()
throws ImpossibleSchedulingException {
ItemUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoB).build();
ItemUpdate updateB = new ItemUpdateBuilder(existingIdB).addStatement(sBtoA).build();
List<ItemUpdate> scheduled = schedule(updateA, updateB);
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoB).build();
TermedStatementEntityUpdate updateB = new ItemUpdateBuilder(existingIdB).addStatement(sBtoA).build();
List<TermedStatementEntityUpdate> scheduled = schedule(updateA, updateB);
assertEquals(Arrays.asList(updateA, updateB), scheduled);
}
@Test
public void testUpdateIsNotSplit()
throws ImpossibleSchedulingException {
ItemUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).addStatement(sAtoNewB).build();
ItemUpdate newUpdateA = new ItemUpdateBuilder(newIdA).build();
ItemUpdate newUpdateB = new ItemUpdateBuilder(newIdB).build();
List<ItemUpdate> scheduled = schedule(updateA);
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).addStatement(sAtoNewB).build();
TermedStatementEntityUpdate newUpdateA = new ItemUpdateBuilder(newIdA).build();
TermedStatementEntityUpdate newUpdateB = new ItemUpdateBuilder(newIdB).build();
List<TermedStatementEntityUpdate> scheduled = schedule(updateA);
assertSetEquals(Arrays.asList(newUpdateA, newUpdateB, updateA), scheduled);
}
@Test
public void testMixedUpdate()
throws ImpossibleSchedulingException {
ItemUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).addStatement(sAtoNewB)
TermedStatementEntityUpdate updateA = new ItemUpdateBuilder(existingIdA).addStatement(sAtoNewA).addStatement(sAtoNewB)
.addStatement(sAtoB).build();
ItemUpdate newUpdateA = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoB).build();
ItemUpdate newUpdateB = new ItemUpdateBuilder(newIdB).build();
List<ItemUpdate> scheduled = schedule(updateA, newUpdateA);
TermedStatementEntityUpdate newUpdateA = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoB).build();
TermedStatementEntityUpdate newUpdateB = new ItemUpdateBuilder(newIdB).build();
List<TermedStatementEntityUpdate> scheduled = schedule(updateA, newUpdateA);
assertEquals(Arrays.asList(newUpdateA, newUpdateB, updateA), scheduled);
}