Test the commands

This commit is contained in:
Antonin Delpeuch 2018-03-02 17:07:06 +00:00
parent 973a28cc90
commit 7cb8757028
31 changed files with 395 additions and 178 deletions

View File

@ -44,6 +44,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject; import org.json.JSONObject;
import org.json.JSONWriter; import org.json.JSONWriter;
@ -56,6 +57,7 @@ import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.qa.QAWarningStore; import org.openrefine.wikidata.qa.QAWarningStore;
import org.openrefine.wikidata.schema.WikibaseSchema; import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.updates.ItemUpdate; import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.utils.FirstLinesExtractor;
import com.google.refine.model.Project; import com.google.refine.model.Project;
import com.google.refine.util.ParsingUtilities; import com.google.refine.util.ParsingUtilities;
@ -76,8 +78,13 @@ public class PreviewWikibaseSchemaCommand extends Command {
WikibaseSchema schema = null; WikibaseSchema schema = null;
if (jsonString != null) { if (jsonString != null) {
JSONObject json = ParsingUtilities.evaluateJsonStringToObject(jsonString); try {
schema = WikibaseSchema.reconstruct(json); JSONObject json = ParsingUtilities.evaluateJsonStringToObject(jsonString);
schema = WikibaseSchema.reconstruct(json);
} catch(JSONException e) {
respond(response, "error", "Wikibase schema could not be parsed.");
return;
}
} else { } else {
schema = (WikibaseSchema) project.overlayModels.get("wikibaseSchema"); schema = (WikibaseSchema) project.overlayModels.get("wikibaseSchema");
} }
@ -85,6 +92,7 @@ public class PreviewWikibaseSchemaCommand extends Command {
respond(response, "error", "No Wikibase schema provided."); respond(response, "error", "No Wikibase schema provided.");
return; return;
} }
QAWarningStore warningStore = new QAWarningStore(); QAWarningStore warningStore = new QAWarningStore();
// Evaluate project // Evaluate project
@ -117,24 +125,8 @@ public class PreviewWikibaseSchemaCommand extends Command {
QuickStatementsExporter exporter = new QuickStatementsExporter(); QuickStatementsExporter exporter = new QuickStatementsExporter();
exporter.translateItemList(editBatch, stringWriter); exporter.translateItemList(editBatch, stringWriter);
String fullQS = stringWriter.toString();
stringWriter = new StringWriter();
LineNumberReader reader = new LineNumberReader(new StringReader(fullQS));
// Only keep the first 50 lines
int maxQSLinesForPreview = 50;
reader.setLineNumber(0);
String line = reader.readLine();
for(int i = 1; i != maxQSLinesForPreview && line != null; i++) {
stringWriter.write(line+"\n");
line = reader.readLine();
}
if (reader.getLineNumber() == maxQSLinesForPreview) {
stringWriter.write("...");
}
writer.key("quickstatements"); writer.key("quickstatements");
writer.value(stringWriter.toString()); writer.value(FirstLinesExtractor.extractFirstLines(stringWriter.toString(), 50));
} }
writer.endObject(); writer.endObject();

View File

@ -9,6 +9,7 @@ import javax.servlet.http.HttpServletResponse;
import org.openrefine.wikidata.operations.SaveWikibaseSchemaOperation; import org.openrefine.wikidata.operations.SaveWikibaseSchemaOperation;
import org.openrefine.wikidata.schema.WikibaseSchema; import org.openrefine.wikidata.schema.WikibaseSchema;
import org.json.JSONException;
import org.json.JSONObject; import org.json.JSONObject;
import com.google.refine.commands.Command; import com.google.refine.commands.Command;
@ -19,11 +20,7 @@ import com.google.refine.util.ParsingUtilities;
public class SaveWikibaseSchemaCommand extends Command { public class SaveWikibaseSchemaCommand extends Command {
public SaveWikibaseSchemaCommand() { @Override
super();
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) public void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException { throws ServletException, IOException {
@ -31,6 +28,11 @@ public class SaveWikibaseSchemaCommand extends Command {
Project project = getProject(request); Project project = getProject(request);
String jsonString = request.getParameter("schema"); String jsonString = request.getParameter("schema");
if (jsonString == null) {
respond(response, "error", "No Wikibase schema provided.");
return;
}
JSONObject json = ParsingUtilities.evaluateJsonStringToObject(jsonString); JSONObject json = ParsingUtilities.evaluateJsonStringToObject(jsonString);
WikibaseSchema schema = WikibaseSchema.reconstruct(json); WikibaseSchema schema = WikibaseSchema.reconstruct(json);
@ -38,7 +40,9 @@ public class SaveWikibaseSchemaCommand extends Command {
Process process = op.createProcess(project, new Properties()); Process process = op.createProcess(project, new Properties());
performProcessAndRespond(request, response, project, process); performProcessAndRespond(request, response, project, process);
} catch (JSONException e) {
respond(response, "error", "Wikibase schema could not be parsed.");
} catch (Exception e) { } catch (Exception e) {
respondException(response, e); respondException(response, e);
} }

View File

@ -181,7 +181,7 @@ public class ItemUpdate {
Set<MonolingualTextValue> newDescriptions = new HashSet<>(descriptions); Set<MonolingualTextValue> newDescriptions = new HashSet<>(descriptions);
newDescriptions.addAll(other.getDescriptions()); newDescriptions.addAll(other.getDescriptions());
Set<MonolingualTextValue> newAliases = new HashSet<>(aliases); Set<MonolingualTextValue> newAliases = new HashSet<>(aliases);
newAliases.addAll(other.getDescriptions()); newAliases.addAll(other.getAliases());
return new ItemUpdate( return new ItemUpdate(
qid, newAddedStatements, newDeletedStatements, qid, newAddedStatements, newDeletedStatements,
newLabels, newDescriptions, newAliases); newLabels, newDescriptions, newAliases);

View File

@ -0,0 +1,35 @@
package org.openrefine.wikidata.utils;
import java.io.IOException;
import java.io.LineNumberReader;
import java.io.StringReader;
import java.io.StringWriter;
public class FirstLinesExtractor {
/**
* Returns the first n lines of a given string
* @param content
* the content, where lines are separated by '\n'
* @param nbLines
* the number of lines to extract
* @return
* the first lines of the string
* @throws IOException
*/
public static String extractFirstLines(String content, int nbLines) throws IOException {
StringWriter stringWriter = new StringWriter();
LineNumberReader reader = new LineNumberReader(new StringReader(content));
// Only keep the first 50 lines
reader.setLineNumber(0);
String line = reader.readLine();
for(int i = 1; i != nbLines && line != null; i++) {
stringWriter.write(line+"\n");
line = reader.readLine();
}
if (reader.getLineNumber() == nbLines) {
stringWriter.write("...");
}
return stringWriter.toString();
}
}

View File

@ -45,10 +45,8 @@ public abstract class JacksonJsonizable implements Jsonizable {
try { try {
return mapper.readValue(json, klass); return mapper.readValue(json, klass);
} catch (JsonParseException e) { } catch (JsonParseException e) {
e.printStackTrace();
throw new JSONException(e.toString()); throw new JSONException(e.toString());
} catch (JsonMappingException e) { } catch (JsonMappingException e) {
e.printStackTrace();
throw new JSONException(e.toString()); throw new JSONException(e.toString());
} catch (IOException e) { } catch (IOException e) {
throw new JSONException(e.toString()); throw new JSONException(e.toString());

View File

@ -0,0 +1,58 @@
package org.openrefine.wikidata.commands;
import java.io.StringWriter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.openrefine.wikidata.testing.TestingData;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.times;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import com.google.refine.commands.Command;
import com.google.refine.model.Project;
import com.google.refine.tests.RefineTest;
import com.google.refine.util.ParsingUtilities;
public abstract class CommandTest extends RefineTest {
protected Project project = null;
protected HttpServletRequest request = null;
protected HttpServletResponse response = null;
protected StringWriter writer = null;
protected Command command = null;
@BeforeMethod(alwaysRun = true)
public void setUpProject() throws JSONException {
project = createCSVProject(TestingData.inceptionWithNewCsv);
TestingData.reconcileInceptionCells(project);
request = mock(HttpServletRequest.class);
response = mock(HttpServletResponse.class);
writer = new StringWriter();
PrintWriter printWriter = new PrintWriter(writer);
when(request.getParameter("project")).thenReturn(String.valueOf(project.id));
try {
when(response.getWriter()).thenReturn(printWriter);
} catch (IOException e1) {
Assert.fail();
}
}
}

View File

@ -0,0 +1,38 @@
package org.openrefine.wikidata.commands;
import javax.servlet.ServletException;
import org.json.JSONException;
import org.json.JSONObject;
import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
import java.io.IOException;
import com.google.refine.util.ParsingUtilities;
import static org.openrefine.wikidata.testing.TestingData.jsonFromFile;
public class PreviewWikibaseSchemaCommandTest extends SchemaCommandTest {
@BeforeMethod
public void SetUp() throws JSONException {
command = new PreviewWikibaseSchemaCommand();
}
@Test
public void testValidSchema() throws JSONException, IOException, ServletException {
String schemaJson = jsonFromFile("data/schema/inception.json").toString();
when(request.getParameter("schema")).thenReturn(schemaJson);
command.doPost(request, response);
JSONObject response = ParsingUtilities.evaluateJsonStringToObject(writer.toString());
assertEquals(TestingData.inceptionWithNewQS, response.getString("quickstatements"));
}
}

View File

@ -0,0 +1,31 @@
package org.openrefine.wikidata.commands;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.openrefine.wikidata.testing.TestingData.jsonFromFile;
import java.io.IOException;
import javax.servlet.ServletException;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
public class SaveWikibaseSchemaCommandTest extends SchemaCommandTest {
@BeforeMethod
public void setUp() {
this.command = new SaveWikibaseSchemaCommand();
}
@Test
public void testValidSchema() throws ServletException, IOException {
String schemaJson = jsonFromFile("data/schema/inception.json").toString();
when(request.getParameter("schema")).thenReturn(schemaJson);
command.doPost(request, response);
assertTrue(writer.toString().contains("\"ok\""));
}
}

View File

@ -0,0 +1,29 @@
package org.openrefine.wikidata.commands;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
import java.io.IOException;
import javax.servlet.ServletException;
public abstract class SchemaCommandTest extends CommandTest {
@Test
public void testNoSchema() throws ServletException, IOException {
command.doPost(request, response);
assertEquals("{\"status\":\"error\",\"message\":\"No Wikibase schema provided.\"}", writer.toString());
}
@Test
public void testInvalidSchema() throws ServletException, IOException {
when(request.getParameter("schema")).thenReturn("{bogus json");
command.doPost(request, response);
assertEquals("{\"status\":\"error\",\"message\":\"Wikibase schema could not be parsed.\"}", writer.toString());
}
}

View File

@ -6,7 +6,7 @@ import static org.junit.Assert.assertNull;
import java.math.BigDecimal; import java.math.BigDecimal;
import org.openrefine.wikidata.schema.entityvalues.ReconEntityIdValue; import org.openrefine.wikidata.schema.entityvalues.ReconEntityIdValue;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue; import org.wikidata.wdtk.datamodel.interfaces.GlobeCoordinatesValue;
@ -39,11 +39,11 @@ public class QSValuePrinterTest {
@Test @Test
public void printNewItemId() { public void printNewItemId() {
ReconEntityIdValue id = TestingDataGenerator.makeNewItemIdValue(12345L, "my new item"); ReconEntityIdValue id = TestingData.makeNewItemIdValue(12345L, "my new item");
assertEquals("LAST", id.accept(printer)); assertEquals("LAST", id.accept(printer));
// because no entity was previously created // because no entity was previously created
ReconEntityIdValue differentId = TestingDataGenerator.makeMatchedItemIdValue("Q78", "my existing item"); ReconEntityIdValue differentId = TestingData.makeMatchedItemIdValue("Q78", "my existing item");
assertEquals("Q78", differentId.accept(printer)); assertEquals("Q78", differentId.accept(printer));
} }

View File

@ -11,8 +11,7 @@ import java.util.Properties;
import org.json.JSONException; import org.json.JSONException;
import org.json.JSONObject; import org.json.JSONObject;
import org.openrefine.wikidata.schema.WikibaseSchema; import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.schema.WikibaseSchemaTest; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.testing.TestingDataGenerator;
import org.openrefine.wikidata.updates.ItemUpdate; import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.openrefine.wikidata.updates.scheduler.UpdateSchedulerTest; import org.openrefine.wikidata.updates.scheduler.UpdateSchedulerTest;
@ -32,8 +31,8 @@ import com.google.refine.tests.RefineTest;
public class QuickStatementsExporterTest extends RefineTest { public class QuickStatementsExporterTest extends RefineTest {
private QuickStatementsExporter exporter = new QuickStatementsExporter(); private QuickStatementsExporter exporter = new QuickStatementsExporter();
private ItemIdValue newIdA = TestingDataGenerator.makeNewItemIdValue(1234L, "new item A"); private ItemIdValue newIdA = TestingData.makeNewItemIdValue(1234L, "new item A");
private ItemIdValue newIdB = TestingDataGenerator.makeNewItemIdValue(5678L, "new item B"); private ItemIdValue newIdB = TestingData.makeNewItemIdValue(5678L, "new item B");
private ItemIdValue qid1 = Datamodel.makeWikidataItemIdValue("Q1377"); private ItemIdValue qid1 = Datamodel.makeWikidataItemIdValue("Q1377");
private ItemIdValue qid2 = Datamodel.makeWikidataItemIdValue("Q865528"); private ItemIdValue qid2 = Datamodel.makeWikidataItemIdValue("Q865528");
@ -46,14 +45,9 @@ public class QuickStatementsExporterTest extends RefineTest {
@Test @Test
public void testSimpleProject() throws JSONException, IOException { public void testSimpleProject() throws JSONException, IOException {
Project project = this.createCSVProject( Project project = this.createCSVProject(
"subject,inception,reference\n"+ TestingData.inceptionWithNewCsv);
"Q1377,1919,http://www.ljubljana-slovenia.com/university-ljubljana\n"+ TestingData.reconcileInceptionCells(project);
"Q865528,1965,\n"+ JSONObject serialized = TestingData.jsonFromFile("data/schema/inception.json");
"new uni,2016,http://new-uni.com/");
project.rows.get(0).cells.set(0, TestingDataGenerator.makeMatchedCell("Q1377", "University of Ljubljana"));
project.rows.get(1).cells.set(0, TestingDataGenerator.makeMatchedCell("Q865528", "University of Warwick"));
project.rows.get(2).cells.set(0, TestingDataGenerator.makeNewItemCell(1234L, "new uni"));
JSONObject serialized = WikibaseSchemaTest.jsonFromFile("data/schema/inception.json");
WikibaseSchema schema = WikibaseSchema.reconstruct(serialized); WikibaseSchema schema = WikibaseSchema.reconstruct(serialized);
project.overlayModels.put("wikibaseSchema", schema); project.overlayModels.put("wikibaseSchema", schema);
Engine engine = new Engine(project); Engine engine = new Engine(project);
@ -61,21 +55,12 @@ public class QuickStatementsExporterTest extends RefineTest {
StringWriter writer = new StringWriter(); StringWriter writer = new StringWriter();
Properties properties = new Properties(); Properties properties = new Properties();
exporter.export(project, properties, engine, writer); exporter.export(project, properties, engine, writer);
assertEquals( assertEquals(TestingData.inceptionWithNewQS, writer.toString());
"Q1377\tP571\t+1919-01-01T00:00:00Z/9"+
"\tS854\t\"http://www.ljubljana-slovenia.com/university-ljubljana\""+
"\tS813\t+2018-02-28T00:00:00Z/11\n" +
"Q865528\tP571\t+1965-01-01T00:00:00Z/9"+
"\tS813\t+2018-02-28T00:00:00Z/11\n"+
"CREATE\n"+
"LAST\tP571\t+2016-01-01T00:00:00Z/9"+
"\tS854\t\"http://new-uni.com/\""+
"\tS813\t+2018-02-28T00:00:00Z/11\n", writer.toString());
} }
@Test @Test
public void testImpossibleScheduling() throws IOException { public void testImpossibleScheduling() throws IOException {
Statement sNewAtoNewB = TestingDataGenerator.generateStatement(newIdA, newIdB); Statement sNewAtoNewB = TestingData.generateStatement(newIdA, newIdB);
ItemUpdate update = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoNewB).build(); ItemUpdate update = new ItemUpdateBuilder(newIdA).addStatement(sNewAtoNewB).build();
assertEquals(QuickStatementsExporter.impossibleSchedulingErrorMessage, assertEquals(QuickStatementsExporter.impossibleSchedulingErrorMessage,
@ -100,7 +85,7 @@ public class QuickStatementsExporterTest extends RefineTest {
@Test @Test
public void testDeleteStatement() throws IOException { public void testDeleteStatement() throws IOException {
ItemUpdate update = new ItemUpdateBuilder(qid1) ItemUpdate update = new ItemUpdateBuilder(qid1)
.deleteStatement(TestingDataGenerator.generateStatement(qid1, qid2)) .deleteStatement(TestingData.generateStatement(qid1, qid2))
.build(); .build();
assertEquals("- Q1377\tP38\tQ865528\n", export(update)); assertEquals("- Q1377\tP38\tQ865528\n", export(update));
@ -108,8 +93,8 @@ public class QuickStatementsExporterTest extends RefineTest {
@Test @Test
public void testQualifier() throws IOException { public void testQualifier() throws IOException {
Statement baseStatement = TestingDataGenerator.generateStatement(qid1, qid2); Statement baseStatement = TestingData.generateStatement(qid1, qid2);
Statement otherStatement = TestingDataGenerator.generateStatement(qid2, qid1); Statement otherStatement = TestingData.generateStatement(qid2, qid1);
Snak qualifierSnak = otherStatement.getClaim().getMainSnak(); Snak qualifierSnak = otherStatement.getClaim().getMainSnak();
SnakGroup group = Datamodel.makeSnakGroup(Collections.singletonList(qualifierSnak)); SnakGroup group = Datamodel.makeSnakGroup(Collections.singletonList(qualifierSnak));
Claim claim = Datamodel.makeClaim(qid1, baseStatement.getClaim().getMainSnak(), Claim claim = Datamodel.makeClaim(qid1, baseStatement.getClaim().getMainSnak(),

View File

@ -1,6 +1,6 @@
package org.openrefine.wikidata.qa.scrutinizers; package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate; import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test; import org.testng.annotations.Test;
@ -15,13 +15,13 @@ public class DistinctValuesScrutinizerTest extends StatementScrutinizerTest {
@Test @Test
public void testTrigger() { public void testTrigger() {
ItemIdValue idA = TestingDataGenerator.existingId; ItemIdValue idA = TestingData.existingId;
ItemIdValue idB = TestingDataGenerator.matchedId; ItemIdValue idB = TestingData.matchedId;
ItemUpdate updateA = new ItemUpdateBuilder(idA) ItemUpdate updateA = new ItemUpdateBuilder(idA)
.addStatement(TestingDataGenerator.generateStatement(idA, idB)) .addStatement(TestingData.generateStatement(idA, idB))
.build(); .build();
ItemUpdate updateB = new ItemUpdateBuilder(idB) ItemUpdate updateB = new ItemUpdateBuilder(idB)
.addStatement(TestingDataGenerator.generateStatement(idB, idB)) .addStatement(TestingData.generateStatement(idB, idB))
.build(); .build();
scrutinize(updateA, updateB); scrutinize(updateA, updateB);
assertWarningsRaised(DistinctValuesScrutinizer.type); assertWarningsRaised(DistinctValuesScrutinizer.type);

View File

@ -1,7 +1,7 @@
package org.openrefine.wikidata.qa.scrutinizers; package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.qa.MockConstraintFetcher; import org.openrefine.wikidata.qa.MockConstraintFetcher;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate; import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test; import org.testng.annotations.Test;
@ -10,8 +10,8 @@ import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
public class InverseConstaintScrutinizerTest extends StatementScrutinizerTest { public class InverseConstaintScrutinizerTest extends StatementScrutinizerTest {
private ItemIdValue idA = TestingDataGenerator.existingId; private ItemIdValue idA = TestingData.existingId;
private ItemIdValue idB = TestingDataGenerator.newIdB; private ItemIdValue idB = TestingData.newIdB;
private PropertyIdValue pidWithInverse = MockConstraintFetcher.pidWithInverse; private PropertyIdValue pidWithInverse = MockConstraintFetcher.pidWithInverse;
private PropertyIdValue inversePid = MockConstraintFetcher.inversePid; private PropertyIdValue inversePid = MockConstraintFetcher.inversePid;
@ -23,7 +23,7 @@ public class InverseConstaintScrutinizerTest extends StatementScrutinizerTest {
@Test @Test
public void testTrigger() { public void testTrigger() {
ItemUpdate update = new ItemUpdateBuilder(idA) ItemUpdate update = new ItemUpdateBuilder(idA)
.addStatement(TestingDataGenerator.generateStatement(idA, pidWithInverse, idB)) .addStatement(TestingData.generateStatement(idA, pidWithInverse, idB))
.build(); .build();
scrutinize(update); scrutinize(update);
assertWarningsRaised(InverseConstraintScrutinizer.type); assertWarningsRaised(InverseConstraintScrutinizer.type);
@ -32,7 +32,7 @@ public class InverseConstaintScrutinizerTest extends StatementScrutinizerTest {
@Test @Test
public void testNoSymmetricClosure() { public void testNoSymmetricClosure() {
ItemUpdate update = new ItemUpdateBuilder(idA) ItemUpdate update = new ItemUpdateBuilder(idA)
.addStatement(TestingDataGenerator.generateStatement(idA, inversePid, idB)) .addStatement(TestingData.generateStatement(idA, inversePid, idB))
.build(); .build();
scrutinize(update); scrutinize(update);
assertNoWarningRaised(); assertNoWarningRaised();

View File

@ -2,7 +2,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import java.util.Collections; import java.util.Collections;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate; import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test; import org.testng.annotations.Test;
@ -14,8 +14,8 @@ import org.wikidata.wdtk.datamodel.interfaces.StatementRank;
public class NewItemScrutinizerTest extends ScrutinizerTest { public class NewItemScrutinizerTest extends ScrutinizerTest {
private Claim claim = Datamodel.makeClaim(TestingDataGenerator.newIdA, private Claim claim = Datamodel.makeClaim(TestingData.newIdA,
Datamodel.makeValueSnak(Datamodel.makeWikidataPropertyIdValue("P31"), TestingDataGenerator.existingId), Datamodel.makeValueSnak(Datamodel.makeWikidataPropertyIdValue("P31"), TestingData.existingId),
Collections.emptyList()); Collections.emptyList());
private Statement p31Statement = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, ""); private Statement p31Statement = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
@ -26,7 +26,7 @@ public class NewItemScrutinizerTest extends ScrutinizerTest {
@Test @Test
public void testTrigger() { public void testTrigger() {
ItemUpdate update = new ItemUpdateBuilder(TestingDataGenerator.newIdA).build(); ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA).build();
scrutinize(update); scrutinize(update);
assertWarningsRaised( assertWarningsRaised(
NewItemScrutinizer.noDescType, NewItemScrutinizer.noDescType,
@ -37,7 +37,7 @@ public class NewItemScrutinizerTest extends ScrutinizerTest {
@Test @Test
public void testEmptyItem() { public void testEmptyItem() {
ItemUpdate update = new ItemUpdateBuilder(TestingDataGenerator.existingId).build(); ItemUpdate update = new ItemUpdateBuilder(TestingData.existingId).build();
scrutinize(update); scrutinize(update);
assertNoWarningRaised(); assertNoWarningRaised();
} }
@ -45,7 +45,7 @@ public class NewItemScrutinizerTest extends ScrutinizerTest {
@Test @Test
public void testGoodNewItem() { public void testGoodNewItem() {
ItemUpdate update = new ItemUpdateBuilder(TestingDataGenerator.newIdA) ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addLabel(Datamodel.makeMonolingualTextValue("bonjour", "fr")) .addLabel(Datamodel.makeMonolingualTextValue("bonjour", "fr"))
.addDescription(Datamodel.makeMonolingualTextValue("interesting item", "en")) .addDescription(Datamodel.makeMonolingualTextValue("interesting item", "en"))
.addStatement(p31Statement) .addStatement(p31Statement)
@ -56,12 +56,12 @@ public class NewItemScrutinizerTest extends ScrutinizerTest {
@Test @Test
public void testDeletedStatements() { public void testDeletedStatements() {
ItemUpdate update = new ItemUpdateBuilder(TestingDataGenerator.newIdA) ItemUpdate update = new ItemUpdateBuilder(TestingData.newIdA)
.addLabel(Datamodel.makeMonolingualTextValue("bonjour", "fr")) .addLabel(Datamodel.makeMonolingualTextValue("bonjour", "fr"))
.addDescription(Datamodel.makeMonolingualTextValue("interesting item", "en")) .addDescription(Datamodel.makeMonolingualTextValue("interesting item", "en"))
.addStatement(p31Statement) .addStatement(p31Statement)
.deleteStatement(TestingDataGenerator.generateStatement(TestingDataGenerator.newIdA, .deleteStatement(TestingData.generateStatement(TestingData.newIdA,
TestingDataGenerator.matchedId)) TestingData.matchedId))
.build(); .build();
scrutinize(update); scrutinize(update);
assertWarningsRaised(NewItemScrutinizer.newItemType, NewItemScrutinizer.deletedStatementsType); assertWarningsRaised(NewItemScrutinizer.newItemType, NewItemScrutinizer.deletedStatementsType);

View File

@ -1,6 +1,6 @@
package org.openrefine.wikidata.qa.scrutinizers; package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test; import org.testng.annotations.Test;
@ -19,13 +19,13 @@ public class NoEditsMadeScrutinizerTest extends ScrutinizerTest {
@Test @Test
public void testNonNull() { public void testNonNull() {
scrutinize(new ItemUpdateBuilder(TestingDataGenerator.newIdA).build()); scrutinize(new ItemUpdateBuilder(TestingData.newIdA).build());
assertNoWarningRaised(); assertNoWarningRaised();
} }
@Test @Test
public void testNull() { public void testNull() {
scrutinize(new ItemUpdateBuilder(TestingDataGenerator.existingId).build()); scrutinize(new ItemUpdateBuilder(TestingData.existingId).build());
assertWarningsRaised(NoEditsMadeScrutinizer.type); assertWarningsRaised(NoEditsMadeScrutinizer.type);
} }
} }

View File

@ -6,7 +6,7 @@ import java.util.List;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.openrefine.wikidata.qa.MockConstraintFetcher; import org.openrefine.wikidata.qa.MockConstraintFetcher;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.Claim;
@ -45,7 +45,7 @@ public class QualifierCompatibilityScrutinizerTest extends StatementScrutinizerT
} }
private Statement makeStatement(Snak... qualifiers) { private Statement makeStatement(Snak... qualifiers) {
Claim claim = Datamodel.makeClaim(TestingDataGenerator.existingId, Claim claim = Datamodel.makeClaim(TestingData.existingId,
Datamodel.makeNoValueSnak(MockConstraintFetcher.mainSnakPid), makeQualifiers(qualifiers)); Datamodel.makeNoValueSnak(MockConstraintFetcher.mainSnakPid), makeQualifiers(qualifiers));
return Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, ""); return Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
} }

View File

@ -4,7 +4,7 @@ import java.util.Collections;
import java.util.List; import java.util.List;
import org.openrefine.wikidata.qa.MockConstraintFetcher; import org.openrefine.wikidata.qa.MockConstraintFetcher;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
@ -15,7 +15,7 @@ import org.wikidata.wdtk.datamodel.interfaces.StatementRank;
public class RestrictedPositionScrutinizerTest extends SnakScrutinizerTest { public class RestrictedPositionScrutinizerTest extends SnakScrutinizerTest {
private ItemIdValue qid = TestingDataGenerator.existingId; private ItemIdValue qid = TestingData.existingId;
@Override @Override
public EditScrutinizer getScrutinizer() { public EditScrutinizer getScrutinizer() {
@ -24,19 +24,19 @@ public class RestrictedPositionScrutinizerTest extends SnakScrutinizerTest {
@Test @Test
public void testTriggerMainSnak() { public void testTriggerMainSnak() {
scrutinize(TestingDataGenerator.generateStatement(qid, MockConstraintFetcher.qualifierPid, qid)); scrutinize(TestingData.generateStatement(qid, MockConstraintFetcher.qualifierPid, qid));
assertWarningsRaised("property-restricted-to-qualifier-found-in-mainsnak"); assertWarningsRaised("property-restricted-to-qualifier-found-in-mainsnak");
} }
@Test @Test
public void testNoProblem() { public void testNoProblem() {
scrutinize(TestingDataGenerator.generateStatement(qid, MockConstraintFetcher.mainSnakPid, qid)); scrutinize(TestingData.generateStatement(qid, MockConstraintFetcher.mainSnakPid, qid));
assertNoWarningRaised(); assertNoWarningRaised();
} }
@Test @Test
public void testNotRestricted() { public void testNotRestricted() {
scrutinize(TestingDataGenerator.generateStatement(qid, Datamodel.makeWikidataPropertyIdValue("P3748"), qid)); scrutinize(TestingData.generateStatement(qid, Datamodel.makeWikidataPropertyIdValue("P3748"), qid));
assertNoWarningRaised(); assertNoWarningRaised();
} }
@ -45,7 +45,7 @@ public class RestrictedPositionScrutinizerTest extends SnakScrutinizerTest {
Snak snak = Datamodel.makeValueSnak(MockConstraintFetcher.mainSnakPid, qid); Snak snak = Datamodel.makeValueSnak(MockConstraintFetcher.mainSnakPid, qid);
List<SnakGroup> snakGroups = Collections.singletonList(Datamodel.makeSnakGroup(Collections.singletonList(snak))); List<SnakGroup> snakGroups = Collections.singletonList(Datamodel.makeSnakGroup(Collections.singletonList(snak)));
Statement statement = Datamodel.makeStatement( Statement statement = Datamodel.makeStatement(
TestingDataGenerator.generateStatement(qid, MockConstraintFetcher.mainSnakPid, qid).getClaim(), TestingData.generateStatement(qid, MockConstraintFetcher.mainSnakPid, qid).getClaim(),
Collections.singletonList(Datamodel.makeReference(snakGroups)), Collections.singletonList(Datamodel.makeReference(snakGroups)),
StatementRank.NORMAL, ""); StatementRank.NORMAL, "");
scrutinize(statement); scrutinize(statement);

View File

@ -1,6 +1,6 @@
package org.openrefine.wikidata.qa.scrutinizers; package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue; import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
@ -13,15 +13,15 @@ public class SelfReferentialScrutinizerTest extends StatementScrutinizerTest {
@Test @Test
public void testTrigger() { public void testTrigger() {
ItemIdValue id = TestingDataGenerator.matchedId; ItemIdValue id = TestingData.matchedId;
scrutinize(TestingDataGenerator.generateStatement(id, id)); scrutinize(TestingData.generateStatement(id, id));
assertWarningsRaised(SelfReferentialScrutinizer.type); assertWarningsRaised(SelfReferentialScrutinizer.type);
} }
@Test @Test
public void testNoProblem() { public void testNoProblem() {
ItemIdValue id = TestingDataGenerator.matchedId; ItemIdValue id = TestingData.matchedId;
scrutinize(TestingDataGenerator.generateStatement(id, TestingDataGenerator.existingId)); scrutinize(TestingData.generateStatement(id, TestingData.existingId));
assertNoWarningRaised(); assertNoWarningRaised();
} }
} }

View File

@ -1,6 +1,6 @@
package org.openrefine.wikidata.qa.scrutinizers; package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate; import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test; import org.testng.annotations.Test;
@ -15,11 +15,11 @@ public class SingleValueScrutinizerTest extends ScrutinizerTest {
@Test @Test
public void testTrigger() { public void testTrigger() {
ItemIdValue idA = TestingDataGenerator.existingId; ItemIdValue idA = TestingData.existingId;
ItemIdValue idB = TestingDataGenerator.matchedId; ItemIdValue idB = TestingData.matchedId;
ItemUpdate update = new ItemUpdateBuilder(idA) ItemUpdate update = new ItemUpdateBuilder(idA)
.addStatement(TestingDataGenerator.generateStatement(idA, idB)) .addStatement(TestingData.generateStatement(idA, idB))
.addStatement(TestingDataGenerator.generateStatement(idA, idB)) .addStatement(TestingData.generateStatement(idA, idB))
.build(); .build();
scrutinize(update); scrutinize(update);
assertWarningsRaised(SingleValueScrutinizer.type); assertWarningsRaised(SingleValueScrutinizer.type);
@ -27,13 +27,13 @@ public class SingleValueScrutinizerTest extends ScrutinizerTest {
@Test @Test
public void testNoIssue() { public void testNoIssue() {
ItemIdValue idA = TestingDataGenerator.existingId; ItemIdValue idA = TestingData.existingId;
ItemIdValue idB = TestingDataGenerator.matchedId; ItemIdValue idB = TestingData.matchedId;
ItemUpdate updateA = new ItemUpdateBuilder(idA) ItemUpdate updateA = new ItemUpdateBuilder(idA)
.addStatement(TestingDataGenerator.generateStatement(idA, idB)) .addStatement(TestingData.generateStatement(idA, idB))
.build(); .build();
ItemUpdate updateB = new ItemUpdateBuilder(idB) ItemUpdate updateB = new ItemUpdateBuilder(idB)
.addStatement(TestingDataGenerator.generateStatement(idB, idB)) .addStatement(TestingData.generateStatement(idB, idB))
.build(); .build();
scrutinize(updateA, updateB); scrutinize(updateA, updateB);
assertNoWarningRaised(); assertNoWarningRaised();

View File

@ -3,7 +3,7 @@ package org.openrefine.wikidata.qa.scrutinizers;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.Claim;
import org.wikidata.wdtk.datamodel.interfaces.Snak; import org.wikidata.wdtk.datamodel.interfaces.Snak;
@ -16,21 +16,21 @@ public abstract class SnakScrutinizerTest extends StatementScrutinizerTest {
public static Snak defaultMainSnak = Datamodel.makeNoValueSnak(Datamodel.makeWikidataPropertyIdValue("P3928")); public static Snak defaultMainSnak = Datamodel.makeNoValueSnak(Datamodel.makeWikidataPropertyIdValue("P3928"));
public void scrutinize(Snak snak) { public void scrutinize(Snak snak) {
Claim claim = Datamodel.makeClaim(TestingDataGenerator.existingId, snak, Claim claim = Datamodel.makeClaim(TestingData.existingId, snak,
Collections.emptyList()); Collections.emptyList());
Statement statement = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, ""); Statement statement = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
scrutinize(statement); scrutinize(statement);
} }
public void scrutinizeAsQualifier(Snak snak) { public void scrutinizeAsQualifier(Snak snak) {
Claim claim = Datamodel.makeClaim(TestingDataGenerator.existingId, defaultMainSnak, Claim claim = Datamodel.makeClaim(TestingData.existingId, defaultMainSnak,
toSnakGroups(snak)); toSnakGroups(snak));
Statement statement = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, ""); Statement statement = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "");
scrutinize(statement); scrutinize(statement);
} }
public void scrutinizeAsReference(Snak snak) { public void scrutinizeAsReference(Snak snak) {
Claim claim = Datamodel.makeClaim(TestingDataGenerator.existingId, defaultMainSnak, Claim claim = Datamodel.makeClaim(TestingData.existingId, defaultMainSnak,
Collections.emptyList()); Collections.emptyList());
Statement statement = Datamodel.makeStatement(claim, Statement statement = Datamodel.makeStatement(claim,
Collections.singletonList(Datamodel.makeReference(toSnakGroups(snak))), StatementRank.NORMAL, ""); Collections.singletonList(Datamodel.makeReference(toSnakGroups(snak))), StatementRank.NORMAL, "");

View File

@ -1,6 +1,6 @@
package org.openrefine.wikidata.qa.scrutinizers; package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.Test; import org.testng.annotations.Test;
public class UnsourcedScrutinizerTest extends StatementScrutinizerTest { public class UnsourcedScrutinizerTest extends StatementScrutinizerTest {
@ -12,8 +12,8 @@ public class UnsourcedScrutinizerTest extends StatementScrutinizerTest {
@Test @Test
public void testTrigger() { public void testTrigger() {
scrutinize(TestingDataGenerator.generateStatement(TestingDataGenerator.existingId, scrutinize(TestingData.generateStatement(TestingData.existingId,
TestingDataGenerator.matchedId)); TestingData.matchedId));
assertWarningsRaised(UnsourcedScrutinizer.type); assertWarningsRaised(UnsourcedScrutinizer.type);
} }

View File

@ -1,6 +1,6 @@
package org.openrefine.wikidata.qa.scrutinizers; package org.openrefine.wikidata.qa.scrutinizers;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue; import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
@ -16,6 +16,6 @@ public abstract class ValueScrutinizerTest extends SnakScrutinizerTest {
} }
public void scrutinizeLabel(MonolingualTextValue text) { public void scrutinizeLabel(MonolingualTextValue text) {
scrutinize(new ItemUpdateBuilder(TestingDataGenerator.existingId).addLabel(text).build()); scrutinize(new ItemUpdateBuilder(TestingData.existingId).addLabel(text).build());
} }
} }

View File

@ -5,7 +5,7 @@ import java.io.Serializable;
import org.openrefine.wikidata.qa.QAWarningStore; import org.openrefine.wikidata.qa.QAWarningStore;
import org.openrefine.wikidata.schema.exceptions.SkipSchemaExpressionException; import org.openrefine.wikidata.schema.exceptions.SkipSchemaExpressionException;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.testng.Assert; import org.testng.Assert;
import org.testng.annotations.BeforeMethod; import org.testng.annotations.BeforeMethod;
@ -96,6 +96,6 @@ public class WbExpressionTest<T> extends RefineTest {
* a cell for use in setRow * a cell for use in setRow
*/ */
public Cell recon(String qid) { public Cell recon(String qid) {
return TestingDataGenerator.makeMatchedCell(qid, qid); return TestingData.makeMatchedCell(qid, qid);
} }
} }

View File

@ -5,7 +5,7 @@ import static org.junit.Assert.assertEquals;
import java.util.Collections; import java.util.Collections;
import org.openrefine.wikidata.testing.JacksonSerializationTest; import org.openrefine.wikidata.testing.JacksonSerializationTest;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -25,7 +25,7 @@ public class WbNameDescExprTest extends WbExpressionTest<MonolingualTextValue> {
@Test @Test
public void testContributeToLabel() { public void testContributeToLabel() {
WbNameDescExpr labelExpr = new WbNameDescExpr(WbNameDescExpr.NameDescrType.LABEL, WbNameDescExpr labelExpr = new WbNameDescExpr(WbNameDescExpr.NameDescrType.LABEL,
TestingDataGenerator.getTestMonolingualExpr("fr", "français", "le croissant magnifique")); TestingData.getTestMonolingualExpr("fr", "français", "le croissant magnifique"));
ItemUpdateBuilder update = new ItemUpdateBuilder(subject); ItemUpdateBuilder update = new ItemUpdateBuilder(subject);
labelExpr.contributeTo(update, ctxt); labelExpr.contributeTo(update, ctxt);
assertEquals(Collections.singleton(Datamodel.makeMonolingualTextValue("le croissant magnifique", "fr")), assertEquals(Collections.singleton(Datamodel.makeMonolingualTextValue("le croissant magnifique", "fr")),
@ -35,7 +35,7 @@ public class WbNameDescExprTest extends WbExpressionTest<MonolingualTextValue> {
@Test @Test
public void testContributeToDescription() { public void testContributeToDescription() {
WbNameDescExpr descriptionExpr = new WbNameDescExpr(WbNameDescExpr.NameDescrType.DESCRIPTION, WbNameDescExpr descriptionExpr = new WbNameDescExpr(WbNameDescExpr.NameDescrType.DESCRIPTION,
TestingDataGenerator.getTestMonolingualExpr("de", "Deutsch", "wunderschön")); TestingData.getTestMonolingualExpr("de", "Deutsch", "wunderschön"));
ItemUpdateBuilder update = new ItemUpdateBuilder(subject); ItemUpdateBuilder update = new ItemUpdateBuilder(subject);
descriptionExpr.contributeTo(update, ctxt); descriptionExpr.contributeTo(update, ctxt);
assertEquals(Collections.singleton(Datamodel.makeMonolingualTextValue("wunderschön", "de")), assertEquals(Collections.singleton(Datamodel.makeMonolingualTextValue("wunderschön", "de")),
@ -45,7 +45,7 @@ public class WbNameDescExprTest extends WbExpressionTest<MonolingualTextValue> {
@Test @Test
public void testContributeToAlias() { public void testContributeToAlias() {
WbNameDescExpr aliasExpr = new WbNameDescExpr(WbNameDescExpr.NameDescrType.ALIAS, WbNameDescExpr aliasExpr = new WbNameDescExpr(WbNameDescExpr.NameDescrType.ALIAS,
TestingDataGenerator.getTestMonolingualExpr("en", "English", "snack")); TestingData.getTestMonolingualExpr("en", "English", "snack"));
ItemUpdateBuilder update = new ItemUpdateBuilder(subject); ItemUpdateBuilder update = new ItemUpdateBuilder(subject);
aliasExpr.contributeTo(update, ctxt); aliasExpr.contributeTo(update, ctxt);
assertEquals(Collections.singleton(Datamodel.makeMonolingualTextValue("snack", "en")), assertEquals(Collections.singleton(Datamodel.makeMonolingualTextValue("snack", "en")),
@ -62,7 +62,7 @@ public class WbNameDescExprTest extends WbExpressionTest<MonolingualTextValue> {
@Test @Test
public void testGetters() { public void testGetters() {
WbMonolingualExpr monolingualExpr = TestingDataGenerator.getTestMonolingualExpr("en", "English", "not sure what"); WbMonolingualExpr monolingualExpr = TestingData.getTestMonolingualExpr("en", "English", "not sure what");
WbNameDescExpr aliasExpr = new WbNameDescExpr(WbNameDescExpr.NameDescrType.ALIAS, WbNameDescExpr aliasExpr = new WbNameDescExpr(WbNameDescExpr.NameDescrType.ALIAS,
monolingualExpr); monolingualExpr);
assertEquals(WbNameDescExpr.NameDescrType.ALIAS, aliasExpr.getType()); assertEquals(WbNameDescExpr.NameDescrType.ALIAS, aliasExpr.getType());

View File

@ -4,8 +4,6 @@ import static org.junit.Assert.assertEquals;
import java.io.IOException; import java.io.IOException;
import java.io.StringWriter; import java.io.StringWriter;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@ -15,7 +13,7 @@ import java.util.Properties;
import org.json.JSONException; import org.json.JSONException;
import org.json.JSONObject; import org.json.JSONObject;
import org.json.JSONWriter; import org.json.JSONWriter;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate; import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.BeforeMethod; import org.testng.annotations.BeforeMethod;
@ -66,25 +64,16 @@ public class WikibaseSchemaTest extends RefineTest {
private Project project; private Project project;
public static JSONObject jsonFromFile(String filename) throws IOException, JSONException {
byte[] contents = Files.readAllBytes(Paths.get(filename));
String decoded = new String(contents, "utf-8");
return ParsingUtilities.evaluateJsonStringToObject(decoded);
}
@BeforeMethod @BeforeMethod
public void setUpProject() { public void setUpProject() {
project = this.createCSVProject( project = this.createCSVProject(TestingData.inceptionCsv);
"subject,inception,reference\n"+ project.rows.get(0).cells.set(0, TestingData.makeMatchedCell("Q1377", "University of Ljubljana"));
"Q1377,1919,http://www.ljubljana-slovenia.com/university-ljubljana\n"+ project.rows.get(1).cells.set(0, TestingData.makeMatchedCell("Q865528", "University of Warwick"));
"Q865528,1965,");
project.rows.get(0).cells.set(0, TestingDataGenerator.makeMatchedCell("Q1377", "University of Ljubljana"));
project.rows.get(1).cells.set(0, TestingDataGenerator.makeMatchedCell("Q865528", "University of Warwick"));
} }
@Test @Test
public void testSerialize() throws JSONException, IOException { public void testSerialize() throws JSONException, IOException {
JSONObject serialized = jsonFromFile("data/schema/history_of_medicine.json"); JSONObject serialized = TestingData.jsonFromFile("data/schema/history_of_medicine.json");
WikibaseSchema parsed = WikibaseSchema.reconstruct(serialized); WikibaseSchema parsed = WikibaseSchema.reconstruct(serialized);
StringWriter writer = new StringWriter(); StringWriter writer = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(writer); JSONWriter jsonWriter = new JSONWriter(writer);
@ -92,20 +81,20 @@ public class WikibaseSchemaTest extends RefineTest {
writer.close(); writer.close();
JSONObject newSerialized = ParsingUtilities.evaluateJsonStringToObject(writer.toString()); JSONObject newSerialized = ParsingUtilities.evaluateJsonStringToObject(writer.toString());
// toString because it looks like JSONObject equality isn't great // toString because it looks like JSONObject equality isn't great
assertEquals(jsonFromFile("data/schema/history_of_medicine_normalized.json").toString(), newSerialized.toString()); assertEquals(TestingData.jsonFromFile("data/schema/history_of_medicine_normalized.json").toString(), newSerialized.toString());
} }
@Test @Test
public void testDeserialize() throws JSONException, IOException { public void testDeserialize() throws JSONException, IOException {
// this json file was generated by an earlier version of the software // this json file was generated by an earlier version of the software
// it contains extra "type" fields that are now ignored. // it contains extra "type" fields that are now ignored.
JSONObject serialized = jsonFromFile("data/schema/roarmap.json"); JSONObject serialized = TestingData.jsonFromFile("data/schema/roarmap.json");
WikibaseSchema.reconstruct(serialized); WikibaseSchema.reconstruct(serialized);
} }
@Test @Test
public void testEvaluate() throws JSONException, IOException { public void testEvaluate() throws JSONException, IOException {
JSONObject serialized = jsonFromFile("data/schema/inception.json"); JSONObject serialized = TestingData.jsonFromFile("data/schema/inception.json");
WikibaseSchema schema = WikibaseSchema.reconstruct(serialized); WikibaseSchema schema = WikibaseSchema.reconstruct(serialized);
Engine engine = new Engine(project); Engine engine = new Engine(project);
List<ItemUpdate> updates = schema.evaluate(project, engine); List<ItemUpdate> updates = schema.evaluate(project, engine);
@ -119,7 +108,7 @@ public class WikibaseSchemaTest extends RefineTest {
@Test @Test
public void testEvaluateRespectsFacets() throws JSONException, IOException { public void testEvaluateRespectsFacets() throws JSONException, IOException {
JSONObject serialized = jsonFromFile("data/schema/inception.json"); JSONObject serialized = TestingData.jsonFromFile("data/schema/inception.json");
WikibaseSchema schema = WikibaseSchema.reconstruct(serialized); WikibaseSchema schema = WikibaseSchema.reconstruct(serialized);
Engine engine = new Engine(project); Engine engine = new Engine(project);
JSONObject engineConfig = new JSONObject("{\n" + JSONObject engineConfig = new JSONObject("{\n" +

View File

@ -7,7 +7,7 @@ import static org.junit.Assert.assertTrue;
import java.util.Collections; import java.util.Collections;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -15,12 +15,12 @@ import com.google.refine.model.Recon;
public class ReconEntityIdValueTest { public class ReconEntityIdValueTest {
private ReconEntityIdValue newItem = TestingDataGenerator.makeNewItemIdValue(1234L, "new item"); private ReconEntityIdValue newItem = TestingData.makeNewItemIdValue(1234L, "new item");
private ReconEntityIdValue sameNewItem = TestingDataGenerator.makeNewItemIdValue(1234L, "different text"); private ReconEntityIdValue sameNewItem = TestingData.makeNewItemIdValue(1234L, "different text");
private ReconEntityIdValue differentNewItem = TestingDataGenerator.makeNewItemIdValue(7890L, "new item"); private ReconEntityIdValue differentNewItem = TestingData.makeNewItemIdValue(7890L, "new item");
private ReconEntityIdValue newProp = TestingDataGenerator.makeNewPropertyIdValue(1234L, "new prop"); private ReconEntityIdValue newProp = TestingData.makeNewPropertyIdValue(1234L, "new prop");
private ReconEntityIdValue existingProp = TestingDataGenerator.makeMatchedPropertyIdValue("P53", "new prop"); private ReconEntityIdValue existingProp = TestingData.makeMatchedPropertyIdValue("P53", "new prop");
private ReconEntityIdValue existingItem = TestingDataGenerator.makeMatchedItemIdValue("Q42", "existing item"); private ReconEntityIdValue existingItem = TestingData.makeMatchedItemIdValue("Q42", "existing item");
@Test @Test
public void testIsNew() { public void testIsNew() {
@ -37,7 +37,7 @@ public class ReconEntityIdValueTest {
@Test @Test
public void testGetTypes() { public void testGetTypes() {
String[] types = {"Q5"}; String[] types = {"Q5"};
Recon matchedRecon = TestingDataGenerator.makeMatchedRecon("Q453", "other item", types); Recon matchedRecon = TestingData.makeMatchedRecon("Q453", "other item", types);
ReconEntityIdValue existingIdWithTypes = new ReconItemIdValue(matchedRecon, "cell content"); ReconEntityIdValue existingIdWithTypes = new ReconItemIdValue(matchedRecon, "cell content");
assertEquals(Collections.singletonList("Q5"), existingIdWithTypes.getTypes()); assertEquals(Collections.singletonList("Q5"), existingIdWithTypes.getTypes());
assertEquals(Collections.emptyList(), existingItem.getTypes()); assertEquals(Collections.emptyList(), existingItem.getTypes());

View File

@ -1,7 +1,13 @@
package org.openrefine.wikidata.testing; package org.openrefine.wikidata.testing;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections; import java.util.Collections;
import org.json.JSONException;
import org.json.JSONObject;
import org.openrefine.wikidata.exporters.QuickStatementsExporterTest;
import org.openrefine.wikidata.schema.WbLanguageConstant; import org.openrefine.wikidata.schema.WbLanguageConstant;
import org.openrefine.wikidata.schema.WbMonolingualExpr; import org.openrefine.wikidata.schema.WbMonolingualExpr;
import org.openrefine.wikidata.schema.WbStringConstant; import org.openrefine.wikidata.schema.WbStringConstant;
@ -15,10 +21,35 @@ import org.wikidata.wdtk.datamodel.interfaces.Statement;
import org.wikidata.wdtk.datamodel.interfaces.StatementRank; import org.wikidata.wdtk.datamodel.interfaces.StatementRank;
import com.google.refine.model.Cell; import com.google.refine.model.Cell;
import com.google.refine.model.Project;
import com.google.refine.model.Recon; import com.google.refine.model.Recon;
import com.google.refine.model.ReconCandidate; import com.google.refine.model.ReconCandidate;
import com.google.refine.util.ParsingUtilities;
public class TestingDataGenerator { public class TestingData {
public static final String inceptionCsv = "subject,inception,reference\n"+
"Q1377,1919,http://www.ljubljana-slovenia.com/university-ljubljana\n"+
"Q865528,1965,";
public static final String inceptionWithNewCsv = "subject,inception,reference\n"+
"Q1377,1919,http://www.ljubljana-slovenia.com/university-ljubljana\n"+
"Q865528,1965,\n"+
"new uni,2016,http://new-uni.com/";
public static final String inceptionWithNewQS =
"Q1377\tP571\t+1919-01-01T00:00:00Z/9"+
"\tS854\t\"http://www.ljubljana-slovenia.com/university-ljubljana\""+
"\tS813\t+2018-02-28T00:00:00Z/11\n" +
"Q865528\tP571\t+1965-01-01T00:00:00Z/9"+
"\tS813\t+2018-02-28T00:00:00Z/11\n"+
"CREATE\n"+
"LAST\tP571\t+2016-01-01T00:00:00Z/9"+
"\tS854\t\"http://new-uni.com/\""+
"\tS813\t+2018-02-28T00:00:00Z/11\n";
public static ItemIdValue newIdA = makeNewItemIdValue(1234L, "new item A");
public static ItemIdValue newIdB = makeNewItemIdValue(4567L, "new item B");
public static ItemIdValue matchedId = makeMatchedItemIdValue("Q89","eist");
public static ItemIdValue existingId = Datamodel.makeWikidataItemIdValue("Q43");
protected static PropertyIdValue pid = Datamodel.makeWikidataPropertyIdValue("P38"); protected static PropertyIdValue pid = Datamodel.makeWikidataPropertyIdValue("P38");
@ -77,10 +108,17 @@ public class TestingDataGenerator {
return generateStatement(from, pid, to); return generateStatement(from, pid, to);
} }
public static ItemIdValue newIdA = makeNewItemIdValue(1234L, "new item A"); public static JSONObject jsonFromFile(String filename) throws IOException, JSONException {
public static ItemIdValue newIdB = makeNewItemIdValue(4567L, "new item B"); byte[] contents = Files.readAllBytes(Paths.get(filename));
public static ItemIdValue matchedId = makeMatchedItemIdValue("Q89","eist"); String decoded = new String(contents, "utf-8");
public static ItemIdValue existingId = Datamodel.makeWikidataItemIdValue("Q43"); return ParsingUtilities.evaluateJsonStringToObject(decoded);
}
public static void reconcileInceptionCells(Project project) {
project.rows.get(0).cells.set(0, TestingData.makeMatchedCell("Q1377", "University of Ljubljana"));
project.rows.get(1).cells.set(0, TestingData.makeMatchedCell("Q865528", "University of Warwick"));
project.rows.get(2).cells.set(0, TestingData.makeNewItemCell(1234L, "new uni"));
}
} }

View File

@ -13,7 +13,7 @@ import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.Datamodel;
@ -29,9 +29,9 @@ import org.wikidata.wdtk.datamodel.interfaces.StatementRank;
public class ItemUpdateTest { public class ItemUpdateTest {
private ItemIdValue existingSubject = Datamodel.makeWikidataItemIdValue("Q34"); private ItemIdValue existingSubject = Datamodel.makeWikidataItemIdValue("Q34");
private ItemIdValue newSubject = TestingDataGenerator.makeNewItemIdValue(1234L, "new item"); private ItemIdValue newSubject = TestingData.makeNewItemIdValue(1234L, "new item");
private ItemIdValue sameNewSubject = TestingDataGenerator.makeNewItemIdValue(1234L, "other new item"); private ItemIdValue sameNewSubject = TestingData.makeNewItemIdValue(1234L, "other new item");
private ItemIdValue matchedSubject = TestingDataGenerator.makeMatchedItemIdValue("Q78", "well known item"); private ItemIdValue matchedSubject = TestingData.makeMatchedItemIdValue("Q78", "well known item");
private PropertyIdValue pid1 = Datamodel.makeWikidataPropertyIdValue("P348"); private PropertyIdValue pid1 = Datamodel.makeWikidataPropertyIdValue("P348");
private PropertyIdValue pid2 = Datamodel.makeWikidataPropertyIdValue("P52"); private PropertyIdValue pid2 = Datamodel.makeWikidataPropertyIdValue("P52");

View File

@ -7,7 +7,7 @@ import java.util.Collections;
import java.util.Set; import java.util.Set;
import org.openrefine.wikidata.schema.entityvalues.ReconItemIdValue; import org.openrefine.wikidata.schema.entityvalues.ReconItemIdValue;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import org.wikidata.wdtk.datamodel.helpers.Datamodel; import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.Claim; import org.wikidata.wdtk.datamodel.interfaces.Claim;
@ -23,22 +23,22 @@ import org.wikidata.wdtk.datamodel.interfaces.Value;
public class PointerExtractorTest { public class PointerExtractorTest {
private PropertyIdValue pid = Datamodel.makeWikidataPropertyIdValue("P89"); private PropertyIdValue pid = Datamodel.makeWikidataPropertyIdValue("P89");
private Snak snakWithNew = Datamodel.makeValueSnak(pid, TestingDataGenerator.newIdA); private Snak snakWithNew = Datamodel.makeValueSnak(pid, TestingData.newIdA);
private Snak snakWithoutNew = Datamodel.makeValueSnak(pid, TestingDataGenerator.matchedId); private Snak snakWithoutNew = Datamodel.makeValueSnak(pid, TestingData.matchedId);
private SnakGroup snakGroupWithNew = Datamodel.makeSnakGroup(Collections.singletonList(snakWithNew)); private SnakGroup snakGroupWithNew = Datamodel.makeSnakGroup(Collections.singletonList(snakWithNew));
private SnakGroup snakGroupWithoutNew = Datamodel.makeSnakGroup(Collections.singletonList(snakWithoutNew)); private SnakGroup snakGroupWithoutNew = Datamodel.makeSnakGroup(Collections.singletonList(snakWithoutNew));
private Claim claimWithNew = Datamodel.makeClaim(TestingDataGenerator.existingId, snakWithNew, Collections.emptyList()); private Claim claimWithNew = Datamodel.makeClaim(TestingData.existingId, snakWithNew, Collections.emptyList());
private Claim claimNewSubject = Datamodel.makeClaim(TestingDataGenerator.newIdB, snakWithoutNew, Collections.emptyList()); private Claim claimNewSubject = Datamodel.makeClaim(TestingData.newIdB, snakWithoutNew, Collections.emptyList());
private Claim claimNewQualifier = Datamodel.makeClaim(TestingDataGenerator.matchedId, snakWithoutNew, private Claim claimNewQualifier = Datamodel.makeClaim(TestingData.matchedId, snakWithoutNew,
Collections.singletonList(snakGroupWithNew)); Collections.singletonList(snakGroupWithNew));
private static PointerExtractor e = new PointerExtractor(); private static PointerExtractor e = new PointerExtractor();
@Test @Test
public void testExtractEntityId() { public void testExtractEntityId() {
assertEquals(Collections.singleton(TestingDataGenerator.newIdA), e.extractPointers(TestingDataGenerator.newIdA)); assertEquals(Collections.singleton(TestingData.newIdA), e.extractPointers(TestingData.newIdA));
assertEmpty(e.extractPointers(TestingDataGenerator.existingId)); assertEmpty(e.extractPointers(TestingData.existingId));
assertEmpty(e.extractPointers(TestingDataGenerator.matchedId)); assertEmpty(e.extractPointers(TestingData.matchedId));
} }
@Test @Test
@ -56,26 +56,26 @@ public class PointerExtractorTest {
@Test @Test
public void testSnak() { public void testSnak() {
assertEmpty(e.extractPointers(snakWithoutNew)); assertEmpty(e.extractPointers(snakWithoutNew));
assertEquals(Collections.singleton(TestingDataGenerator.newIdA), e.extractPointers(snakWithNew)); assertEquals(Collections.singleton(TestingData.newIdA), e.extractPointers(snakWithNew));
assertEmpty(e.extractPointers(Datamodel.makeNoValueSnak(pid))); assertEmpty(e.extractPointers(Datamodel.makeNoValueSnak(pid)));
} }
@Test @Test
public void testSnakGroup() { public void testSnakGroup() {
assertEmpty(e.extractPointers(snakGroupWithoutNew)); assertEmpty(e.extractPointers(snakGroupWithoutNew));
assertEquals(Collections.singleton(TestingDataGenerator.newIdA), e.extractPointers(snakGroupWithNew)); assertEquals(Collections.singleton(TestingData.newIdA), e.extractPointers(snakGroupWithNew));
} }
@Test @Test
public void testStatement() { public void testStatement() {
assertEmpty(e.extractPointers(Datamodel.makeStatement(claimNewSubject, assertEmpty(e.extractPointers(Datamodel.makeStatement(claimNewSubject,
Collections.emptyList(), StatementRank.NORMAL, ""))); Collections.emptyList(), StatementRank.NORMAL, "")));
assertEquals(Collections.singleton(TestingDataGenerator.newIdA), e.extractPointers(Datamodel.makeStatement(claimWithNew, assertEquals(Collections.singleton(TestingData.newIdA), e.extractPointers(Datamodel.makeStatement(claimWithNew,
Collections.emptyList(), StatementRank.NORMAL, ""))); Collections.emptyList(), StatementRank.NORMAL, "")));
assertEquals(Collections.singleton(TestingDataGenerator.newIdA), e.extractPointers(Datamodel.makeStatement(claimNewQualifier, assertEquals(Collections.singleton(TestingData.newIdA), e.extractPointers(Datamodel.makeStatement(claimNewQualifier,
Collections.emptyList(), StatementRank.NORMAL, ""))); Collections.emptyList(), StatementRank.NORMAL, "")));
Reference reference = Datamodel.makeReference(Collections.singletonList(snakGroupWithNew)); Reference reference = Datamodel.makeReference(Collections.singletonList(snakGroupWithNew));
assertEquals(Collections.singleton(TestingDataGenerator.newIdA), e.extractPointers(Datamodel.makeStatement(claimNewSubject, assertEquals(Collections.singleton(TestingData.newIdA), e.extractPointers(Datamodel.makeStatement(claimNewSubject,
Collections.singletonList(reference), StatementRank.NORMAL, ""))); Collections.singletonList(reference), StatementRank.NORMAL, "")));
} }

View File

@ -7,7 +7,7 @@ import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.openrefine.wikidata.testing.TestingDataGenerator; import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate; import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder; import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.testng.annotations.Test; import org.testng.annotations.Test;
@ -22,16 +22,16 @@ public abstract class UpdateSchedulerTest {
// Two items that already exist on Wikidata.
protected ItemIdValue existingIdA = Datamodel.makeWikidataItemIdValue("Q43");
protected ItemIdValue existingIdB = Datamodel.makeWikidataItemIdValue("Q538");
// Two items that are yet to be created (reconciliation "new" cells).
protected ItemIdValue newIdA = TestingData.makeNewItemIdValue(1234L, "new item A");
protected ItemIdValue newIdB = TestingData.makeNewItemIdValue(5678L, "new item B");
// Statements covering every combination of existing/new subject and value,
// used by the concrete scheduler tests to exercise dependency ordering.
protected Statement sAtoB = TestingData.generateStatement(existingIdA, existingIdB);
protected Statement sBtoA = TestingData.generateStatement(existingIdB, existingIdA);
protected Statement sAtoNewA = TestingData.generateStatement(existingIdA, newIdA);
protected Statement sAtoNewB = TestingData.generateStatement(existingIdA, newIdB);
protected Statement sNewAtoB = TestingData.generateStatement(newIdA, existingIdB);
protected Statement sNewAtoNewB = TestingData.generateStatement(newIdA, newIdB);
// Self-referencing statement on a new item.
protected Statement sNewAtoNewA = TestingData.generateStatement(newIdA, newIdA);

// Supplies the scheduler implementation under test.
public abstract UpdateScheduler getScheduler();

View File

@ -0,0 +1,20 @@
package org.openrefine.wikidata.utils;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import org.testng.annotations.Test;
public class FirstLinesExtractorTest {

    /**
     * When the text has fewer lines than the limit, it is returned unchanged.
     */
    @Test
    public void testShort() throws IOException {
        String text = "a\nb\nc\n";
        assertEquals(text, FirstLinesExtractor.extractFirstLines(text, 5));
    }

    /**
     * When the text reaches the limit, it is truncated and "..." is appended.
     */
    @Test
    public void testLong() throws IOException {
        String truncated = FirstLinesExtractor.extractFirstLines("a\nb\nc", 3);
        assertEquals("a\nb\n...", truncated);
    }
}