Merge pull request #1755 from OpenRefine/jackson-serialization

Jackson serialization
This commit is contained in:
Antonin Delpeuch 2018-12-01 23:56:50 +00:00 committed by GitHub
commit 5639f1b2f1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
551 changed files with 9671 additions and 64895 deletions

View File

@ -153,18 +153,13 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.1.3</version>
<version>2.9.7</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>1.9.13</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>1.9.13</version>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.9.7</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>

View File

@ -39,13 +39,13 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.ProjectManager;
import com.google.refine.ProjectMetadata;
import com.google.refine.RefineServlet;
import com.google.refine.commands.HttpUtilities;
import com.google.refine.extension.database.model.DatabaseColumn;
@ -55,7 +55,6 @@ import com.google.refine.importing.ImportingController;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingManager;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;
@ -140,8 +139,8 @@ public class DatabaseImportController implements ImportingController {
}
JSONObject result = new JSONObject();
JSONObject options = new JSONObject();
ObjectNode result = ParsingUtilities.mapper.createObjectNode();
ObjectNode options = ParsingUtilities.mapper.createObjectNode();
JSONUtilities.safePut(result, "status", "ok");
JSONUtilities.safePut(result, OPTIONS_KEY, options);
@ -191,7 +190,7 @@ public class DatabaseImportController implements ImportingController {
job.updating = true;
try {
JSONObject optionObj = ParsingUtilities.evaluateJsonStringToObject(
ObjectNode optionObj = ParsingUtilities.evaluateJsonStringToObjectNode(
request.getParameter("options"));
List<Exception> exceptions = new LinkedList<Exception>();
@ -207,35 +206,28 @@ public class DatabaseImportController implements ImportingController {
optionObj,
exceptions
);
// String exStr = getExceptionString(exceptions);
// logger.info("exceptions::" + exStr);
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
try {
writer.object();
writer.writeStartObject();
if (exceptions.size() == 0) {
job.project.update(); // update all internal models, indexes, caches, etc.
writer.key("status");
writer.value("ok");
writer.writeStringField("status", "ok");
} else {
writer.key("status");
writer.value("error");
writer.key("message");
writer.value(getExceptionString(exceptions));
// writer.array();
// writeErrors(writer, exceptions);
// writer.endArray();
writer.writeStringField("status", "error");
writer.writeStringField("message", getExceptionString(exceptions));
}
writer.endObject();
} catch (JSONException e) {
writer.writeEndObject();
} catch (IOException e) {
throw new ServletException(e);
} finally {
writer.flush();
writer.close();
w.flush();
w.close();
}
} catch (JSONException e) {
} catch (IOException e) {
throw new ServletException(e);
} finally {
job.touch();
@ -271,7 +263,7 @@ public class DatabaseImportController implements ImportingController {
ProjectMetadata metadata,
final ImportingJob job,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions) throws DatabaseServiceException{
@ -329,7 +321,7 @@ public class DatabaseImportController implements ImportingController {
job.updating = true;
try {
final JSONObject optionObj = ParsingUtilities.evaluateJsonStringToObject(
final ObjectNode optionObj = ParsingUtilities.evaluateJsonStringToObjectNode(
request.getParameter("options"));
final List<Exception> exceptions = new LinkedList<Exception>();
@ -378,7 +370,7 @@ public class DatabaseImportController implements ImportingController {
}.start();
HttpUtilities.respond(response, "ok", "done");
} catch (JSONException e) {
} catch (IOException e) {
throw new ServletException(e);
}
}
@ -400,7 +392,7 @@ public class DatabaseImportController implements ImportingController {
ProjectMetadata metadata,
final ImportingJob job,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions) throws DatabaseServiceException{

View File

@ -35,13 +35,9 @@ import java.util.Properties;
import javax.servlet.ServletConfig;
import org.json.JSONException;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.refine.Jsonizable;
import edu.mit.simile.butterfly.ButterflyModuleImpl;

View File

@ -34,13 +34,13 @@ import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.refine.ProjectManager;
import com.google.refine.io.FileProjectManager;

View File

@ -36,16 +36,17 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.http.HttpStatus;
import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
//import com.google.refine.ProjectManager;
import com.google.refine.extension.database.DatabaseConfiguration;
import com.google.refine.extension.database.DatabaseService;
import com.google.refine.extension.database.DatabaseServiceException;
import com.google.refine.extension.database.model.DatabaseInfo;
import com.google.refine.util.ParsingUtilities;
public class ConnectCommand extends DatabaseCommand {
@ -66,7 +67,7 @@ public class ConnectCommand extends DatabaseCommand {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
ObjectMapper mapperObj = new ObjectMapper();
try {
@ -74,22 +75,20 @@ public class ConnectCommand extends DatabaseCommand {
.connect(databaseConfiguration);
String databaseInfoString = mapperObj.writeValueAsString(databaseInfo);
response.setStatus(HttpStatus.SC_OK);
writer.object();
writer.key("code");
writer.value("ok");
writer.key("databaseInfo");
writer.value(databaseInfoString);
writer.endObject();
writer.writeStartObject();
writer.writeStringField("code", "ok");
writer.writeStringField("databaseInfo", databaseInfoString);
writer.writeEndObject();
} catch (DatabaseServiceException e) {
logger.error("ConnectCommand::Post::DatabaseServiceException::{}", e);
sendError(HttpStatus.SC_UNAUTHORIZED,response, writer, e);
sendError(HttpStatus.SC_UNAUTHORIZED,response, e);
}catch (Exception e) {
logger.error("ConnectCommand::Post::Exception::{}", e);
sendError(HttpStatus.SC_UNAUTHORIZED,response, writer, e);
sendError(HttpStatus.SC_UNAUTHORIZED,response, e);
} finally {
// w.flush();
writer.flush();
writer.close();
w.close();
}
} catch (Exception e) {

View File

@ -33,7 +33,6 @@ import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -82,7 +81,7 @@ public abstract class DatabaseCommand extends Command {
* @param e
* @throws IOException
*/
protected void sendError(int status, HttpServletResponse response, JSONWriter writer, Exception e)
protected void sendError(int status, HttpServletResponse response, Exception e)
throws IOException {
//logger.info("sendError::{}", writer);
@ -97,7 +96,7 @@ public abstract class DatabaseCommand extends Command {
* @param e
* @throws IOException
*/
protected void sendError(int status, HttpServletResponse response, JSONWriter writer, DatabaseServiceException e)
protected void sendError(int status, HttpServletResponse response, DatabaseServiceException e)
throws IOException {
String message = "";

View File

@ -36,16 +36,17 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.http.HttpStatus;
import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
//import com.google.refine.ProjectManager;
import com.google.refine.extension.database.DatabaseConfiguration;
import com.google.refine.extension.database.DatabaseService;
import com.google.refine.extension.database.DatabaseServiceException;
import com.google.refine.extension.database.model.DatabaseInfo;
import com.google.refine.util.ParsingUtilities;
public class ExecuteQueryCommand extends DatabaseCommand {
@ -69,7 +70,7 @@ public class ExecuteQueryCommand extends DatabaseCommand {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
try {
DatabaseInfo databaseInfo = DatabaseService.get(databaseConfiguration.getDatabaseType())
@ -84,22 +85,22 @@ public class ExecuteQueryCommand extends DatabaseCommand {
}
writer.object();
writer.key("code");
writer.value("ok");
writer.key("QueryResult");
writer.value(jsonStr);
writer.endObject();
writer.writeStartObject();
writer.writeStringField("code", "ok");
writer.writeStringField("QueryResult", jsonStr);
writer.writeEndObject();
} catch (DatabaseServiceException e) {
logger.error("QueryCommand::Post::DatabaseServiceException::{}", e);
sendError(HttpStatus.SC_BAD_REQUEST, response, writer, e);
sendError(HttpStatus.SC_BAD_REQUEST, response, e);
} catch (Exception e) {
logger.error("QueryCommand::Post::Exception::{}", e);
sendError(HttpStatus.SC_BAD_REQUEST,response, writer, e);
sendError(HttpStatus.SC_BAD_REQUEST,response, e);
} finally {
writer.flush();
writer.close();
w.close();
}
} catch (Exception e) {

View File

@ -37,13 +37,14 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.http.HttpStatus;
import org.json.JSONException;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.google.refine.extension.database.DatabaseConfiguration;
import com.google.refine.extension.database.DatabaseUtils;
import com.google.refine.util.ParsingUtilities;
public class SavedConnectionCommand extends DatabaseCommand {
@ -117,54 +118,41 @@ public class SavedConnectionCommand extends DatabaseCommand {
* @throws IOException
* @throws JSONException
*/
private void writeSavedConnectionResponse(HttpServletResponse response, DatabaseConfiguration savedConnection) throws IOException, JSONException {
private void writeSavedConnectionResponse(HttpServletResponse response, DatabaseConfiguration savedConnection) throws IOException {
Writer w = response.getWriter();
try {
JSONWriter writer = new JSONWriter(w);
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
writer.object();
writer.key(DatabaseUtils.SAVED_CONNECTION_KEY);
writer.array();
writer.writeStartObject();
writer.writeArrayFieldStart(DatabaseUtils.SAVED_CONNECTION_KEY);
writer.object();
writer.key("connectionName");
writer.value(savedConnection.getConnectionName());
writer.writeStartObject();
writer.writeStringField("connectionName", savedConnection.getConnectionName());
writer.key("databaseType");
writer.value(savedConnection.getDatabaseType());
writer.writeStringField("databaseType", savedConnection.getDatabaseType());
writer.key("databaseHost");
writer.value(savedConnection.getDatabaseHost());
writer.writeStringField("databaseHost", savedConnection.getDatabaseHost());
writer.key("databasePort");
writer.value(savedConnection.getDatabasePort());
writer.writeNumberField("databasePort", savedConnection.getDatabasePort());
writer.key("databaseName");
writer.value(savedConnection.getDatabaseName());
writer.key("databasePassword");
writer.writeStringField("databaseName", savedConnection.getDatabaseName());
//
String dbPasswd = savedConnection.getDatabasePassword();
if(dbPasswd != null && !dbPasswd.isEmpty()) {
dbPasswd = DatabaseUtils.decrypt(savedConnection.getDatabasePassword());
//logger.info("Decrypted Password::" + dbPasswd);
}
writer.value(dbPasswd);
//
// writer.value(savedConnection.getDatabasePassword());
writer.writeStringField("databasePassword", dbPasswd);
writer.key("databaseSchema");
writer.value(savedConnection.getDatabaseSchema());
writer.writeStringField("databaseSchema", savedConnection.getDatabaseSchema());
writer.key("databaseUser");
writer.value(savedConnection.getDatabaseUser());
writer.writeStringField("databaseUser", savedConnection.getDatabaseUser());
writer.endObject();
writer.endArray();
writer.writeEndObject();
writer.writeEndArray();
writer.endObject();
writer.writeEndObject();
writer.flush();
writer.close();
}finally {
w.flush();
@ -178,59 +166,52 @@ public class SavedConnectionCommand extends DatabaseCommand {
* @throws IOException
* @throws JSONException
*/
private void writeSavedConnectionResponse(HttpServletResponse response) throws IOException, JSONException {
private void writeSavedConnectionResponse(HttpServletResponse response) throws IOException {
Writer w = response.getWriter();
try {
List<DatabaseConfiguration> savedConnections = DatabaseUtils.getSavedConnections();
JSONWriter writer = new JSONWriter(w);
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
writer.object();
writer.key(DatabaseUtils.SAVED_CONNECTION_KEY);
writer.array();
writer.writeStartObject();
writer.writeArrayFieldStart(DatabaseUtils.SAVED_CONNECTION_KEY);
int size = savedConnections.size();
for (int i = 0; i < size; i++) {
writer.object();
writer.writeStartObject();
DatabaseConfiguration dbConfig = (DatabaseConfiguration) savedConnections.get(i);
writer.key("connectionName");
writer.value(dbConfig.getConnectionName());
writer.writeStringField("connectionName", dbConfig.getConnectionName());
writer.key("databaseType");
writer.value(dbConfig.getDatabaseType());
writer.writeStringField("databaseType", dbConfig.getDatabaseType());
writer.key("databaseHost");
writer.value(dbConfig.getDatabaseHost());
writer.writeStringField("databaseHost", dbConfig.getDatabaseHost());
writer.key("databasePort");
writer.value(dbConfig.getDatabasePort());
writer.writeNumberField("databasePort", dbConfig.getDatabasePort());
writer.key("databaseName");
writer.value(dbConfig.getDatabaseName());
writer.writeStringField("databaseName", dbConfig.getDatabaseName());
writer.key("databasePassword");
String dbPasswd = dbConfig.getDatabasePassword();
if(dbPasswd != null && !dbPasswd.isEmpty()) {
dbPasswd = DatabaseUtils.decrypt(dbConfig.getDatabasePassword());
}
// writer.value(dbConfig.getDatabasePassword());
writer.value(dbPasswd);
writer.writeStringField("databasePassword", dbPasswd);
writer.key("databaseSchema");
writer.value(dbConfig.getDatabaseSchema());
writer.writeStringField("databaseSchema", dbConfig.getDatabaseSchema());
writer.key("databaseUser");
writer.value(dbConfig.getDatabaseUser());
writer.writeStringField("databaseUser", dbConfig.getDatabaseUser());
writer.endObject();
writer.writeEndObject();
}
writer.endArray();
writer.endObject();
writer.writeEndArray();
writer.writeEndObject();
writer.flush();
writer.close();
// logger.info("Saved Connection Get Response sent");
} finally {
w.flush();

View File

@ -36,13 +36,15 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.http.HttpStatus;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.google.refine.extension.database.DatabaseConfiguration;
import com.google.refine.extension.database.DatabaseService;
import com.google.refine.extension.database.DatabaseServiceException;
import com.google.refine.util.ParsingUtilities;
@ -69,7 +71,7 @@ public class TestConnectCommand extends DatabaseCommand {
response.setHeader("Content-Type", "application/json");
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
try {
@ -77,20 +79,18 @@ public class TestConnectCommand extends DatabaseCommand {
.testConnection(databaseConfiguration);
response.setStatus(HttpStatus.SC_OK);
writer.object();
writer.writeStartObject();
writer.key("connectionResult");
writer.value(connectionTestResult);
writer.key("code");
writer.value("ok");
writer.endObject();
writer.writeBooleanField("connectionResult", connectionTestResult);
writer.writeStringField("code", "ok");
writer.writeEndObject();
} catch (DatabaseServiceException e) {
logger.error("TestConnectCommand::Post::DatabaseServiceException::{}", e);
sendError(HttpStatus.SC_UNAUTHORIZED,response, writer, e);
sendError(HttpStatus.SC_UNAUTHORIZED,response, e);
} finally {
// writer.endObject();
// w.flush();
writer.flush();
writer.close();
w.close();
}
} catch (Exception e) {

View File

@ -36,16 +36,17 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.http.HttpStatus;
import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
//import com.google.refine.ProjectManager;
import com.google.refine.extension.database.DatabaseConfiguration;
import com.google.refine.extension.database.DatabaseService;
import com.google.refine.extension.database.DatabaseServiceException;
import com.google.refine.extension.database.model.DatabaseInfo;
import com.google.refine.util.ParsingUtilities;
public class TestQueryCommand extends DatabaseCommand {
@ -70,7 +71,7 @@ public class TestQueryCommand extends DatabaseCommand {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
try {
DatabaseInfo databaseInfo = DatabaseService.get(dbConfig.getDatabaseType())
@ -83,22 +84,22 @@ public class TestQueryCommand extends DatabaseCommand {
logger.debug("TestQueryCommand::Post::Result::{} " ,jsonStr);
}
writer.object();
writer.key("code");
writer.value("ok");
writer.key("QueryResult");
writer.value(jsonStr);
writer.endObject();
writer.writeStartObject();
writer.writeStringField("code", "ok");
writer.writeStringField("QueryResult", jsonStr);
writer.writeEndObject();
} catch (DatabaseServiceException e) {
logger.error("TestQueryCommand::Post::DatabaseServiceException::{}", e);
sendError(HttpStatus.SC_BAD_REQUEST, response, writer, e);
sendError(HttpStatus.SC_BAD_REQUEST, response, e);
} catch (Exception e) {
logger.error("TestQueryCommand::Post::Exception::{}", e);
sendError(HttpStatus.SC_BAD_REQUEST,response, writer, e);
sendError(HttpStatus.SC_BAD_REQUEST,response, e);
} finally {
writer.flush();
writer.close();
w.close();
}
} catch (Exception e) {

View File

@ -12,8 +12,6 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.Assert;
@ -24,7 +22,9 @@ import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.ProjectManager;
import com.google.refine.ProjectMetadata;
import com.google.refine.RefineServlet;
import com.google.refine.extension.database.mysql.MySQLDatabaseService;
import com.google.refine.extension.database.stub.RefineDbServletStub;
@ -32,7 +32,7 @@ import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingManager;
import com.google.refine.io.FileProjectManager;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.ParsingUtilities;
@ -60,7 +60,7 @@ public class DatabaseImportControllerTest extends DBExtensionTests{
private DatabaseImportController SUT = null;
@BeforeMethod
public void setUp() throws JSONException, IOException {
public void setUp() throws IOException {
MockitoAnnotations.initMocks(this);
File dir = DBExtensionTestUtils.createTempDirectory("OR_DBExtension_Test_WorkspaceDir");
@ -102,10 +102,9 @@ public class DatabaseImportControllerTest extends DBExtensionTests{
SUT.doGet(request, response);
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
String code = json.getString("status");
String message = json.getString("message");
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
String code = json.get("status").asText();
String message = json.get("message").asText();
Assert.assertNotNull(code);
Assert.assertNotNull(message);
Assert.assertEquals(code, "error");
@ -119,7 +118,7 @@ public class DatabaseImportControllerTest extends DBExtensionTests{
}
@Test
public void testDoPostInvalidSubCommand() throws IOException, ServletException, JSONException {
public void testDoPostInvalidSubCommand() throws IOException, ServletException {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
when(request.getQueryString()).thenReturn(
@ -130,10 +129,10 @@ public class DatabaseImportControllerTest extends DBExtensionTests{
SUT.doPost(request, response);
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
String code = json.getString("status");
String message = json.getString("message");
String code = json.get("status").asText();
String message = json.get("message").asText();
Assert.assertNotNull(code);
Assert.assertNotNull(message);
Assert.assertEquals(code, "error");
@ -143,7 +142,7 @@ public class DatabaseImportControllerTest extends DBExtensionTests{
@Test
public void testDoPostInitializeParser() throws ServletException, IOException, JSONException {
public void testDoPostInitializeParser() throws ServletException, IOException {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
@ -155,15 +154,15 @@ public class DatabaseImportControllerTest extends DBExtensionTests{
SUT.doPost(request, response);
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
String status = json.getString("status");
String status = json.get("status").asText();
//System.out.println("json::" + json);
Assert.assertEquals(status, "ok");
}
@Test
public void testDoPostParsePreview() throws IOException, ServletException, JSONException {
public void testDoPostParsePreview() throws IOException, ServletException {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
@ -188,15 +187,15 @@ public class DatabaseImportControllerTest extends DBExtensionTests{
SUT.doPost(request, response);
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
String status = json.getString("status");
String status = json.get("status").asText();
//System.out.println("json::" + json);
Assert.assertEquals(status, "ok");
}
@Test
public void testDoPostCreateProject() throws IOException, ServletException, JSONException {
public void testDoPostCreateProject() throws IOException, ServletException {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
@ -222,9 +221,9 @@ public class DatabaseImportControllerTest extends DBExtensionTests{
SUT.doPost(request, response);
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
String status = json.getString("status");
String status = json.get("status").asText();
//System.out.println("json::" + json);
Assert.assertEquals(status, "ok");
}

View File

@ -11,8 +11,6 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.Assert;
@ -21,10 +19,12 @@ import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.extension.database.DBExtensionTests;
import com.google.refine.extension.database.DatabaseConfiguration;
import com.google.refine.extension.database.DatabaseService;
import com.google.refine.extension.database.mysql.MySQLDatabaseService;
import com.google.refine.util.ParsingUtilities;
@Test(groups = { "requiresMySQL" })
@ -67,7 +67,7 @@ public class ConnectCommandTest extends DBExtensionTests {
@Test
public void testDoPost() throws IOException, ServletException, JSONException {
public void testDoPost() throws IOException, ServletException {
when(request.getParameter("databaseType")).thenReturn(MySQLDatabaseService.DB_NAME);
when(request.getParameter("databaseServer")).thenReturn(testDbConfig.getDatabaseHost());
@ -85,12 +85,12 @@ public class ConnectCommandTest extends DBExtensionTests {
connectCommand.doPost(request, response);
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
String code = json.getString("code");
String code = json.get("code").asText();
Assert.assertEquals(code, "ok");
String databaseInfo = json.getString("databaseInfo");
String databaseInfo = json.get("databaseInfo").asText();
Assert.assertNotNull(databaseInfo);
}

View File

@ -10,8 +10,6 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.Assert;
@ -20,10 +18,12 @@ import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.extension.database.DBExtensionTests;
import com.google.refine.extension.database.DatabaseConfiguration;
import com.google.refine.extension.database.DatabaseService;
import com.google.refine.extension.database.mysql.MySQLDatabaseService;
import com.google.refine.util.ParsingUtilities;
@Test(groups = { "requiresMySQL" })
public class ExecuteQueryCommandTest extends DBExtensionTests {
@ -63,7 +63,7 @@ public class ExecuteQueryCommandTest extends DBExtensionTests {
}
@Test
public void testDoPost() throws IOException, ServletException, JSONException {
public void testDoPost() throws IOException, ServletException {
when(request.getParameter("databaseType")).thenReturn(testDbConfig.getDatabaseType());
when(request.getParameter("databaseServer")).thenReturn(testDbConfig.getDatabaseHost());
@ -84,12 +84,12 @@ public class ExecuteQueryCommandTest extends DBExtensionTests {
executeQueryCommand.doPost(request, response);
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
String code = json.getString("code");
String code = json.get("code").asText();
Assert.assertEquals(code, "ok");
String queryResult = json.getString("QueryResult");
String queryResult = json.get("QueryResult").asText();
Assert.assertNotNull(queryResult);
}

View File

@ -11,9 +11,6 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.Assert;
@ -24,7 +21,10 @@ import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.ProjectManager;
import com.google.refine.ProjectMetadata;
import com.google.refine.RefineServlet;
import com.google.refine.extension.database.DBExtensionTestUtils;
import com.google.refine.extension.database.DBExtensionTests;
@ -35,7 +35,7 @@ import com.google.refine.extension.database.stub.RefineDbServletStub;
import com.google.refine.importing.ImportingManager;
import com.google.refine.io.FileProjectManager;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.ParsingUtilities;
public class SavedConnectionCommandTest extends DBExtensionTests{
@ -59,7 +59,7 @@ public class SavedConnectionCommandTest extends DBExtensionTests{
private SavedConnectionCommand SUT = null;
@BeforeMethod
public void setUp() throws JSONException, IOException {
public void setUp() throws IOException {
MockitoAnnotations.initMocks(this);
File dir = DBExtensionTestUtils.createTempDirectory("OR_DBExtension_Test_WorkspaceDir");
@ -136,7 +136,7 @@ public class SavedConnectionCommandTest extends DBExtensionTests{
}
@Test
public void testDoPost() throws IOException, ServletException, JSONException {
public void testDoPost() throws IOException, ServletException {
when(request.getParameter("connectionName")).thenReturn("test-db-name");
when(request.getParameter("databaseType")).thenReturn(MySQLDatabaseService.DB_NAME);
@ -155,18 +155,18 @@ public class SavedConnectionCommandTest extends DBExtensionTests{
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
JSONArray savedConnections = json.getJSONArray("savedConnections");
ArrayNode savedConnections = (ArrayNode) json.get("savedConnections");
Assert.assertNotNull(savedConnections);
int len = savedConnections.length();
int len = savedConnections.size();
Assert.assertEquals(len, 1);
}
@Test
public void testDoGet() throws IOException, ServletException, JSONException {
public void testDoGet() throws IOException, ServletException {
String testDbName = "testLocalDb";
//add saved connection
saveDatabaseConfiguration(testDbName);
@ -187,21 +187,20 @@ public class SavedConnectionCommandTest extends DBExtensionTests{
SUT.doGet(request, response);
JSONObject json = new JSONObject(sw.getBuffer().toString().trim());
ObjectNode json = ParsingUtilities.mapper.readValue(sw.getBuffer().toString().trim(), ObjectNode.class);
JSONArray savedConnections = json.getJSONArray("savedConnections");
ArrayNode savedConnections = (ArrayNode) json.get("savedConnections");
Assert.assertNotNull(savedConnections);
Assert.assertEquals(savedConnections.length(), 1);
Assert.assertEquals(savedConnections.size(), 1);
JSONObject sc = (JSONObject)savedConnections.get(0);
// System.out.println("sc" + sc);
String connName = sc.getString("connectionName");
ObjectNode sc = (ObjectNode)savedConnections.get(0);
String connName = sc.get("connectionName").asText();
Assert.assertEquals(connName, testDbName);
}
@Test
public void testDoPut() throws IOException, ServletException, JSONException {
public void testDoPut() throws IOException, ServletException {
String testDbName = "testLocalDb";
saveDatabaseConfiguration(testDbName);
@ -223,15 +222,15 @@ public class SavedConnectionCommandTest extends DBExtensionTests{
SUT.doPut(request, response);
JSONObject json = new JSONObject(sw.getBuffer().toString().trim());
JSONArray savedConnections = json.getJSONArray("savedConnections");
ObjectNode json = ParsingUtilities.mapper.readValue(sw.getBuffer().toString().trim(), ObjectNode.class);
ArrayNode savedConnections = (ArrayNode) json.get("savedConnections");
Assert.assertNotNull(savedConnections);
Assert.assertEquals(savedConnections.length(), 1);
Assert.assertEquals(savedConnections.size(), 1);
JSONObject sc = (JSONObject)savedConnections.get(0);
ObjectNode sc = (ObjectNode)savedConnections.get(0);
System.out.println("sc" + sc);
String newDbHost = sc.getString("databaseHost");
String newDbHost = sc.get("databaseHost").asText();
Assert.assertEquals(newDbHost, newHost);
}
@ -249,11 +248,11 @@ public class SavedConnectionCommandTest extends DBExtensionTests{
when(request.getParameter("connectionName")).thenReturn(testDbName);
SUT.doDelete(request, response);
JSONObject json = new JSONObject(sw.getBuffer().toString().trim());
JSONArray savedConnections = json.getJSONArray("savedConnections");
ObjectNode json = ParsingUtilities.mapper.readValue(sw.getBuffer().toString().trim(), ObjectNode.class);
ArrayNode savedConnections = (ArrayNode) json.get("savedConnections");
Assert.assertNotNull(savedConnections);
Assert.assertEquals(savedConnections.length(), 0);
Assert.assertEquals(savedConnections.size(), 0);
} catch (Exception e) {
// TODO Auto-generated catch block
@ -279,7 +278,7 @@ public class SavedConnectionCommandTest extends DBExtensionTests{
// String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject();
ObjectNode json = ParsingUtilities.mapper.createObjectNode();
Assert.assertNotNull(json);

View File

@ -10,8 +10,6 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.Assert;
@ -20,10 +18,12 @@ import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.extension.database.DBExtensionTests;
import com.google.refine.extension.database.DatabaseConfiguration;
import com.google.refine.extension.database.DatabaseService;
import com.google.refine.extension.database.mysql.MySQLDatabaseService;
import com.google.refine.util.ParsingUtilities;
@Test(groups = { "requiresMySQL" })
@ -66,7 +66,7 @@ public class TestConnectCommandTest extends DBExtensionTests{
@Test
public void testDoPost() throws IOException, ServletException, JSONException {
public void testDoPost() throws IOException, ServletException {
when(request.getParameter("databaseType")).thenReturn(MySQLDatabaseService.DB_NAME);
when(request.getParameter("databaseServer")).thenReturn(testDbConfig.getDatabaseHost());
@ -86,9 +86,9 @@ public class TestConnectCommandTest extends DBExtensionTests{
connectCommand.doPost(request, response);
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
String code = json.getString("code");
String code = json.get("code").asText();
Assert.assertEquals(code, "ok");
}

View File

@ -10,8 +10,6 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.Assert;
@ -20,10 +18,12 @@ import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.extension.database.DBExtensionTests;
import com.google.refine.extension.database.DatabaseConfiguration;
import com.google.refine.extension.database.DatabaseService;
import com.google.refine.extension.database.mysql.MySQLDatabaseService;
import com.google.refine.util.ParsingUtilities;
@Test(groups = { "requiresMySQL" })
public class TestQueryCommandTest extends DBExtensionTests {
@ -64,7 +64,7 @@ public class TestQueryCommandTest extends DBExtensionTests {
@Test
public void testDoPost() throws IOException, ServletException, JSONException {
public void testDoPost() throws IOException, ServletException {
when(request.getParameter("databaseType")).thenReturn(testDbConfig.getDatabaseType());
when(request.getParameter("databaseServer")).thenReturn(testDbConfig.getDatabaseHost());
@ -85,12 +85,12 @@ public class TestQueryCommandTest extends DBExtensionTests {
executeQueryCommand.doPost(request, response);
String result = sw.getBuffer().toString().trim();
JSONObject json = new JSONObject(result);
ObjectNode json = ParsingUtilities.mapper.readValue(result, ObjectNode.class);
String code = json.getString("code");
String code = json.get("code").asText();
Assert.assertEquals(code, "ok");
String queryResult = json.getString("QueryResult");
String queryResult = json.get("QueryResult").asText();
Assert.assertNotNull(queryResult);
}

View File

@ -40,7 +40,6 @@ import com.google.api.client.http.HttpRequestFactory;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.refine.commands.Command;
public class DeAuthorizeCommand extends Command {

View File

@ -32,18 +32,17 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONObject;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.api.services.fusiontables.Fusiontables;
import com.google.api.services.fusiontables.model.Column;
import com.google.api.services.fusiontables.model.Sqlresponse;
import com.google.api.services.fusiontables.model.Table;
import com.google.refine.ProjectMetadata;
import com.google.refine.importers.TabularImportingParserBase;
import com.google.refine.importers.TabularImportingParserBase.TableDataReader;
import com.google.refine.importing.ImportingJob;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
/**
@ -60,7 +59,7 @@ public class FusionTableImporter {
ProjectMetadata metadata,
final ImportingJob job,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions) {
Fusiontables service = FusionTableHandler.getFusionTablesService(token);
@ -206,7 +205,7 @@ public class FusionTableImporter {
ProjectMetadata metadata,
final ImportingJob job,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions) {
String docUrlString = JSONUtilities.getString(options, "docUrl", null);

View File

@ -4,13 +4,11 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONObject;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.api.client.http.AbstractInputStreamContent;
import com.google.api.client.http.ByteArrayContent;
import com.google.api.client.http.HttpResponseException;
import com.google.api.services.fusiontables.Fusiontables;
import com.google.refine.exporters.TabularSerializer;
final class FusionTableSerializer implements TabularSerializer {
@ -31,7 +29,7 @@ final class FusionTableSerializer implements TabularSerializer {
}
@Override
public void startFile(JSONObject options) {
public void startFile(JsonNode options) {
}
@Override

View File

@ -6,20 +6,19 @@ import java.net.URL;
import java.util.List;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.api.services.sheets.v4.Sheets;
import com.google.api.services.sheets.v4.model.Sheet;
import com.google.api.services.sheets.v4.model.Spreadsheet;
import com.google.api.services.sheets.v4.model.ValueRange;
import com.google.refine.ProjectMetadata;
import com.google.refine.importers.TabularImportingParserBase;
import com.google.refine.importers.TabularImportingParserBase.TableDataReader;
import com.google.refine.importing.ImportingJob;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
public class GDataImporter {
@ -31,7 +30,7 @@ public class GDataImporter {
ProjectMetadata metadata,
final ImportingJob job,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions) throws IOException {
String docType = JSONUtilities.getString(options, "docType", null);
@ -64,7 +63,7 @@ public class GDataImporter {
ProjectMetadata metadata,
final ImportingJob job,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions) {
String docUrlString = JSONUtilities.getString(options, "docUrl", null);
@ -100,7 +99,7 @@ public class GDataImporter {
URL docURL,
int worksheetIndex,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions) {
try {

View File

@ -11,13 +11,12 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.api.services.drive.Drive;
import com.google.api.services.drive.model.File;
import com.google.api.services.drive.model.FileList;
@ -28,8 +27,8 @@ import com.google.api.services.fusiontables.model.TableList;
import com.google.api.services.sheets.v4.Sheets;
import com.google.api.services.sheets.v4.model.Sheet;
import com.google.api.services.sheets.v4.model.Spreadsheet;
import com.google.refine.ProjectManager;
import com.google.refine.ProjectMetadata;
import com.google.refine.RefineServlet;
import com.google.refine.commands.HttpUtilities;
import com.google.refine.importing.DefaultImportingController;
@ -37,7 +36,6 @@ import com.google.refine.importing.ImportingController;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingManager;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;
@ -86,11 +84,10 @@ public class GDataImportingController implements ImportingController {
}
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
try {
writer.object();
writer.key("documents");
writer.array();
writer.writeStartObject();
writer.writeArrayFieldStart("documents");
try {
listSpreadsheets(GoogleAPIExtension.getDriveService(token), writer);
@ -98,50 +95,52 @@ public class GDataImportingController implements ImportingController {
} catch (Exception e) {
logger.error("doListDocuments exception:" + ExceptionUtils.getStackTrace(e));
} finally {
writer.endArray();
writer.endObject();
writer.writeEndArray();
writer.writeEndObject();
}
} catch (JSONException e) {
} catch (IOException e) {
throw new ServletException(e);
} finally {
writer.flush();
writer.close();
w.flush();
w.close();
}
}
private void listSpreadsheets(Drive drive, JSONWriter writer)
throws IOException, JSONException {
private void listSpreadsheets(Drive drive, JsonGenerator writer)
throws IOException {
com.google.api.services.drive.Drive.Files.List files = drive.files().list();
files.setQ("mimeType = 'application/vnd.google-apps.spreadsheet'");
files.setFields("nextPageToken, files(id, name, webViewLink, owners, modifiedTime)");
FileList fileList = files.execute();
for (File entry : fileList.getFiles()) {
writer.object();
writer.key("docId"); writer.value(entry.getId());
writer.key("docLink"); writer.value(entry.getWebViewLink());
writer.key("docSelfLink"); writer.value(entry.getWebViewLink());
writer.key("title"); writer.value(entry.getName());
writer.writeStartObject();
writer.writeStringField("docId", entry.getId());
writer.writeStringField("docLink", entry.getWebViewLink());
writer.writeStringField("docSelfLink", entry.getWebViewLink());
writer.writeStringField("title", entry.getName());
writer.key("type"); writer.value("spreadsheet");
writer.writeStringField("type", "spreadsheet");
com.google.api.client.util.DateTime updated = entry.getModifiedTime();
if (updated != null) {
writer.key("updated"); writer.value(updated.toString());
writer.writeStringField("updated", updated.toString());
}
writer.key("authors"); writer.array();
writer.writeArrayFieldStart("authors");
for (User user : entry.getOwners()) {
writer.value(user.getDisplayName());
writer.writeString(user.getDisplayName());
}
writer.endArray();
writer.writeEndArray();
writer.endObject();
writer.writeEndObject();
}
}
private void listFusionTables(Fusiontables service, JSONWriter writer)
throws IOException, JSONException {
private void listFusionTables(Fusiontables service, JsonGenerator writer)
throws IOException {
Fusiontables.Table.List listTables = service.table().list();
TableList tablelist = listTables.execute();
@ -155,13 +154,13 @@ public class GDataImportingController implements ImportingController {
String link = "https://www.google.com/fusiontables/DataSource?docid=" + id;
// Add JSON object to our stream
writer.object();
writer.key("docId"); writer.value(id);
writer.key("docLink"); writer.value(link);
writer.key("docSelfLink"); writer.value(link);
writer.key("title"); writer.value(name);
writer.key("type"); writer.value("table");
writer.endObject();
writer.writeStartObject();
writer.writeStringField("docId", id);
writer.writeStringField("docLink", link);
writer.writeStringField("docSelfLink", link);
writer.writeStringField("title", name);
writer.writeStringField("type", "table");
writer.writeEndObject();
}
}
@ -171,8 +170,8 @@ public class GDataImportingController implements ImportingController {
String token = TokenCookie.getToken(request);
String type = parameters.getProperty("docType");
String urlString = parameters.getProperty("docUrl");
JSONObject result = new JSONObject();
JSONObject options = new JSONObject();
ObjectNode result = ParsingUtilities.mapper.createObjectNode();
ObjectNode options = ParsingUtilities.mapper.createObjectNode();
JSONUtilities.safePut(result, "status", "ok");
JSONUtilities.safePut(result, "options", options);
@ -181,7 +180,7 @@ public class GDataImportingController implements ImportingController {
JSONUtilities.safePut(options, "storeBlankCellsAsNulls", true);
if ("spreadsheet".equals(type)) {
JSONArray worksheets = new JSONArray();
ArrayNode worksheets = ParsingUtilities.mapper.createArrayNode();
// extract spreadSheetId from URL
String spreadSheetId = GoogleAPIExtension.extractSpreadSheetId(urlString);
@ -192,7 +191,7 @@ public class GDataImportingController implements ImportingController {
List<Sheet> worksheetEntries =
getWorksheetEntriesForDoc(token, spreadSheetId);
for (Sheet sheet : worksheetEntries) {
JSONObject worksheetO = new JSONObject();
ObjectNode worksheetO = ParsingUtilities.mapper.createObjectNode();
JSONUtilities.safePut(worksheetO, "name", sheet.getProperties().getTitle());
JSONUtilities.safePut(worksheetO, "rows", sheet.getProperties().getGridProperties().getRowCount());
JSONUtilities.safePut(worksheetO, "link",
@ -234,54 +233,50 @@ public class GDataImportingController implements ImportingController {
}
job.updating = true;
ObjectNode optionObj = ParsingUtilities.evaluateJsonStringToObjectNode(
request.getParameter("options"));
List<Exception> exceptions = new LinkedList<Exception>();
job.prepareNewProject();
GDataImporter.parse(
token,
job.project,
job.metadata,
job,
100,
optionObj,
exceptions
);
Writer w = response.getWriter();
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
try {
JSONObject optionObj = ParsingUtilities.evaluateJsonStringToObject(
request.getParameter("options"));
List<Exception> exceptions = new LinkedList<Exception>();
job.prepareNewProject();
GDataImporter.parse(
token,
job.project,
job.metadata,
job,
100,
optionObj,
exceptions
);
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
try {
writer.object();
if (exceptions.size() == 0) {
job.project.update(); // update all internal models, indexes, caches, etc.
writer.key("status"); writer.value("ok");
} else {
writer.key("status"); writer.value("error");
writer.key("errors");
writer.array();
DefaultImportingController.writeErrors(writer, exceptions);
writer.endArray();
}
writer.endObject();
} catch (JSONException e) {
throw new ServletException(e);
} finally {
w.flush();
w.close();
writer.writeStartObject();
if (exceptions.size() == 0) {
job.project.update(); // update all internal models, indexes, caches, etc.
writer.writeStringField("status", "ok");
} else {
writer.writeStringField("status", "error");
writer.writeArrayFieldStart("errors");
DefaultImportingController.writeErrors(writer, exceptions);
writer.writeEndArray();
}
} catch (JSONException e) {
writer.writeEndObject();
} catch (IOException e) {
throw new ServletException(e);
} finally {
job.touch();
job.updating = false;
writer.flush();
writer.close();
w.flush();
w.close();
}
job.touch();
job.updating = false;
}
private void doCreateProject(HttpServletRequest request, HttpServletResponse response, Properties parameters)
@ -297,57 +292,53 @@ public class GDataImportingController implements ImportingController {
}
job.updating = true;
try {
final JSONObject optionObj = ParsingUtilities.evaluateJsonStringToObject(
request.getParameter("options"));
final List<Exception> exceptions = new LinkedList<Exception>();
job.setState("creating-project");
final Project project = new Project();
new Thread() {
@Override
public void run() {
ProjectMetadata pm = new ProjectMetadata();
pm.setName(JSONUtilities.getString(optionObj, "projectName", "Untitled"));
pm.setEncoding(JSONUtilities.getString(optionObj, "encoding", "UTF-8"));
try {
GDataImporter.parse(
token,
project,
pm,
job,
-1,
optionObj,
exceptions
);
} catch (IOException e) {
logger.error(ExceptionUtils.getStackTrace(e));
}
if (!job.canceled) {
if (exceptions.size() > 0) {
job.setError(exceptions);
} else {
project.update(); // update all internal models, indexes, caches, etc.
ProjectManager.singleton.registerProject(project, pm);
job.setState("created-project");
job.setProjectID(project.id);
}
job.touch();
job.updating = false;
}
final ObjectNode optionObj = ParsingUtilities.evaluateJsonStringToObjectNode(
request.getParameter("options"));
final List<Exception> exceptions = new LinkedList<Exception>();
job.setState("creating-project");
final Project project = new Project();
new Thread() {
@Override
public void run() {
ProjectMetadata pm = new ProjectMetadata();
pm.setName(JSONUtilities.getString(optionObj, "projectName", "Untitled"));
pm.setEncoding(JSONUtilities.getString(optionObj, "encoding", "UTF-8"));
try {
GDataImporter.parse(
token,
project,
pm,
job,
-1,
optionObj,
exceptions
);
} catch (IOException e) {
logger.error(ExceptionUtils.getStackTrace(e));
}
}.start();
HttpUtilities.respond(response, "ok", "done");
} catch (JSONException e) {
throw new ServletException(e);
}
if (!job.canceled) {
if (exceptions.size() > 0) {
job.setError(exceptions);
} else {
project.update(); // update all internal models, indexes, caches, etc.
ProjectManager.singleton.registerProject(project, pm);
job.setState("created-project");
job.setProjectID(project.id);
}
job.touch();
job.updating = false;
}
}
}.start();
HttpUtilities.respond(response, "ok", "done");
}
}

View File

@ -25,7 +25,6 @@ import com.google.api.services.drive.DriveScopes;
import com.google.api.services.fusiontables.FusiontablesScopes;
import com.google.api.services.sheets.v4.Sheets;
import com.google.api.services.sheets.v4.SheetsScopes;
import com.google.refine.ProjectManager;
import com.google.refine.preference.PreferenceStore;
import com.google.refine.util.ParsingUtilities;

View File

@ -6,10 +6,10 @@ import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.api.services.sheets.v4.Sheets;
import com.google.api.services.sheets.v4.model.AppendCellsRequest;
import com.google.api.services.sheets.v4.model.BatchUpdateSpreadsheetRequest;
@ -17,7 +17,6 @@ import com.google.api.services.sheets.v4.model.BatchUpdateSpreadsheetResponse;
import com.google.api.services.sheets.v4.model.ExtendedValue;
import com.google.api.services.sheets.v4.model.Request;
import com.google.api.services.sheets.v4.model.RowData;
import com.google.refine.exporters.TabularSerializer;
final class SpreadsheetSerializer implements TabularSerializer {
@ -44,7 +43,7 @@ final class SpreadsheetSerializer implements TabularSerializer {
}
@Override
public void startFile(JSONObject options) {
public void startFile(JsonNode options) {
}

View File

@ -14,18 +14,17 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.codec.binary.Base64;
import com.fasterxml.jackson.core.JsonGenerator;
import com.google.api.client.http.FileContent;
import com.google.api.services.drive.Drive;
import com.google.api.services.drive.model.File;
import com.google.api.services.drive.model.File.ContentHints;
import com.google.api.services.drive.model.File.ContentHints.Thumbnail;
import com.google.refine.ProjectManager;
import com.google.refine.browsing.Engine;
import com.google.refine.commands.Command;
@ -34,6 +33,7 @@ import com.google.refine.commands.project.ExportRowsCommand;
import com.google.refine.exporters.CustomizableTabularExporterUtilities;
import com.google.refine.io.FileProjectManager;
import com.google.refine.model.Project;
import com.google.refine.util.ParsingUtilities;
public class UploadCommand extends Command {
static final Logger logger = LoggerFactory.getLogger("gdata_upload");
@ -61,31 +61,33 @@ public class UploadCommand extends Command {
response.setHeader("Content-Type", "application/json");
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
try {
writer.object();
writer.writeStartObject();
List<Exception> exceptions = new LinkedList<Exception>();
String url = upload(project, engine, params, token, name, exceptions);
if (url != null) {
writer.key("status"); writer.value("ok");
writer.key("url"); writer.value(url);
writer.writeStringField("status", "ok");
writer.writeStringField("url", url);
} else if (exceptions.size() == 0) {
writer.key("status"); writer.value("error");
writer.key("message"); writer.value("No such format");
writer.writeStringField("status", "error");
writer.writeStringField("message", "No such format");
} else {
for (Exception e : exceptions) {
logger.warn(e.getLocalizedMessage(), e);
}
writer.key("status"); writer.value("error");
writer.key("message"); writer.value(exceptions.get(0).getLocalizedMessage());
writer.writeStringField("status", "error");
writer.writeStringField("message", exceptions.get(0).getLocalizedMessage());
}
} catch (Exception e) {
e.printStackTrace();
writer.key("status"); writer.value("error");
writer.key("message"); writer.value(e.getMessage());
writer.writeStringField("status", "error");
writer.writeStringField("message", e.getMessage());
} finally {
writer.endObject();
writer.writeEndObject();
writer.flush();
writer.close();
w.flush();
w.close();
}

View File

@ -37,15 +37,16 @@ import java.io.LineNumberReader;
import java.io.Reader;
import java.util.List;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.ProjectMetadata;
import com.google.refine.importers.TabularImportingParserBase;
import com.google.refine.importing.ImportingJob;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;
public class PCAxisImporter extends TabularImportingParserBase {
static final Logger logger = LoggerFactory.getLogger(PCAxisImporter.class);
@ -55,9 +56,9 @@ public class PCAxisImporter extends TabularImportingParserBase {
}
@Override
public JSONObject createParserUIInitializationData(
ImportingJob job, List<JSONObject> fileRecords, String format) {
JSONObject options = new JSONObject();
public ObjectNode createParserUIInitializationData(
ImportingJob job, List<ObjectNode> fileRecords, String format) {
ObjectNode options = ParsingUtilities.mapper.createObjectNode();
JSONUtilities.safePut(options, "includeFileSources", fileRecords.size() > 1);
JSONUtilities.safePut(options, "skipDataLines", 0);
JSONUtilities.safePut(options, "limit", -1);
@ -72,7 +73,7 @@ public class PCAxisImporter extends TabularImportingParserBase {
String fileSource,
Reader reader,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
LineNumberReader lnReader = new LineNumberReader(reader);

View File

@ -5,7 +5,9 @@ import java.io.Writer;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONWriter;
import com.fasterxml.jackson.core.JsonGenerator;
import com.google.refine.util.ParsingUtilities;
public class CommandUtilities {
@ -21,11 +23,13 @@ public class CommandUtilities {
public static void respondError(HttpServletResponse response, String errorMessage)
throws IOException {
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
writer.object();
writer.key("code"); writer.value("error");
writer.key("message"); writer.value(errorMessage);
writer.endObject();
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
writer.writeStartObject();
writer.writeStringField("code", "error");
writer.writeStringField("message", errorMessage);
writer.writeEndObject();
writer.flush();
writer.close();
w.flush();
w.close();
}

View File

@ -24,17 +24,18 @@
package org.openrefine.wikidata.commands;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONWriter;
import org.openrefine.wikidata.editing.ConnectionManager;
import com.fasterxml.jackson.core.JsonGenerator;
import com.google.refine.commands.Command;
import com.google.refine.util.ParsingUtilities;
public class LoginCommand extends Command {
@ -53,20 +54,17 @@ public class LoginCommand extends Command {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
StringWriter sb = new StringWriter(2048);
JSONWriter writer = new JSONWriter(sb);
Writer w = response.getWriter();
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
try {
writer.object();
writer.key("logged_in");
writer.value(manager.isLoggedIn());
writer.key("username");
writer.value(manager.getUsername());
writer.endObject();
} catch (JSONException e) {
logger.error(e.getMessage());
}
respond(response, sb.toString());
writer.writeStartObject();
writer.writeBooleanField("logged_in", manager.isLoggedIn());
writer.writeStringField("username", manager.getUsername());
writer.writeEndObject();
writer.flush();
writer.close();
w.flush();
w.close();
}
@Override

View File

@ -0,0 +1,68 @@
package org.openrefine.wikidata.commands;
import java.util.List;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.qa.QAWarning.Severity;
import org.openrefine.wikidata.updates.ItemUpdate;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Read-only result bundle returned by the "preview Wikibase schema" command.
 * <p>
 * Serialized to JSON via Jackson; the {@code @JsonProperty} names define the
 * wire format consumed by the frontend, so they must not be renamed.
 */
public class PreviewResults {

    /**
     * Shared, reusable mapper for {@link #toString()}. {@code ObjectMapper} is
     * expensive to construct and documented as thread-safe once configured, so
     * it is cached instead of being rebuilt on every call.
     */
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Deduplicated warnings raised while inspecting the edit batch.
    protected List<QAWarning> warnings;
    // Highest severity among all raised warnings.
    protected Severity maxSeverity;
    // Total number of issues raised before deduplication (may exceed warnings.size()).
    protected int nbWarnings;
    // Number of non-null edits in the scheduled batch.
    protected int editCount;
    // First few scheduled edits, shown to the user as a preview.
    protected List<ItemUpdate> editsPreview;

    /** @return the deduplicated QA warnings for this batch */
    @JsonProperty("warnings")
    public List<QAWarning> getWarnings() {
        return warnings;
    }

    /** @return the maximum severity across all warnings */
    @JsonProperty("max_severity")
    public Severity getMaxSeverity() {
        return maxSeverity;
    }

    /** @return the total number of issues raised, counted before deduplication */
    @JsonProperty("nb_warnings")
    public int getNbWarnings() {
        return nbWarnings;
    }

    /** @return the number of edits in the scheduled batch */
    @JsonProperty("edit_count")
    public int getEditCount() {
        return editCount;
    }

    /** @return the leading edits of the batch, for display as a preview */
    @JsonProperty("edits_preview")
    public List<ItemUpdate> getEditsPreview() {
        return editsPreview;
    }

    /**
     * Builds an immutable-in-practice result holder.
     *
     * @param warnings     deduplicated QA warnings
     * @param maxSeverity  highest severity among the warnings
     * @param nbWarnings   total issue count before deduplication
     * @param editCount    number of scheduled non-null edits
     * @param editsPreview first edits of the batch to preview
     */
    protected PreviewResults(
            List<QAWarning> warnings,
            Severity maxSeverity,
            int nbWarnings,
            int editCount,
            List<ItemUpdate> editsPreview) {
        this.warnings = warnings;
        this.maxSeverity = maxSeverity;
        this.nbWarnings = nbWarnings;
        this.editCount = editCount;
        this.editsPreview = editsPreview;
    }

    /**
     * Renders this object as its JSON representation; falls back to the default
     * {@code Object.toString()} if serialization fails.
     */
    @Override
    public String toString() {
        try {
            return MAPPER.writeValueAsString(this);
        } catch (JsonProcessingException e) {
            return super.toString();
        }
    }
}

View File

@ -27,32 +27,26 @@ package org.openrefine.wikidata.commands;
import static org.openrefine.wikidata.commands.CommandUtilities.respondError;
import java.io.IOException;
import java.io.StringWriter;
import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONWriter;
import org.openrefine.wikidata.qa.EditInspector;
import org.openrefine.wikidata.qa.QAWarning;
import org.openrefine.wikidata.qa.QAWarningStore;
import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.scheduler.WikibaseAPIUpdateScheduler;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.refine.browsing.Engine;
import com.google.refine.commands.Command;
import com.google.refine.model.Project;
import com.google.refine.util.ParsingUtilities;
public class PreviewWikibaseSchemaCommand extends Command {
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
@ -69,7 +63,7 @@ public class PreviewWikibaseSchemaCommand extends Command {
if (jsonString != null) {
try {
schema = WikibaseSchema.reconstruct(jsonString);
} catch (JSONException e) {
} catch (IOException e) {
respondError(response, "Wikibase schema could not be parsed.");
return;
}
@ -87,50 +81,25 @@ public class PreviewWikibaseSchemaCommand extends Command {
Engine engine = getEngine(request, project);
List<ItemUpdate> editBatch = schema.evaluate(project, engine, warningStore);
StringWriter sb = new StringWriter(2048);
JSONWriter writer = new JSONWriter(sb);
writer.object();
// Inspect the edits and generate warnings
EditInspector inspector = new EditInspector(warningStore);
inspector.inspect(editBatch);
// Dump the first 10 edits, scheduled with the default scheduler
WikibaseAPIUpdateScheduler scheduler = new WikibaseAPIUpdateScheduler();
List<ItemUpdate> nonNullEdits = scheduler.schedule(editBatch).stream()
.filter(e -> !e.isNull())
.collect(Collectors.toList());
List<ItemUpdate> firstEdits = nonNullEdits.stream()
.limit(10)
.collect(Collectors.toList());
{
// Inspect the edits and generate warnings
EditInspector inspector = new EditInspector(warningStore);
inspector.inspect(editBatch);
writer.key("warnings");
writer.array();
for (QAWarning warning : warningStore.getWarnings()) {
warning.write(writer, new Properties());
}
writer.endArray();
// Add max warning level
writer.key("max_severity");
writer.value(warningStore.getMaxSeverity().toString());
// this is not the length of the warnings array written before,
// but the total number of issues raised (before deduplication)
writer.key("nb_warnings");
writer.value(warningStore.getNbWarnings());
// Dump the first 10 edits, scheduled with the default scheduler
WikibaseAPIUpdateScheduler scheduler = new WikibaseAPIUpdateScheduler();
List<ItemUpdate> nonNullEdits = scheduler.schedule(editBatch).stream()
.filter(e -> !e.isNull())
.collect(Collectors.toList());
writer.key("edit_count");
writer.value(nonNullEdits.size());
List<ItemUpdate> firstEdits = nonNullEdits.stream()
.limit(10)
.collect(Collectors.toList());
ObjectMapper mapper = new ObjectMapper();
String firstEditsJson = mapper.writeValueAsString(firstEdits);
writer.key("edits_preview");
writer.value(new JSONArray(firstEditsJson));
}
writer.endObject();
respond(response, sb.toString());
PreviewResults previewResults = new PreviewResults(
warningStore.getWarnings(),
warningStore.getMaxSeverity(),
warningStore.getNbWarnings(),
nonNullEdits.size(), firstEdits);
respondJSON(response, previewResults);
} catch (Exception e) {
respondException(response, e);
}

View File

@ -23,6 +23,8 @@
******************************************************************************/
package org.openrefine.wikidata.commands;
import static org.openrefine.wikidata.commands.CommandUtilities.respondError;
import java.io.IOException;
import java.util.Properties;
@ -30,11 +32,8 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.openrefine.wikidata.operations.SaveWikibaseSchemaOperation;
import org.openrefine.wikidata.schema.WikibaseSchema;
import static org.openrefine.wikidata.commands.CommandUtilities.respondError;
import com.google.refine.commands.Command;
import com.google.refine.model.AbstractOperation;
@ -57,15 +56,14 @@ public class SaveWikibaseSchemaCommand extends Command {
return;
}
JSONObject json = ParsingUtilities.evaluateJsonStringToObject(jsonString);
WikibaseSchema schema = WikibaseSchema.reconstruct(json);
WikibaseSchema schema = ParsingUtilities.mapper.readValue(jsonString, WikibaseSchema.class);
AbstractOperation op = new SaveWikibaseSchemaOperation(schema);
Process process = op.createProcess(project, new Properties());
performProcessAndRespond(request, response, project, process);
} catch (JSONException e) {
} catch (IOException e) {
// We do not use respondException here because this is an expected
// exception which happens every time a user tries to save an incomplete
// schema - the exception should not be logged.

View File

@ -25,16 +25,16 @@ package org.openrefine.wikidata.editing;
import java.io.IOException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.wikibaseapi.ApiConnection;
import org.wikidata.wdtk.wikibaseapi.LoginFailedException;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.ProjectManager;
import com.google.refine.preference.PreferenceStore;
import com.google.refine.util.ParsingUtilities;
/**
* Manages a connection to Wikidata, with login credentials stored in the
@ -72,16 +72,12 @@ public class ConnectionManager {
public void login(String username, String password, boolean rememberCredentials) {
if (rememberCredentials) {
try {
JSONArray array = new JSONArray();
JSONObject obj = new JSONObject();
obj.put("username", username);
obj.put("password", password);
array.put(obj);
prefStore.put(PREFERENCE_STORE_KEY, array);
} catch (JSONException e) {
logger.error(e.getMessage());
}
ArrayNode array = ParsingUtilities.mapper.createArrayNode();
ObjectNode obj = ParsingUtilities.mapper.createObjectNode();
obj.put("username", username);
obj.put("password", password);
array.add(obj);
prefStore.put(PREFERENCE_STORE_KEY, array);
}
connection = ApiConnection.getWikidataApiConnection();
@ -93,33 +89,27 @@ public class ConnectionManager {
}
public void restoreSavedConnection() {
JSONObject savedCredentials = getStoredCredentials();
ObjectNode savedCredentials = getStoredCredentials();
if (savedCredentials != null) {
connection = ApiConnection.getWikidataApiConnection();
try {
connection.login(savedCredentials.getString("username"), savedCredentials.getString("password"));
connection.login(savedCredentials.get("username").asText(), savedCredentials.get("password").asText());
} catch (LoginFailedException e) {
connection = null;
} catch (JSONException e) {
connection = null;
}
}
}
public JSONObject getStoredCredentials() {
JSONArray array = (JSONArray) prefStore.get(PREFERENCE_STORE_KEY);
if (array != null && array.length() > 0) {
try {
return array.getJSONObject(0);
} catch (JSONException e) {
logger.error(e.getMessage());
}
public ObjectNode getStoredCredentials() {
ArrayNode array = (ArrayNode) prefStore.get(PREFERENCE_STORE_KEY);
if (array != null && array.size() > 0 && array.get(0) instanceof ObjectNode) {
return (ObjectNode) array.get(0);
}
return null;
}
public void logout() {
prefStore.put(PREFERENCE_STORE_KEY, new JSONArray());
prefStore.put(PREFERENCE_STORE_KEY, ParsingUtilities.mapper.createArrayNode());
if (connection != null) {
try {
connection.logout();

View File

@ -23,13 +23,9 @@
******************************************************************************/
package org.openrefine.wikidata.editing;
import java.util.Properties;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import com.google.refine.Jsonizable;
/**
* This is just the necessary bits to store Wikidata credentials in OpenRefine's
@ -38,9 +34,11 @@ import com.google.refine.Jsonizable;
* @author Antonin Delpeuch
*
*/
class WikibaseCredentials implements Jsonizable {
class WikibaseCredentials {
@JsonProperty("username")
private String username;
@JsonProperty("password")
private String password;
public WikibaseCredentials() {
@ -48,7 +46,12 @@ class WikibaseCredentials implements Jsonizable {
password = null;
}
public WikibaseCredentials(String username, String password) {
@JsonCreator
public WikibaseCredentials(
@JsonProperty("username")
String username,
@JsonProperty("password")
String password) {
this.username = username;
this.password = password;
}
@ -64,23 +67,9 @@ class WikibaseCredentials implements Jsonizable {
public boolean isNonNull() {
return username != null && password != null && !"null".equals(username) && !"null".equals(password);
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("class");
writer.value(this.getClass().getName());
writer.key("username");
writer.value(username);
writer.key("password");
writer.value(password);
writer.endObject();
@JsonProperty("class")
public String getClassName() {
return getClass().getName();
}
public static WikibaseCredentials load(JSONObject obj)
throws JSONException {
return new WikibaseCredentials(obj.getString("username"), obj.getString("password"));
}
}

View File

@ -4,12 +4,12 @@ import java.io.IOException;
import java.io.Writer;
import java.util.Properties;
import org.json.JSONWriter;
import org.openrefine.wikidata.schema.WikibaseSchema;
import com.google.refine.browsing.Engine;
import com.google.refine.exporters.WriterExporter;
import com.google.refine.model.Project;
import com.google.refine.util.ParsingUtilities;
public class SchemaExporter implements WriterExporter {
@ -24,8 +24,7 @@ public class SchemaExporter implements WriterExporter {
if (schema == null) {
schema = new WikibaseSchema();
}
JSONWriter jsonWriter = new JSONWriter(writer);
schema.write(jsonWriter, new Properties());
ParsingUtilities.mapper.writeValue(writer, schema);
}
}

View File

@ -31,9 +31,6 @@ import java.util.Properties;
import java.util.Random;
import org.apache.commons.lang.Validate;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.openrefine.wikidata.editing.ConnectionManager;
import org.openrefine.wikidata.editing.EditBatchProcessor;
import org.openrefine.wikidata.editing.NewItemLibrary;
@ -46,16 +43,16 @@ import org.wikidata.wdtk.wikibaseapi.ApiConnection;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataEditor;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.refine.browsing.Engine;
import com.google.refine.browsing.EngineConfig;
import com.google.refine.history.Change;
import com.google.refine.history.HistoryEntry;
import com.google.refine.model.AbstractOperation;
import com.google.refine.model.Project;
import com.google.refine.operations.EngineDependentOperation;
import com.google.refine.operations.OperationRegistry;
import com.google.refine.process.LongRunningProcess;
import com.google.refine.process.Process;
import com.google.refine.util.Pool;
@ -64,44 +61,46 @@ public class PerformWikibaseEditsOperation extends EngineDependentOperation {
static final Logger logger = LoggerFactory.getLogger(PerformWikibaseEditsOperation.class);
@JsonProperty("summary")
private String summary;
public PerformWikibaseEditsOperation(EngineConfig engineConfig, String summary) {
@JsonCreator
public PerformWikibaseEditsOperation(
@JsonProperty("engineConfig")
EngineConfig engineConfig,
@JsonProperty("summary")
String summary) {
super(engineConfig);
Validate.notNull(summary, "An edit summary must be provided.");
Validate.notEmpty(summary, "An edit summary must be provided.");
this.summary = summary;
}
static public AbstractOperation reconstruct(Project project, JSONObject obj)
throws Exception {
JSONObject engineConfig = obj.getJSONObject("engineConfig");
String summary = null;
if (obj.has("summary")) {
summary = obj.getString("summary");
}
return new PerformWikibaseEditsOperation(
EngineConfig.reconstruct(engineConfig), summary);
/* The constructor above should be enough for deserialization,
* but for some unknown reason it can fail in certain cases
* (might be due to caching deserializers across threads?)
*
* So we sadly add a default constructor and a setter below.
*
* TODO delete the default constructor and setter, make summary final
*/
public PerformWikibaseEditsOperation() {
super(null);
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("op");
writer.value(OperationRegistry.s_opClassToName.get(this.getClass()));
writer.key("description");
writer.value("Perform Wikibase edits");
writer.key("summary");
writer.value(summary);
writer.key("engineConfig");
getEngineConfig().write(writer, options);
writer.endObject();
@JsonProperty("engineConfig")
public void setEngineConfig(EngineConfig config) {
this._engineConfig = config;
}
@JsonProperty("summary")
public void setSummary(String summary) {
this.summary = summary;
}
@Override
protected String getBriefDescription(Project project) {
return "Peform edits on Wikidata";
return "Perform Wikibase edits";
}
@Override

View File

@ -28,50 +28,53 @@ import java.io.LineNumberReader;
import java.io.Writer;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.openrefine.wikidata.schema.WikibaseSchema;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.history.Change;
import com.google.refine.history.HistoryEntry;
import com.google.refine.model.AbstractOperation;
import com.google.refine.model.Project;
import com.google.refine.operations.OperationRegistry;
import com.google.refine.util.ParsingUtilities;
import com.google.refine.util.Pool;
public class SaveWikibaseSchemaOperation extends AbstractOperation {
final public String operationDescription = "Save Wikibase schema";
final protected WikibaseSchema _schema;
@JsonIgnore
final public static String operationDescription = "Save Wikibase schema";
@JsonProperty("schema")
protected WikibaseSchema _schema;
public SaveWikibaseSchemaOperation(WikibaseSchema schema) {
@JsonCreator
public SaveWikibaseSchemaOperation(
@JsonProperty("schema")
WikibaseSchema schema) {
this._schema = schema;
}
static public AbstractOperation reconstruct(Project project, JSONObject obj)
throws Exception {
return new SaveWikibaseSchemaOperation(WikibaseSchema.reconstruct(obj.getJSONObject("schema")));
/* The constructor above should be enough for deserialization,
* but for some unknown reason it can fail in certain cases
* (might be due to caching deserializers across threads?)
*
* So we sadly add a default constructor and a setter below.
*
* TODO delete the default constructor and setter, make schema final
*/
public SaveWikibaseSchemaOperation() {
}
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("op");
writer.value(OperationRegistry.s_opClassToName.get(this.getClass()));
writer.key("description");
writer.value(operationDescription);
writer.key("schema");
_schema.write(writer, options);
writer.endObject();
@JsonProperty("schema")
public void setSchema(WikibaseSchema schema) {
this._schema = schema;
}
@Override
protected String getBriefDescription(Project project) {
return "Save Wikibase schema skelton";
return operationDescription;
}
@Override
@ -134,9 +137,9 @@ public class SaveWikibaseSchemaOperation extends AbstractOperation {
String value = line.substring(equal + 1);
if ("oldSchema".equals(field) && value.length() > 0) {
oldSchema = WikibaseSchema.reconstruct(ParsingUtilities.evaluateJsonStringToObject(value));
oldSchema = ParsingUtilities.mapper.readValue(value, WikibaseSchema.class);
} else if ("newSchema".equals(field) && value.length() > 0) {
newSchema = WikibaseSchema.reconstruct(ParsingUtilities.evaluateJsonStringToObject(value));
newSchema = ParsingUtilities.mapper.readValue(value, WikibaseSchema.class);
}
}
@ -149,12 +152,7 @@ public class SaveWikibaseSchemaOperation extends AbstractOperation {
static protected void writeWikibaseSchema(WikibaseSchema s, Writer writer)
throws IOException {
if (s != null) {
JSONWriter jsonWriter = new JSONWriter(writer);
try {
s.write(jsonWriter, new Properties());
} catch (JSONException e) {
e.printStackTrace();
}
ParsingUtilities.defaultWriter.writeValue(writer, s);
}
}
}

View File

@ -28,7 +28,6 @@ import java.util.Map;
import java.util.Map.Entry;
import org.jsoup.helper.Validate;
import org.openrefine.wikidata.utils.JacksonJsonizable;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
@ -42,7 +41,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
* @author Antonin Delpeuch
*
*/
public class QAWarning extends JacksonJsonizable implements Comparable<QAWarning> {
public class QAWarning implements Comparable<QAWarning> {
public enum Severity {
INFO, // We just report something to the user but it is probably fine

View File

@ -30,7 +30,6 @@ import org.jsoup.helper.Validate;
import org.openrefine.wikidata.schema.exceptions.SkipSchemaExpressionException;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
import org.openrefine.wikidata.utils.JacksonJsonizable;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
@ -48,7 +47,7 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
*/
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonTypeInfo(use = JsonTypeInfo.Id.NONE)
public class WbItemDocumentExpr extends JacksonJsonizable implements WbExpression<ItemUpdate> {
public class WbItemDocumentExpr implements WbExpression<ItemUpdate> {
private WbExpression<? extends ItemIdValue> subject;
private List<WbNameDescExpr> nameDescs;

View File

@ -26,11 +26,7 @@ package org.openrefine.wikidata.schema;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.openrefine.wikidata.qa.QAWarningStore;
import org.openrefine.wikidata.schema.exceptions.SkipSchemaExpressionException;
import org.openrefine.wikidata.updates.ItemUpdate;
@ -38,18 +34,16 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.refine.browsing.Engine;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RowVisitor;
import com.google.refine.model.OverlayModel;
import com.google.refine.model.Project;
import com.google.refine.model.Row;
import com.google.refine.util.ParsingUtilities;
/**
* Main class representing a skeleton of Wikibase edits with OpenRefine columns
@ -63,6 +57,7 @@ public class WikibaseSchema implements OverlayModel {
final static Logger logger = LoggerFactory.getLogger("RdfSchema");
@JsonProperty("itemDocuments")
protected List<WbItemDocumentExpr> itemDocumentExprs = new ArrayList<WbItemDocumentExpr>();
protected String baseIri = "http://www.wikidata.org/entity/";
@ -85,6 +80,7 @@ public class WikibaseSchema implements OverlayModel {
/**
* @return the site IRI of the Wikibase instance referenced by this schema
*/
@JsonIgnore
public String getBaseIri() {
return baseIri;
}
@ -92,6 +88,7 @@ public class WikibaseSchema implements OverlayModel {
/**
* @return the list of document expressions for this schema
*/
@JsonIgnore
public List<WbItemDocumentExpr> getItemDocumentExpressions() {
return itemDocumentExprs;
}
@ -180,39 +177,12 @@ public class WikibaseSchema implements OverlayModel {
;
}
}
static public WikibaseSchema reconstruct(JSONObject o)
throws JSONException {
return reconstruct(o.toString());
}
static public WikibaseSchema reconstruct(String json) throws JSONException {
ObjectMapper mapper = new ObjectMapper();
try {
return mapper.readValue(json, WikibaseSchema.class);
} catch (JsonParseException e) {
throw new JSONException(e.toString());
} catch (JsonMappingException e) {
throw new JSONException(e.toString());
} catch (IOException e) {
throw new JSONException(e.toString());
}
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("itemDocuments");
writer.array();
for (WbItemDocumentExpr changeExpr : itemDocumentExprs) {
changeExpr.write(writer, options);
}
writer.endArray();
writer.endObject();
static public WikibaseSchema reconstruct(String json) throws IOException {
return ParsingUtilities.mapper.readValue(json, WikibaseSchema.class);
}
static public WikibaseSchema load(Project project, JSONObject obj)
static public WikibaseSchema load(Project project, String obj)
throws Exception {
return reconstruct(obj);
}

View File

@ -27,6 +27,8 @@ import java.util.List;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* An entity id value that also comes with a label and possibly types.
*
@ -47,6 +49,7 @@ public interface PrefetchedEntityIdValue extends EntityIdValue {
*
* @return the preferred label of the entity
*/
@JsonProperty("label")
public String getLabel();
/**
@ -56,5 +59,6 @@ public interface PrefetchedEntityIdValue extends EntityIdValue {
*
* Empty lists should be returned for
*/
@JsonProperty("types")
public List<String> getTypes();
}

View File

@ -33,6 +33,7 @@ import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.model.Recon;
@ -72,6 +73,7 @@ public abstract class ReconEntityIdValue implements PrefetchedEntityIdValue {
return !isMatched();
}
@JsonProperty("label")
public String getLabel() {
if (isMatched()) {
return _recon.match.name;
@ -80,6 +82,7 @@ public abstract class ReconEntityIdValue implements PrefetchedEntityIdValue {
}
}
@JsonProperty("types")
public List<String> getTypes() {
if (isMatched()) {
return Arrays.asList(_recon.match.types);
@ -89,6 +92,7 @@ public abstract class ReconEntityIdValue implements PrefetchedEntityIdValue {
}
@Override
@JsonProperty("entityType")
public abstract String getEntityType();
/**
@ -96,6 +100,7 @@ public abstract class ReconEntityIdValue implements PrefetchedEntityIdValue {
*
* @return the reconciliation id of the reconciled cell
*/
@JsonProperty("reconInternalId")
public long getReconInternalId() {
return getRecon().id;
}
@ -114,6 +119,7 @@ public abstract class ReconEntityIdValue implements PrefetchedEntityIdValue {
* Returns the id of the reconciled item
*/
@Override
@JsonProperty("id")
public String getId() {
if (isMatched()) {
return _recon.match.id;
@ -126,6 +132,7 @@ public abstract class ReconEntityIdValue implements PrefetchedEntityIdValue {
}
@Override
@JsonProperty("siteIri")
public String getSiteIri() {
if (isMatched()) {
return _recon.identifierSpace;
@ -135,6 +142,7 @@ public abstract class ReconEntityIdValue implements PrefetchedEntityIdValue {
}
@Override
@JsonProperty("iri")
public String getIri() {
return getSiteIri() + getId();
}

View File

@ -30,6 +30,8 @@ import org.wikidata.wdtk.datamodel.helpers.Hash;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.ValueVisitor;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* An EntityIdValue that we have obtained from a suggest widget in the schema
* alignment dialog.
@ -50,26 +52,31 @@ public abstract class SuggestedEntityIdValue implements PrefetchedEntityIdValue
}
@Override
@JsonProperty("id")
public String getId() {
return _id;
}
@Override
@JsonProperty("siteIri")
public String getSiteIri() {
return _siteIRI;
}
@Override
@JsonProperty("label")
public String getLabel() {
return _label;
}
@Override
@JsonProperty("types")
public List<String> getTypes() {
return new ArrayList<>();
}
@Override
@JsonProperty("iri")
public String getIri() {
return getSiteIri() + getId();
}

View File

@ -1,79 +0,0 @@
/*******************************************************************************
* MIT License
*
* Copyright (c) 2018 Antonin Delpeuch
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package org.openrefine.wikidata.utils;
import java.io.IOException;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.refine.Jsonizable;
/**
* This class is inefficient because it serializes the object to string and then
* deserializes it back. Unfortunately, this is the only simple way to bridge
* Jackson to org.json. This conversion should be removed when (if?) we migrate
* OpenRefine to a better JSON library.
*
* @author Antonin Delpeuch
*
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public abstract class JacksonJsonizable implements Jsonizable {
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
ObjectMapper mapper = new ObjectMapper();
try {
writer.value(new JSONObject(mapper.writeValueAsString(this)));
} catch (JsonProcessingException e) {
throw new JSONException(e.toString());
}
}
public static <T> T fromJSONClass(JSONObject obj, Class<T> klass)
throws JSONException {
ObjectMapper mapper = new ObjectMapper();
String json = obj.toString();
try {
return mapper.readValue(json, klass);
} catch (JsonParseException e) {
throw new JSONException(e.toString());
} catch (JsonMappingException e) {
throw new JSONException(e.toString());
} catch (IOException e) {
throw new JSONException(e.toString());
}
}
}

View File

@ -33,7 +33,6 @@ import java.io.StringWriter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.openrefine.wikidata.testing.TestingData;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
@ -52,8 +51,7 @@ public abstract class CommandTest extends RefineTest {
protected Command command = null;
@BeforeMethod(alwaysRun = true)
public void setUpProject()
throws JSONException {
public void setUpProject() {
project = createCSVProject(TestingData.inceptionWithNewCsv);
TestingData.reconcileInceptionCells(project);
request = mock(HttpServletRequest.class);

View File

@ -6,15 +6,13 @@ import java.io.IOException;
import javax.servlet.ServletException;
import org.json.JSONException;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
public class LoginCommandTest extends CommandTest {
@BeforeMethod
public void SetUp()
throws JSONException {
public void SetUp() {
command = new LoginCommand();
}

View File

@ -31,35 +31,31 @@ import java.io.IOException;
import javax.servlet.ServletException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Assert;
import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.util.ParsingUtilities;
public class PreviewWikibaseSchemaCommandTest extends SchemaCommandTest {
@BeforeMethod
public void SetUp()
throws JSONException {
public void SetUp() {
command = new PreviewWikibaseSchemaCommand();
}
@Test
public void testValidSchema()
throws JSONException, IOException, ServletException {
throws IOException, ServletException {
String schemaJson = jsonFromFile("schema/inception.json").toString();
when(request.getParameter("schema")).thenReturn(schemaJson);
command.doPost(request, response);
JSONObject response = ParsingUtilities.evaluateJsonStringToObject(writer.toString());
JSONArray edits = response.getJSONArray("edits_preview");
assertEquals(3, edits.length());
ObjectNode response = ParsingUtilities.evaluateJsonStringToObjectNode(writer.toString());
ArrayNode edits = (ArrayNode) response.get("edits_preview");
assertEquals(3, edits.size());
}
}

View File

@ -32,6 +32,8 @@ import javax.servlet.ServletException;
import org.testng.annotations.Test;
import com.google.refine.util.ParsingUtilities;
public abstract class SchemaCommandTest extends CommandTest {
@Test
@ -48,6 +50,6 @@ public abstract class SchemaCommandTest extends CommandTest {
when(request.getParameter("schema")).thenReturn("{bogus json");
command.doPost(request, response);
assertEquals("{\"code\":\"error\",\"message\":\"Wikibase schema could not be parsed.\"}", writer.toString());
assertEquals("error", ParsingUtilities.mapper.readTree(writer.toString()).get("code").asText());
}
}

View File

@ -31,8 +31,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONObject;
import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
@ -67,10 +65,10 @@ public class QuickStatementsExporterTest extends RefineTest {
@Test
public void testSimpleProject()
throws JSONException, IOException {
throws IOException {
Project project = this.createCSVProject(TestingData.inceptionWithNewCsv);
TestingData.reconcileInceptionCells(project);
JSONObject serialized = TestingData.jsonFromFile("schema/inception.json");
String serialized = TestingData.jsonFromFile("schema/inception.json");
WikibaseSchema schema = WikibaseSchema.reconstruct(serialized);
project.overlayModels.put("wikibaseSchema", schema);
Engine engine = new Engine(project);

View File

@ -1,7 +1,5 @@
package org.openrefine.wikidata.exporters;
import static org.openrefine.wikidata.testing.JacksonSerializationTest.assertJsonEquals;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Properties;
@ -11,6 +9,7 @@ import org.testng.annotations.Test;
import com.google.refine.browsing.Engine;
import com.google.refine.model.Project;
import com.google.refine.tests.RefineTest;
import com.google.refine.tests.util.TestUtils;
public class SchemaExporterTest extends RefineTest {
@ -24,7 +23,7 @@ public class SchemaExporterTest extends RefineTest {
StringWriter writer = new StringWriter();
Properties properties = new Properties();
exporter.export(project, properties, engine, writer);
assertJsonEquals("{\"itemDocuments\":[]}", writer.toString());
TestUtils.assertEqualAsJson("{\"itemDocuments\":[]}", writer.toString());
}
}

View File

@ -32,9 +32,6 @@ import java.io.StringReader;
import java.io.StringWriter;
import java.util.Properties;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.openrefine.wikidata.testing.JacksonSerializationTest;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@ -43,6 +40,8 @@ import com.google.refine.model.AbstractOperation;
import com.google.refine.model.Project;
import com.google.refine.operations.OperationRegistry;
import com.google.refine.tests.RefineTest;
import com.google.refine.tests.util.TestUtils;
import com.google.refine.util.ParsingUtilities;
import com.google.refine.util.Pool;
import edu.mit.simile.butterfly.ButterflyModule;
@ -68,18 +67,17 @@ public abstract class OperationTest extends RefineTest {
public abstract AbstractOperation reconstruct()
throws Exception;
public abstract JSONObject getJson()
public abstract String getJson()
throws Exception;
@Test
public void testReconstruct()
throws Exception {
JSONObject json = getJson();
String json = getJson();
AbstractOperation op = reconstruct();
StringWriter writer = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(writer);
op.write(jsonWriter, new Properties());
JacksonSerializationTest.assertJsonEquals(json.toString(), writer.toString());
ParsingUtilities.defaultWriter.writeValue(writer, op);
TestUtils.assertEqualAsJson(json, writer.toString());
}
protected LineNumberReader makeReader(String input) {

View File

@ -27,7 +27,6 @@ import static org.junit.Assert.assertEquals;
import java.io.LineNumberReader;
import org.json.JSONObject;
import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@ -36,6 +35,7 @@ import com.google.refine.browsing.EngineConfig;
import com.google.refine.history.Change;
import com.google.refine.model.AbstractOperation;
import com.google.refine.model.Recon;
import com.google.refine.util.ParsingUtilities;
public class PerformWikibaseEditsOperationTest extends OperationTest {
@ -47,19 +47,18 @@ public class PerformWikibaseEditsOperationTest extends OperationTest {
@Override
public AbstractOperation reconstruct()
throws Exception {
JSONObject json = getJson();
return PerformWikibaseEditsOperation.reconstruct(project, json);
return ParsingUtilities.mapper.readValue(getJson(), PerformWikibaseEditsOperation.class);
}
@Override
public JSONObject getJson()
public String getJson()
throws Exception {
return TestingData.jsonFromFile("operations/perform-edits.json");
}
@Test(expectedExceptions=IllegalArgumentException.class)
public void testConstructor() {
new PerformWikibaseEditsOperation(EngineConfig.reconstruct(new JSONObject("{}")), "");
new PerformWikibaseEditsOperation(EngineConfig.reconstruct("{}"), "");
}
@Test

View File

@ -28,7 +28,6 @@ import static org.junit.Assert.assertNull;
import java.io.LineNumberReader;
import org.json.JSONObject;
import org.openrefine.wikidata.schema.WikibaseSchema;
import org.openrefine.wikidata.testing.TestingData;
import org.testng.annotations.BeforeMethod;
@ -36,6 +35,7 @@ import org.testng.annotations.Test;
import com.google.refine.history.Change;
import com.google.refine.model.AbstractOperation;
import com.google.refine.util.ParsingUtilities;
public class SaveWikibaseSchemaOperationTest extends OperationTest {
@ -47,11 +47,11 @@ public class SaveWikibaseSchemaOperationTest extends OperationTest {
@Override
public AbstractOperation reconstruct()
throws Exception {
return SaveWikibaseSchemaOperation.reconstruct(project, getJson());
return ParsingUtilities.mapper.readValue(getJson(), SaveWikibaseSchemaOperation.class);
}
@Override
public JSONObject getJson()
public String getJson()
throws Exception {
return TestingData.jsonFromFile("operations/save-schema.json");
}
@ -59,8 +59,8 @@ public class SaveWikibaseSchemaOperationTest extends OperationTest {
@Test
public void testLoadChange()
throws Exception {
JSONObject schemaJson = TestingData.jsonFromFile("schema/inception.json");
String changeString = "newSchema=" + schemaJson.toString() + "\n" + "oldSchema=\n" + "/ec/";
String schemaJson = TestingData.jsonFromFile("schema/inception.json");
String changeString = "newSchema=" + schemaJson + "\n" + "oldSchema=\n" + "/ec/";
WikibaseSchema schema = WikibaseSchema.reconstruct(schemaJson);
LineNumberReader reader = makeReader(changeString);

View File

@ -26,16 +26,11 @@ package org.openrefine.wikidata.schema;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.openrefine.wikidata.testing.TestingData;
import org.openrefine.wikidata.updates.ItemUpdate;
import org.openrefine.wikidata.updates.ItemUpdateBuilder;
@ -52,13 +47,11 @@ import org.wikidata.wdtk.datamodel.interfaces.StatementRank;
import org.wikidata.wdtk.datamodel.interfaces.StringValue;
import org.wikidata.wdtk.datamodel.interfaces.TimeValue;
import com.fasterxml.jackson.databind.exc.InvalidDefinitionException;
import com.google.refine.browsing.Engine;
import com.google.refine.browsing.EngineConfig;
import com.google.refine.model.Project;
import com.google.refine.tests.RefineTest;
import com.google.refine.util.ParsingUtilities;
import com.google.refine.tests.util.TestUtils;
public class WikibaseSchemaTest extends RefineTest {
@ -100,32 +93,25 @@ public class WikibaseSchemaTest extends RefineTest {
@Test
public void testSerialize()
throws JSONException, IOException {
JSONObject serialized = TestingData.jsonFromFile("schema/history_of_medicine.json");
throws IOException {
String serialized = TestingData.jsonFromFile("schema/history_of_medicine.json");
WikibaseSchema parsed = WikibaseSchema.reconstruct(serialized);
StringWriter writer = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(writer);
parsed.write(jsonWriter, new Properties());
writer.close();
JSONObject newSerialized = ParsingUtilities.evaluateJsonStringToObject(writer.toString());
// toString because it looks like JSONObject equality isn't great
assertEquals(TestingData.jsonFromFile("schema/history_of_medicine_normalized.json").toString(),
newSerialized.toString());
TestUtils.isSerializedTo(parsed, TestingData.jsonFromFile("schema/history_of_medicine_normalized.json").toString());
}
@Test
public void testDeserialize()
throws JSONException, IOException {
throws IOException {
// this json file was generated by an earlier version of the software
// it contains extra "type" fields that are now ignored.
JSONObject serialized = TestingData.jsonFromFile("schema/roarmap.json");
String serialized = TestingData.jsonFromFile("schema/roarmap.json");
WikibaseSchema.reconstruct(serialized);
}
@Test
public void testEvaluate()
throws JSONException, IOException {
JSONObject serialized = TestingData.jsonFromFile("schema/inception.json");
throws IOException {
String serialized = TestingData.jsonFromFile("schema/inception.json");
WikibaseSchema schema = WikibaseSchema.reconstruct(serialized);
Engine engine = new Engine(project);
List<ItemUpdate> updates = schema.evaluate(project, engine);
@ -137,8 +123,8 @@ public class WikibaseSchemaTest extends RefineTest {
assertEquals(expected, updates);
}
@Test(expectedExceptions = JSONException.class)
public void testDeserializeEmpty() throws JSONException {
@Test(expectedExceptions = IOException.class)
public void testDeserializeEmpty() throws IOException {
String schemaJson = "{\"itemDocuments\":[{\"statementGroups\":[{\"statements\":[]}],"
+"\"nameDescs\":[]}],\"wikibasePrefix\":\"http://www.wikidata.org/entity/\"}";
WikibaseSchema.reconstruct(schemaJson);
@ -146,11 +132,11 @@ public class WikibaseSchemaTest extends RefineTest {
@Test
public void testEvaluateRespectsFacets()
throws JSONException, IOException {
JSONObject serialized = TestingData.jsonFromFile("schema/inception.json");
throws IOException {
String serialized = TestingData.jsonFromFile("schema/inception.json");
WikibaseSchema schema = WikibaseSchema.reconstruct(serialized);
Engine engine = new Engine(project);
EngineConfig engineConfig = EngineConfig.reconstruct(new JSONObject("{\n"
EngineConfig engineConfig = EngineConfig.reconstruct("{\n"
+ " \"mode\": \"row-based\",\n"
+ " \"facets\": [\n"
+ " {\n"
@ -163,7 +149,7 @@ public class WikibaseSchemaTest extends RefineTest {
+ " \"columnName\": \"reference\"\n"
+ " }\n"
+ " ]\n"
+ " }"));
+ " }");
engine.initializeFromConfig(engineConfig);
List<ItemUpdate> updates = schema.evaluate(project, engine);
List<ItemUpdate> expected = new ArrayList<>();

View File

@ -33,16 +33,19 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.refine.tests.util.TestUtils;
import com.google.refine.util.ParsingUtilities;
public class JacksonSerializationTest {
private static ObjectMapper mapper = new ObjectMapper();
private static ObjectMapper mapper = ParsingUtilities.mapper;
public static void testSerialize(Object pojo, String expectedJson) {
// Test that the pojo is correctly serialized
try {
String actualJson = mapper.writeValueAsString(pojo);
assertJsonEquals(expectedJson, actualJson);
String actualJson = ParsingUtilities.defaultWriter.writeValueAsString(pojo);
TestUtils.assertEqualAsJson(expectedJson, actualJson);
} catch (JsonProcessingException e) {
e.printStackTrace();
Assert.fail("Failed to serialize object");
@ -66,15 +69,4 @@ public class JacksonSerializationTest {
testSerialize(pojo, json);
testDeserialize(targetClass, pojo, json);
}
public static void assertJsonEquals(String expectedJson, String actualJson) {
JsonNode parsedExpectedJson;
try {
parsedExpectedJson = mapper.readValue(expectedJson, JsonNode.class);
JsonNode parsedActualJson = mapper.readValue(actualJson, JsonNode.class);
assertEquals(parsedExpectedJson, parsedActualJson);
} catch (IOException e) {
Assert.fail("Invalid JSON");
}
}
}

View File

@ -25,13 +25,9 @@ package org.openrefine.wikidata.testing;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import org.apache.commons.io.IOUtils;
import org.json.JSONException;
import org.json.JSONObject;
import org.openrefine.wikidata.schema.WbLanguageConstant;
import org.openrefine.wikidata.schema.WbMonolingualExpr;
import org.openrefine.wikidata.schema.WbStringConstant;
@ -48,7 +44,6 @@ import com.google.refine.model.Cell;
import com.google.refine.model.Project;
import com.google.refine.model.Recon;
import com.google.refine.model.ReconCandidate;
import com.google.refine.util.ParsingUtilities;
public class TestingData {
@ -133,11 +128,11 @@ public class TestingData {
return generateStatement(from, pid, to);
}
public static JSONObject jsonFromFile(String filename)
throws IOException, JSONException {
public static String jsonFromFile(String filename)
throws IOException {
InputStream f = TestingData.class.getClassLoader().getResourceAsStream(filename);
String decoded = IOUtils.toString(f);
return ParsingUtilities.evaluateJsonStringToObject(decoded);
return decoded.trim();
}
public static void reconcileInceptionCells(Project project) {

View File

@ -1,21 +0,0 @@
Copyright (c) 2002 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -136,38 +136,6 @@
<goal>install-file</goal>
</goals>
</execution>
<execution>
<id>install-tableschema</id>
<phase>process-resources</phase>
<configuration>
<file>${basedir}/lib-local/tableschema-java-1.0-SNAPSHOT.jar</file>
<repositoryLayout>default</repositoryLayout>
<groupId>io.frictionlessdata</groupId>
<artifactId>tableschema-java</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<generatePom>true</generatePom>
</configuration>
<goals>
<goal>install-file</goal>
</goals>
</execution>
<execution>
<id>install-datapackage</id>
<phase>process-resources</phase>
<configuration>
<file>${basedir}/lib-local/datapackage-java-1.0-SNAPSHOT.jar</file>
<repositoryLayout>default</repositoryLayout>
<groupId>io.frictionlessdata</groupId>
<artifactId>datapackage-java</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<generatePom>true</generatePom>
</configuration>
<goals>
<goal>install-file</goal>
</goals>
</execution>
<execution>
<id>install-opencsv</id>
<phase>process-resources</phase>
@ -308,20 +276,15 @@
<artifactId>rhino</artifactId>
<version>1.7.10</version>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20160810</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.9.1</version>
<version>2.9.7</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.9.1</version>
<version>2.9.7</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
@ -349,9 +312,14 @@
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>3.2.2</version>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>3.2.2</version>
</dependency>
<dependency>
<groupId>commons-validator</groupId>
<artifactId>commons-validator</artifactId>
<version>1.5.1</version>
</dependency>
<dependency>
<groupId>velocity</groupId>
@ -428,21 +396,6 @@
<artifactId>swc-parser-lazy</artifactId>
<version>3.1.7</version>
</dependency>
<dependency>
<groupId>org.everit.json</groupId>
<artifactId>org.everit.json.schema</artifactId>
<version>1.5.1</version>
</dependency>
<dependency>
<groupId>io.frictionlessdata</groupId>
<artifactId>datapackage-java</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.frictionlessdata</groupId>
<artifactId>tableschema-java</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
@ -483,6 +436,11 @@
<artifactId>arithcode</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>19.0</version>
</dependency>
<!-- test dependencies -->

View File

@ -1,213 +0,0 @@
{
"version": "1.0.0",
"errors": {
"io-error": {
"name": "IO Error",
"type": "source",
"context": "table",
"weight": 100,
"message": "The data source returned an IO Error of type {error_type}",
"description": "Data reading error because of IO error.\n\n How it could be resolved:\n - Fix path if it's not correct."
},
"http-error": {
"name": "HTTP Error",
"type": "source",
"context": "table",
"weight": 100,
"message": "The data source returned an HTTP error with a status code of {status_code}",
"description": "Data reading error because of HTTP error.\n\n How it could be resolved:\n - Fix url link if it's not correct."
},
"source-error": {
"name": "Source Error",
"type": "source",
"context": "table",
"weight": 100,
"message": "The data source has not supported or has inconsistent contents; no tabular data can be extracted",
"description": "Data reading error because of not supported or inconsistent contents.\n\n How it could be resolved:\n - Fix data contents (e.g. change JSON data to array or arrays/objects).\n - Set correct source settings in {validator}."
},
"scheme-error": {
"name": "Scheme Error",
"type": "source",
"context": "table",
"weight": 100,
"message": "The data source is in an unknown scheme; no tabular data can be extracted",
"description": "Data reading error because of incorrect scheme.\n\n How it could be resolved:\n - Fix data scheme (e.g. change scheme from `ftp` to `http`).\n - Set correct scheme in {validator}."
},
"format-error": {
"name": "Format Error",
"type": "source",
"context": "table",
"weight": 100,
"message": "The data source is in an unknown format; no tabular data can be extracted",
"description": "Data reading error because of incorrect format.\n\n How it could be resolved:\n - Fix data format (e.g. change file extension from `txt` to `csv`).\n - Set correct format in {validator}."
},
"encoding-error": {
"name": "Encoding Error",
"type": "source",
"context": "table",
"weight": 100,
"message": "The data source could not be successfully decoded with {encoding} encoding",
"description": "Data reading error because of an encoding problem.\n\n How it could be resolved:\n - Fix data source if it's broken.\n - Set correct encoding in {validator}."
},
"blank-header": {
"name": "Blank Header",
"type": "structure",
"context": "head",
"weight": 3,
"message": "Header in column {column_number} is blank",
"description": "A column in the header row is missing a value. Column names should be provided.\n\n How it could be resolved:\n - Add the missing column name to the first row of the data source.\n - If the first row starts with, or ends with a comma, remove it.\n - If this error should be ignored disable `blank-header` check in {validator}."
},
"duplicate-header": {
"name": "Duplicate Header",
"type": "structure",
"context": "head",
"weight": 3,
"message": "Header in column {column_number} is duplicated to header in column(s) {column_numbers}",
"description": "Two columns in the header row have the same value. Column names should be unique.\n\n How it could be resolved:\n - Add the missing column name to the first row of the data.\n - If the first row starts with, or ends with a comma, remove it.\n - If this error should be ignored disable `duplicate-header` check in {validator}."
},
"blank-row": {
"name": "Blank Row",
"type": "structure",
"context": "body",
"weight": 9,
"message": "Row {row_number} is completely blank",
"description": "This row is empty. A row should contain at least one value.\n\n How it could be resolved:\n - Delete the row.\n - If this error should be ignored disable `blank-row` check in {validator}."
},
"duplicate-row": {
"name": "Duplicate Row",
"type": "structure",
"context": "body",
"weight": 5,
"message": "Row {row_number} is duplicated to row(s) {row_numbers}",
"description": "The exact same data has been seen in another row.\n\n How it could be resolved:\n - If some of the data is incorrect, correct it.\n - If the whole row is an incorrect duplicate, remove it.\n - If this error should be ignored disable `duplicate-row` check in {validator}."
},
"extra-value": {
"name": "Extra Value",
"type": "structure",
"context": "body",
"weight": 9,
"message": "Row {row_number} has an extra value in column {column_number}",
"description": "This row has more values compared to the header row (the first row in the data source). A key concept is that all the rows in tabular data must have the same number of columns.\n\n How it could be resolved:\n - Check data has an extra comma between the values in this row.\n - If this error should be ignored disable `extra-value` check in {validator}."
},
"missing-value": {
"name": "Missing Value",
"type": "structure",
"context": "body",
"weight": 9,
"message": "Row {row_number} has a missing value in column {column_number}",
"description": "This row has less values compared to the header row (the first row in the data source). A key concept is that all the rows in tabular data must have the same number of columns.\n\n How it could be resolved:\n - Check data is not missing a comma between the values in this row.\n - If this error should be ignored disable `missing-value` check in {validator}."
},
"schema-error": {
"name": "Table Schema Error",
"type": "schema",
"context": "table",
"weight": 15,
"message": "Table Schema error: {error_message}",
"description": "Provided schema is not valid.\n\n How it could be resolved:\n - Update schema descriptor to be a valid descriptor\n - If this error should be ignored disable schema checks in {validator}."
},
"non-matching-header": {
"name": "Non-Matching Header",
"type": "schema",
"context": "head",
"weight": 9,
"message": "Header in column {column_number} doesn't match field name {field_name} in the schema",
"description": "One of the data source headers doesn't match the field name defined in the schema.\n\n How it could be resolved:\n - Rename header in the data source or field in the schema\n - If this error should be ignored disable `non-matching-header` check in {validator}."
},
"extra-header": {
"name": "Extra Header",
"type": "schema",
"context": "head",
"weight": 9,
"message": "There is an extra header in column {column_number}",
"description": "The first row of the data source contains header that doesn't exist in the schema.\n\n How it could be resolved:\n - Remove the extra column from the data source or add the missing field to the schema\n - If this error should be ignored disable `extra-header` check in {validator}."
},
"missing-header": {
"name": "Missing Header",
"type": "schema",
"context": "head",
"weight": 9,
"message": "There is a missing header in column {column_number}",
"description": "Based on the schema there should be a header that is missing in the first row of the data source.\n\n How it could be resolved:\n - Add the missing column to the data source or remove the extra field from the schema\n - If this error should be ignored disable `missing-header` check in {validator}."
},
"type-or-format-error": {
"name": "Type or Format Error",
"type": "schema",
"context": "body",
"weight": 9,
"message": "The value {value} in row {row_number} and column {column_number} is not type {field_type} and format {field_format}",
"description": "The value does not match the schema type and format for this field.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If this value is correct, adjust the type and/or format.\n - To ignore the error, disable the `type-or-format-error` check in {validator}. In this case all schema checks for row values will be ignored."
},
"required-constraint": {
"name": "Required Constraint",
"type": "schema",
"context": "body",
"weight": 9,
"message": "Column {column_number} is a required field, but row {row_number} has no value",
"description": "This field is a required field, but it contains no value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove the `required` constraint from the schema.\n - If this error should be ignored disable `required-constraint` check in {validator}."
},
"pattern-constraint": {
"name": "Pattern Constraint",
"type": "schema",
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the pattern constraint of {constraint}",
"description": "This field value should conform to constraint pattern.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove or refine the `pattern` constraint in the schema.\n - If this error should be ignored disable `pattern-constraint` check in {validator}."
},
"unique-constraint": {
"name": "Unique Constraint",
"type": "schema",
"context": "body",
"weight": 9,
"message": "Rows {row_numbers} has unique constraint violation in column {column_number}",
"description": "This field is a unique field but it contains a value that has been used in another row.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then the values in this column are not unique. Remove the `unique` constraint from the schema.\n - If this error should be ignored disable `unique-constraint` check in {validator}."
},
"enumerable-constraint": {
"name": "Enumerable Constraint",
"type": "schema",
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the given enumeration: {constraint}",
"description": "This field value should be equal to one of the values in the enumeration constraint.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove or refine the `enum` constraint in the schema.\n - If this error should be ignored disable `enumerable-constraint` check in {validator}."
},
"minimum-constraint": {
"name": "Minimum Constraint",
"type": "schema",
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the minimum constraint of {constraint}",
"description": "This field value should be greater or equal than constraint value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove or refine the `minimum` constraint in the schema.\n - If this error should be ignored disable `minimum-constraint` check in {validator}."
},
"maximum-constraint": {
"name": "Maximum Constraint",
"type": "schema",
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the maximum constraint of {constraint}",
"description": "This field value should be less or equal than constraint value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove or refine the `maximum` constraint in the schema.\n - If this error should be ignored disable `maximum-constraint` check in {validator}."
},
"minimum-length-constraint": {
"name": "Minimum Length Constraint",
"type": "schema",
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the minimum length constraint of {constraint}",
"description": "A lenght of this field value should be greater or equal than schema constraint value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove or refine the `minimumLength` constraint in the schema.\n - If this error should be ignored disable `minimum-length-constraint` check in {validator}."
},
"maximum-length-constraint": {
"name": "Maximum Length Constraint",
"type": "schema",
"context": "body",
"weight": 7,
"message": "The value {value} in row {row_number} and column {column_number} does not conform to the maximum length constraint of {constraint}",
"description": "A lenght of this field value should be less or equal than schema constraint value.\n\n How it could be resolved:\n - If this value is not correct, update the value.\n - If value is correct, then remove or refine the `maximumLength` constraint in the schema.\n - If this error should be ignored disable `maximum-length-constraint` check in {validator}."
}
}
}

View File

@ -1,16 +0,0 @@
{
"image": "",
"license": "",
"last_updated": "",
"keywords": [],
"sources": [{
"web": "",
"name": "",
"title": ""
}],
"name": "",
"description": "",
"resources": [],
"title": "",
"version": ""
}

View File

@ -1,49 +0,0 @@
/*
Copyright 2010, Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.google.refine;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONWriter;
/**
* Interface for streaming out JSON, either into HTTP responses or
* serialization files.
*
* @author dfhuynh
*/
public interface Jsonizable {
public void write(JSONWriter writer, Properties options) throws JSONException;
}

View File

@ -37,7 +37,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@ -46,20 +45,20 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.tools.tar.TarOutputStream;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.history.HistoryEntryManager;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.IMetadata;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.preference.PreferenceStore;
import com.google.refine.preference.TopList;
import com.google.refine.util.ParsingUtilities;
/**
* ProjectManager is responsible for loading and saving the workspace and projects.
@ -76,6 +75,7 @@ public abstract class ProjectManager {
// Don't spend more than this much time saving projects if doing a quick save
static protected final int QUICK_SAVE_MAX_TIME = 1000 * 30; // 30 secs
protected Map<Long, ProjectMetadata> _projectsMetadata;
protected Map<String, Integer> _projectsTags;// TagName, number of projects having that tag
protected PreferenceStore _preferenceStore;
@ -101,8 +101,8 @@ public abstract class ProjectManager {
transient protected Map<Long, Project> _projects;
static public ProjectManager singleton;
protected ProjectManager() {
protected ProjectManager(){
_projectsMetadata = new HashMap<Long, ProjectMetadata>();
_preferenceStore = new PreferenceStore();
_projects = new HashMap<Long, Project>();
@ -193,7 +193,7 @@ public abstract class ProjectManager {
} catch (Exception e) {
e.printStackTrace();
}
}
}//FIXME what should be the behaviour if metadata is null? i.e. not found
Project project = getProject(id);
if (project != null && metadata != null && metadata.getModified().isAfter(project.getLastSave())) {
@ -202,7 +202,8 @@ public abstract class ProjectManager {
} catch (Exception e) {
e.printStackTrace();
}
}
}//FIXME what should be the behaviour if project is null? i.e. not found or loaded.
//FIXME what should happen if the metadata is found, but not the project? or vice versa?
}
}
@ -213,7 +214,7 @@ public abstract class ProjectManager {
* @param projectId
* @throws Exception
*/
public abstract void saveMetadata(IMetadata metadata, long projectId) throws Exception;
public abstract void saveMetadata(ProjectMetadata metadata, long projectId) throws Exception;
/**
* Save project to the data store
@ -266,23 +267,23 @@ public abstract class ProjectManager {
Project project = _projects.get(id); // don't call getProject() as that will load the project.
if (project != null) {
LocalDateTime projectLastSaveTime = project.getLastSave();
boolean hasUnsavedChanges =
!metadata.getModified().isBefore(projectLastSaveTime);
metadata.getModified().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() >= project.getLastSave().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
// We use >= instead of just > to avoid the case where a newly created project
// has the same modified and last save times, resulting in the project not getting
// saved at all.
if (hasUnsavedChanges) {
long msecsOverdue = ChronoUnit.MILLIS.between(projectLastSaveTime, startTimeOfSave);
long msecsOverdue = startTimeOfSave.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() - project.getLastSave().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
records.add(new SaveRecord(project, msecsOverdue));
} else if (!project.getProcessManager().hasPending()
&& ChronoUnit.MILLIS.between(projectLastSaveTime, startTimeOfSave) > PROJECT_FLUSH_DELAY) {
&& startTimeOfSave.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() - project.getLastSave().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() > PROJECT_FLUSH_DELAY) {
/*
* It's been a while since the project was last saved and it hasn't been
* modified. We can safely remove it from the cache to save some memory.
* It's been a while since the project was last saved and it hasn't been
* modified. We can safely remove it from the cache to save some memory.
*/
_projects.remove(id).dispose();
}
@ -308,10 +309,13 @@ public abstract class ProjectManager {
"Saving all modified projects ..." :
"Saving some modified projects ..."
);
for (int i = 0;i < records.size() &&
(allModified || (ChronoUnit.MILLIS.between(startTimeOfSave, LocalDateTime.now()) < QUICK_SAVE_MAX_TIME));
for (int i = 0;
i < records.size() &&
(allModified || (LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() -
startTimeOfSave.atZone(ZoneId.systemDefault()).toInstant().toEpochMilli() < QUICK_SAVE_MAX_TIME));
i++) {
try {
saveProject(records.get(i).project);
} catch (Exception e) {
@ -342,6 +346,7 @@ public abstract class ProjectManager {
/**
* Gets the InterProjectModel from memory
*/
@JsonIgnore
public InterProjectModel getInterProjectModel() {
return _interProjectModel;
}
@ -349,14 +354,14 @@ public abstract class ProjectManager {
/**
* Gets the project metadata from memory
* Requires that the metadata has already been loaded from the data store.
* Requires that the metadata has already been loaded from the data store
* @param id
* @return
*/
public ProjectMetadata getProjectMetadata(long id) {
return _projectsMetadata.get(id);
}
/**
* Gets the project metadata from memory
* Requires that the metadata has already been loaded from the data store
@ -366,7 +371,7 @@ public abstract class ProjectManager {
public ProjectMetadata getProjectMetadata(String name) {
for (ProjectMetadata pm : _projectsMetadata.values()) {
if (pm.getName().equals(name)) {
return pm;
return pm;
}
}
return null;
@ -394,7 +399,7 @@ public abstract class ProjectManager {
* @param placeHolderJsonObj
* @return
*/
private boolean isValidUserMetadataDefinition(JSONObject placeHolderJsonObj) {
private boolean isValidUserMetadataDefinition(ObjectNode placeHolderJsonObj) {
return (placeHolderJsonObj != null &&
placeHolderJsonObj.has("name") &&
placeHolderJsonObj.has("display"));
@ -405,51 +410,46 @@ public abstract class ProjectManager {
return;
// place holder
JSONArray userMetadataPreference = null;
ArrayNode userMetadataPreference = null;
// actual metadata for project
JSONArray jsonObjArray = metadata.getUserMetadata();
ArrayNode jsonObjArray = metadata.getUserMetadata();
initDisplay(jsonObjArray);
try {
String userMeta = (String)_preferenceStore.get(PreferenceStore.USER_METADATA_KEY);
if (userMeta == null)
return;
userMetadataPreference = new JSONArray(userMeta);
} catch (JSONException e1) {
logger.warn("wrong definition of userMetadata format. Please use form [{\"name\": \"client name\", \"display\":true}, {\"name\": \"progress\", \"display\":false}]");
logger.error(ExceptionUtils.getStackTrace(e1));
}
String userMeta = (String)_preferenceStore.get(PreferenceStore.USER_METADATA_KEY);
if (userMeta == null)
return;
userMetadataPreference = ParsingUtilities.mapper.createArrayNode();
for (int index = 0; index < userMetadataPreference.length(); index++) {
try {
boolean found = false;
JSONObject placeHolderJsonObj = userMetadataPreference.getJSONObject(index);
if (!isValidUserMetadataDefinition(placeHolderJsonObj)) {
logger.warn("Skipped invalid user metadata definition" + placeHolderJsonObj.toString());
continue;
}
for (int i = 0; i < jsonObjArray.length(); i++) {
JSONObject jsonObj = jsonObjArray.getJSONObject(i);
if (jsonObj.getString("name").equals(placeHolderJsonObj.getString("name"))) {
found = true;
jsonObj.put("display", placeHolderJsonObj.get("display"));
break;
}
}
if (!found) {
placeHolderJsonObj.put("value", "");
metadata.getUserMetadata().put(placeHolderJsonObj);
logger.info("Put the placeholder {} for project {}",
placeHolderJsonObj.getString("name"),
metadata.getName());
}
} catch (JSONException e) {
logger.warn("Exception when mergeEmptyUserMetadata",e);
for (int index = 0; index < userMetadataPreference.size(); index++) {
boolean found = false;
ObjectNode placeHolderJsonObj = (ObjectNode) userMetadataPreference.get(index);
if (!isValidUserMetadataDefinition(placeHolderJsonObj)) {
logger.warn("Skipped invalid user metadata definition" + placeHolderJsonObj.toString());
continue;
}
for (int i = 0; i < jsonObjArray.size(); i++) {
JsonNode jsonObj = jsonObjArray.get(i);
if (!(jsonObj instanceof ObjectNode)) {
continue;
}
ObjectNode node = (ObjectNode)jsonObj;
if (node.get("name").asText("").equals(placeHolderJsonObj.get("name").asText(""))) {
found = true;
node.put("display", placeHolderJsonObj.get("display"));
break;
}
}
if (!found) {
placeHolderJsonObj.put("value", "");
metadata.getUserMetadata().add(placeHolderJsonObj);
logger.info("Put the placeholder {} for project {}",
placeHolderJsonObj.get("name").asText(""),
metadata.getName());
}
}
}
@ -457,13 +457,11 @@ public abstract class ProjectManager {
* Honor the user metadata display preference by defaulting "display" to false for each entry.
* @param jsonObjArray
*/
private void initDisplay(JSONArray jsonObjArray) {
for (int index = 0; index < jsonObjArray.length(); index++) {
try {
JSONObject projectMetaJsonObj = jsonObjArray.getJSONObject(index);
private void initDisplay(ArrayNode jsonObjArray) {
for (int index = 0; index < jsonObjArray.size(); index++) {
if (jsonObjArray.get(index) instanceof ObjectNode) {
ObjectNode projectMetaJsonObj = (ObjectNode) jsonObjArray.get(index);
projectMetaJsonObj.put("display", false);
} catch (JSONException e) {
logger.error(ExceptionUtils.getStackTrace(e));
}
}
}
@ -472,7 +470,7 @@ public abstract class ProjectManager {
* Gets all the project Metadata currently held in memory.
* @return
*/
@JsonIgnore
public Map<Long, ProjectMetadata> getAllProjectMetadata() {
for(Project project : _projects.values()) {
mergeEmptyUserMetadata(project.getMetadata());
@ -483,14 +481,14 @@ public abstract class ProjectManager {
/**
* Gets all the project tags currently held in memory
*
*
* @return
*/
@JsonIgnore
public Map<String, Integer> getAllProjectTags() {
return _projectsTags;
}
/**
* Gets the required project from the data store
* If project does not already exist in memory, it is loaded from the data store
@ -517,6 +515,7 @@ public abstract class ProjectManager {
* Gets the preference store
* @return
*/
@JsonProperty("preferences")
public PreferenceStore getPreferenceStore() {
return _preferenceStore;
}
@ -525,6 +524,7 @@ public abstract class ProjectManager {
* Gets all expressions from the preference store
* @return
*/
@JsonIgnore
public List<String> getExpressions() {
return ((TopList) _preferenceStore.get("scripting.expressions")).getList();
}
@ -533,6 +533,7 @@ public abstract class ProjectManager {
* The history entry manager deals with changes
* @return manager for handling history
*/
@JsonIgnore
public abstract HistoryEntryManager getHistoryEntryManager();
@ -596,9 +597,8 @@ public abstract class ProjectManager {
*
* @param ps
*/
public static void preparePreferenceStore(PreferenceStore ps) {
static protected void preparePreferenceStore(PreferenceStore ps) {
ps.put("scripting.expressions", new TopList(s_expressionHistoryMax));
ps.put("scripting.starred-expressions", new TopList(Integer.MAX_VALUE));
}
}

View File

@ -0,0 +1,376 @@
/*
Copyright 2010, Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.google.refine;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.preference.PreferenceStore;
import com.google.refine.util.JsonViews;
import com.google.refine.util.ParsingUtilities;
/**
 * Serializable metadata describing a single OpenRefine project: name, timestamps,
 * provenance fields (creator, contributors, license, ...), import options and
 * free-form user metadata. Serialized with Jackson; fields annotated with
 * {@code @JsonView(JsonViews.SaveMode.class)} are only written when saving to
 * the workspace, not when serving metadata over HTTP.
 *
 * NOTE(review): {@link #setAnyField(String, String)} locates fields by reflection
 * on the exact name {@code "_" + metaName}, so the underscore-prefixed field names
 * are part of this class's runtime contract — do not rename them.
 */
public class ProjectMetadata {
    // On-disk file names used by the metadata save/rotate cycle.
    public final static String DEFAULT_FILE_NAME = "metadata.json";
    public final static String TEMP_FILE_NAME = "metadata.temp.json";
    public final static String OLD_FILE_NAME = "metadata.old.json";

    @JsonProperty("created")
    private final LocalDateTime _created;
    @JsonProperty("modified")
    private LocalDateTime _modified;
    // Last time this metadata was persisted; null means "never written".
    // Presumably assigned by the save path elsewhere in the codebase — not set here.
    @JsonIgnore
    private LocalDateTime written = null;
    @JsonProperty("name")
    private String _name = "";
    @JsonProperty("password")
    @JsonView(JsonViews.SaveMode.class)
    private String _password = "";
    @JsonProperty("encoding")
    @JsonView(JsonViews.SaveMode.class)
    private String _encoding = "";
    @JsonProperty("encodingConfidence")
    @JsonView(JsonViews.SaveMode.class)
    private int _encodingConfidence;
    @JsonProperty("tags")
    private String[] _tags = new String[0];
    @JsonProperty("creator")
    private String _creator = "";
    @JsonProperty("contributors")
    private String _contributors = "";
    @JsonProperty("subject")
    private String _subject = ""; // Several refine projects may be linked
    @JsonProperty("description")
    private String _description = ""; // free form of comment
    @JsonProperty("rowCount")
    private int _rowCount; // at the creation. Essential for cleaning old projects too heavy
    // NOTE(review): the five fields below have no visible getters/setters; title and
    // version are reachable via setAnyField ("_title"/"_version"), but license,
    // homepage and image lack the underscore prefix, so setAnyField("license", ...)
    // falls through to user metadata instead — confirm this is intended.
    @JsonProperty("title")
    private String _title = "";
    @JsonProperty("version")
    private String _version = "";
    @JsonProperty("license")
    private String license = "";
    @JsonProperty("homepage")
    private String homepage = "";
    @JsonProperty("image")
    private String image = "";

    // import options is an array for 1-n data sources
    @JsonProperty("importOptionMetadata")
    private ArrayNode _importOptionMetadata = ParsingUtilities.mapper.createArrayNode();

    // user metadata; serialized through getUserMetadataJson() so that an empty
    // array is omitted from the JSON output entirely
    @JsonIgnore
    private ArrayNode _userMetadata = ParsingUtilities.mapper.createArrayNode();

    @JsonProperty("customMetadata")
    private Map<String, Serializable> _customMetadata = new HashMap<String, Serializable>();
    @JsonProperty("preferences")
    @JsonView(JsonViews.SaveMode.class)
    private PreferenceStore _preferenceStore = new PreferenceStore();

    private final static Logger logger = LoggerFactory.getLogger("project_metadata");

    /**
     * Base constructor: records the creation time and seeds the per-project
     * preference store. Does NOT initialize _modified — callers must.
     */
    protected ProjectMetadata(LocalDateTime date) {
        _created = date;
        preparePreferenceStore(_preferenceStore);
    }

    /** Creates metadata stamped "now", with modified == created. */
    public ProjectMetadata() {
        this(LocalDateTime.now());
        _modified = _created;
    }

    /** Creates metadata with explicit timestamps and name (e.g. when reloading). */
    public ProjectMetadata(LocalDateTime created, LocalDateTime modified, String name) {
        this(created);
        _modified = modified;
        _name = name;
    }

    /** @return true if there are changes not yet persisted (never written, or modified since). */
    @JsonIgnore
    public boolean isDirty() {
        return written == null || _modified.isAfter(written);
    }

    /** Seeds default preferences; delegates to the workspace-level defaults. */
    static protected void preparePreferenceStore(PreferenceStore ps) {
        ProjectManager.preparePreferenceStore(ps);
        // Any project specific preferences?
    }

    @JsonIgnore
    public LocalDateTime getCreated() {
        return _created;
    }

    @JsonIgnore
    public void setName(String name) {
        this._name = name;
        updateModified();
    }

    @JsonIgnore
    public String getName() {
        return _name;
    }

    @JsonIgnore
    public void setEncoding(String encoding) {
        this._encoding = encoding;
        updateModified();
    }

    @JsonIgnore
    public String getEncoding() {
        return _encoding;
    }

    @JsonIgnore
    public void setEncodingConfidence(int confidence) {
        this._encodingConfidence = confidence;
        updateModified();
    }

    /**
     * String overload for convenience; null is silently ignored.
     * Throws NumberFormatException on a non-numeric value.
     */
    @JsonIgnore
    public void setEncodingConfidence(String confidence) {
        if (confidence != null) {
            this.setEncodingConfidence(Integer.parseInt(confidence));
        }
    }

    @JsonIgnore
    public int getEncodingConfidence() {
        return _encodingConfidence;
    }

    /**
     * Sets the project tags, trimming each tag and dropping null/blank entries.
     * A null array is stored as-is (getTags() later normalizes it to empty).
     */
    @JsonIgnore
    public void setTags(String[] tags) {
        if (tags != null) {
            List<String> tmpTags = new ArrayList<String>(tags.length);
            for (String tag : tags) {
                if (tag != null) {
                    String trimmedTag = tag.trim();
                    if (!trimmedTag.isEmpty()) {
                        tmpTags.add(trimmedTag);
                    }
                }
            }
            this._tags = tmpTags.toArray(new String[tmpTags.size()]);
        } else {
            this._tags = tags;
        }

        updateModified();
    }

    /** Never returns null; lazily repairs a null field to an empty array. */
    @JsonIgnore
    public String[] getTags() {
        if (_tags == null) this._tags = new String[0];
        return _tags;
    }

    @JsonIgnore
    public void setPassword(String password) {
        this._password = password;
        updateModified();
    }

    @JsonIgnore
    public String getPassword() {
        return _password;
    }

    @JsonIgnore
    public LocalDateTime getModified() {
        return _modified;
    }

    /** Stamps the metadata as modified "now"; called by every setter. */
    @JsonIgnore
    public void updateModified() {
        _modified = LocalDateTime.now();
    }

    @JsonIgnore
    public PreferenceStore getPreferenceStore() {
        return _preferenceStore;
    }

    @JsonIgnore
    public Serializable getCustomMetadata(String key) {
        return _customMetadata.get(key);
    }

    /** Stores (or, on null value, removes) a custom metadata entry. */
    public void setCustomMetadata(String key, Serializable value) {
        if (value == null) {
            _customMetadata.remove(key);
        } else {
            _customMetadata.put(key, value);
        }
        updateModified();
    }

    @JsonIgnore
    public ArrayNode getImportOptionMetadata() {
        return _importOptionMetadata;
    }

    @JsonIgnore
    public void setImportOptionMetadata(ArrayNode jsonArray) {
        _importOptionMetadata = jsonArray;
        updateModified();
    }

    /** Appends the options of one more data source to the import metadata. */
    public void appendImportOptionMetadata(ObjectNode options) {
        _importOptionMetadata.add(options);
        updateModified();
    }

    @JsonIgnore
    public String getCreator() {
        return _creator;
    }

    @JsonIgnore
    public void setCreator(String creator) {
        this._creator = creator;
        updateModified();
    }

    @JsonIgnore
    public String getContributors() {
        return _contributors;
    }

    @JsonIgnore
    public void setContributors(String contributors) {
        this._contributors = contributors;
        updateModified();
    }

    @JsonIgnore
    public String getSubject() {
        return _subject;
    }

    @JsonIgnore
    public void setSubject(String subject) {
        this._subject = subject;
        updateModified();
    }

    @JsonIgnore
    public String getDescription() {
        return _description;
    }

    @JsonIgnore
    public void setDescription(String description) {
        this._description = description;
        updateModified();
    }

    @JsonIgnore
    public int getRowCount() {
        return _rowCount;
    }

    @JsonIgnore
    public void setRowCount(int rowCount) {
        this._rowCount = rowCount;
        updateModified();
    }

    @JsonIgnore
    public ArrayNode getUserMetadata() {
        return _userMetadata;
    }

    /**
     * Serialization accessor for "userMetadata": returns null when the array is
     * empty so Jackson (Include.NON_NULL) omits the property entirely.
     */
    @JsonProperty("userMetadata")
    @JsonInclude(Include.NON_NULL)
    public ArrayNode getUserMetadataJson() {
        if (_userMetadata != null && _userMetadata.size() > 0) {
            return _userMetadata;
        }
        return null;
    }

    @JsonIgnore
    public void setUserMetadata(ArrayNode userMetadata) {
        this._userMetadata = userMetadata;
    }

    /** Updates the "value" of every user-metadata entry whose "name" matches. */
    private void updateUserMetadata(String metaName, String valueString) {
        for (int i = 0; i < _userMetadata.size(); i++) {
            ObjectNode obj = (ObjectNode)_userMetadata.get(i);
            if (obj.get("name").asText("").equals(metaName)) {
                obj.put("value", valueString);
            }
        }
    }

    /**
     * Generic setter used when patching metadata by field name: tries the
     * declared field "_" + metaName via reflection, falling back to user
     * metadata when no such field exists.
     *
     * NOTE(review): "tags" is split on "," without trimming, unlike setTags(),
     * and the reflection path never calls updateModified() — confirm whether
     * both are intentional.
     */
    public void setAnyField(String metaName, String valueString) {
        Class<? extends ProjectMetadata> metaClass = this.getClass();
        try {
            Field metaField = metaClass.getDeclaredField("_" + metaName);
            if (metaName.equals("tags")) {
                metaField.set(this, valueString.split(","));
            } else {
                metaField.set(this, valueString);
            }
        } catch (NoSuchFieldException e) {
            updateUserMetadata(metaName, valueString);
        } catch (SecurityException | IllegalArgumentException | IllegalAccessException e) {
            logger.error(ExceptionUtils.getFullStackTrace(e));
        }
    }
}

View File

@ -34,12 +34,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.browsing;
import java.time.OffsetDateTime;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONWriter;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.util.StringUtils;
/**
@ -49,11 +46,18 @@ import com.google.refine.util.StringUtils;
*
* Facet choices that are presented to the user as text are stored as decorated values.
*/
public class DecoratedValue implements Jsonizable {
public class DecoratedValue {
@JsonProperty("v")
final public Object value;
@JsonProperty("l")
final public String label;
public DecoratedValue(Object value, String label) {
@JsonCreator
public DecoratedValue(
@JsonProperty("v")
Object value,
@JsonProperty("l")
String label) {
if (value instanceof OffsetDateTime) {
this.value = StringUtils.toString(value);
} else {
@ -61,13 +65,4 @@ public class DecoratedValue implements Jsonizable {
}
this.label = label;
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("v"); writer.value(value);
writer.key("l"); writer.value(label);
writer.endObject();
}
}

View File

@ -36,14 +36,10 @@ package com.google.refine.browsing;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.browsing.facets.Facet;
import com.google.refine.browsing.util.ConjunctiveFilteredRecords;
import com.google.refine.browsing.util.ConjunctiveFilteredRows;
@ -54,10 +50,13 @@ import com.google.refine.model.Row;
/**
* Faceted browsing engine.
*/
public class Engine implements Jsonizable {
public class Engine {
static public enum Mode {
@JsonProperty("row-based")
RowBased,
@JsonProperty("record-based")
RecordBased
}
public final static String INCLUDE_DEPENDENT = "includeDependent";
@ -65,8 +64,11 @@ public class Engine implements Jsonizable {
public final static String MODE_ROW_BASED = "row-based";
public final static String MODE_RECORD_BASED = "record-based";
@JsonIgnore
protected Project _project;
@JsonProperty("facets")
protected List<Facet> _facets = new LinkedList<Facet>();
@JsonIgnore
protected EngineConfig _config = new EngineConfig(Collections.emptyList(), Mode.RowBased);
static public String modeToString(Mode mode) {
@ -80,6 +82,7 @@ public class Engine implements Jsonizable {
_project = project;
}
@JsonProperty("engine-mode")
public Mode getMode() {
return _config.getMode();
}
@ -87,6 +90,7 @@ public class Engine implements Jsonizable {
_config = new EngineConfig(_config.getFacetConfigs(), mode);
}
@JsonIgnore
public FilteredRows getAllRows() {
return new FilteredRows() {
@Override
@ -108,6 +112,7 @@ public class Engine implements Jsonizable {
};
}
@JsonIgnore
public FilteredRows getAllFilteredRows() {
return getFilteredRows(null);
}
@ -130,6 +135,7 @@ public class Engine implements Jsonizable {
throw new InternalError("Unknown mode.");
}
@JsonIgnore
public FilteredRecords getAllRecords() {
return new FilteredRecords() {
@Override
@ -148,6 +154,7 @@ public class Engine implements Jsonizable {
};
}
@JsonIgnore
public FilteredRecords getFilteredRecords() {
return getFilteredRecords(null);
}
@ -167,12 +174,6 @@ public class Engine implements Jsonizable {
}
throw new InternalError("This method should not be called when the engine is not in record mode.");
}
@Deprecated
public void initializeFromJSON(JSONObject o) throws JSONException {
EngineConfig config = EngineConfig.reconstruct(o);
initializeFromConfig(config);
}
public void initializeFromConfig(EngineConfig config) {
_config = config;
@ -181,7 +182,7 @@ public class Engine implements Jsonizable {
.collect(Collectors.toList());
}
public void computeFacets() throws JSONException {
public void computeFacets() {
if (_config.getMode().equals(Mode.RowBased)) {
for (Facet facet : _facets) {
FilteredRows filteredRows = getFilteredRows(facet);
@ -198,19 +199,4 @@ public class Engine implements Jsonizable {
throw new InternalError("Unknown mode.");
}
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("facets");
writer.array();
for (Facet facet : _facets) {
facet.write(writer, options);
}
writer.endArray();
writer.key(MODE); writer.value(_config.getMode().equals(Mode.RowBased) ? MODE_ROW_BASED : MODE_RECORD_BASED);
writer.endObject();
}
}

View File

@ -1,102 +1,50 @@
package com.google.refine.browsing;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.browsing.Engine.Mode;
import com.google.refine.browsing.facets.FacetConfig;
import com.google.refine.browsing.facets.ListFacet.ListFacetConfig;
import com.google.refine.browsing.facets.RangeFacet.RangeFacetConfig;
import com.google.refine.browsing.facets.ScatterplotFacet.ScatterplotFacetConfig;
import com.google.refine.browsing.facets.TextSearchFacet.TextSearchFacetConfig;
import com.google.refine.browsing.facets.TimeRangeFacet.TimeRangeFacetConfig;
import com.google.refine.util.ParsingUtilities;
public class EngineConfig implements Jsonizable {
public class EngineConfig {
protected final List<FacetConfig> _facets;
protected final Mode _mode;
public EngineConfig(List<FacetConfig> facets, Mode mode) {
_facets = facets;
_mode = mode;
@JsonCreator
public EngineConfig(
@JsonProperty("facets")
List<FacetConfig> facets,
@JsonProperty("mode")
Mode mode) {
_facets = facets == null ? Collections.emptyList() : facets;
_mode = mode == null ? Mode.RowBased : mode;
}
@JsonProperty("mode")
public Mode getMode() {
return _mode;
}
@JsonProperty("facets")
public List<FacetConfig> getFacetConfigs() {
return _facets;
}
public static EngineConfig reconstruct(JSONObject o) {
if (o == null) {
public static EngineConfig reconstruct(String json) {
if(json == null) {
return new EngineConfig(Collections.emptyList(), Mode.RowBased);
}
List<FacetConfig> facets = new LinkedList<>();
if (o.has("facets") && !o.isNull("facets")) {
JSONArray a = o.getJSONArray("facets");
int length = a.length();
for (int i = 0; i < length; i++) {
JSONObject fo = a.getJSONObject(i);
String type = fo.has("type") ? fo.getString("type") : "list";
FacetConfig facet = null;
if ("list".equals(type)) {
facet = new ListFacetConfig();
} else if ("range".equals(type)) {
facet = new RangeFacetConfig();
} else if ("timerange".equals(type)) {
facet = new TimeRangeFacetConfig();
} else if ("scatterplot".equals(type)) {
facet = new ScatterplotFacetConfig();
} else if ("text".equals(type)) {
facet = new TextSearchFacetConfig();
}
if (facet != null) {
facet.initializeFromJSON(fo);
facets.add(facet);
}
}
try {
return ParsingUtilities.mapper.readValue(json, EngineConfig.class);
} catch (IOException e) {
e.printStackTrace();
return null;
}
Mode mode = Mode.RowBased;
// for backward compatibility
if (o.has(Engine.INCLUDE_DEPENDENT) && !o.isNull(Engine.INCLUDE_DEPENDENT)) {
mode = o.getBoolean(Engine.INCLUDE_DEPENDENT) ? Mode.RecordBased : Mode.RowBased;
}
if (o.has(Engine.MODE) && !o.isNull(Engine.MODE)) {
mode = Engine.MODE_ROW_BASED.equals(o.getString(Engine.MODE)) ? Mode.RowBased : Mode.RecordBased;
}
return new EngineConfig(facets, mode);
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("facets");
writer.array();
for (FacetConfig facet : _facets) {
facet.write(writer, options);
}
writer.endArray();
writer.key(Engine.MODE); writer.value(_mode == Mode.RowBased ? Engine.MODE_ROW_BASED : Engine.MODE_RECORD_BASED);
writer.endObject();
}
}

View File

@ -33,7 +33,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.browsing.facets;
import com.google.refine.Jsonizable;
import com.google.refine.browsing.FilteredRecords;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RecordFilter;
@ -43,7 +42,7 @@ import com.google.refine.model.Project;
/**
* Interface of facets.
*/
public interface Facet extends Jsonizable {
public interface Facet {
public RowFilter getRowFilter(Project project);
public RecordFilter getRecordFilter(Project project);

View File

@ -1,8 +1,9 @@
package com.google.refine.browsing.facets;
import org.json.JSONObject;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.google.refine.model.Project;
@ -12,20 +13,30 @@ import com.google.refine.model.Project;
* of operations. It does not contain the actual values displayed by
* the facet.
*
* @author antonin
* @author Antonin Delpeuch
*
*/
public interface FacetConfig extends Jsonizable {
/**
* Reads the facet configuration from a JSON object (will be removed once we migrate to Jackson)
* @param fo
*/
public void initializeFromJSON(JSONObject fo);
// Polymorphic Jackson (de)serialization: the concrete FacetConfig implementation
// is selected by the "type" JSON property, mapped through the subtype list below.
// Adding a new facet requires registering its config class here.
@JsonTypeInfo(
        use=JsonTypeInfo.Id.NAME,
        include=JsonTypeInfo.As.PROPERTY,
        property="type")
@JsonSubTypes({
    @Type(value = ListFacet.ListFacetConfig.class, name = "list"),
    @Type(value = RangeFacet.RangeFacetConfig.class, name = "range"),
    @Type(value = TimeRangeFacet.TimeRangeFacetConfig.class, name = "timerange"),
    @Type(value = TextSearchFacet.TextSearchFacetConfig.class, name = "text"),
    @Type(value = ScatterplotFacet.ScatterplotFacetConfig.class, name = "scatterplot") })
public interface FacetConfig {
    /**
     * Instantiates the given facet on a particular project.
     * @param project
     * @return a computed facet on the given project.
     */
    public Facet apply(Project project);

    /**
     * The facet type as stored in json.
     * Must match the corresponding name registered in the @JsonSubTypes list above.
     */
    @JsonProperty("type")
    public String getJsonType();
}

View File

@ -0,0 +1,35 @@
package com.google.refine.browsing.facets;
import java.io.IOException;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
import com.fasterxml.jackson.databind.DatabindContext;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.jsontype.impl.TypeIdResolverBase;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.google.refine.model.recon.ReconConfig;
/**
 * Jackson TypeIdResolver intended to map facet type ids to classes.
 *
 * NOTE(review): every method below references ReconConfig, not FacetConfig —
 * this looks copy-pasted from a ReconConfig type resolver. FacetConfig's
 * polymorphism appears to be handled by @JsonSubTypes on the interface itself;
 * confirm whether this resolver is actually wired in anywhere, and if so,
 * whether it should be using FacetConfig#getJsonType and a facet name registry
 * instead of ReconConfig's mode map.
 */
public class FacetConfigResolver extends TypeIdResolverBase {
    // Shared factory used to construct JavaType instances in typeFromId.
    protected TypeFactory factory = TypeFactory.defaultInstance();

    @Override
    public Id getMechanism() {
        // Type ids are logical names (strings), not fully-qualified class names.
        return Id.NAME;
    }

    @Override
    public String idFromValue(Object instance) {
        // NOTE(review): casts to ReconConfig — see class-level note.
        return ((ReconConfig)instance).getMode();
    }

    @Override
    public String idFromValueAndType(Object instance, Class<?> type) {
        // NOTE(review): looks up ReconConfig's class-to-name map — see class-level note.
        return ReconConfig.s_opClassToName.get(type);
    }

    @Override
    public JavaType typeFromId(DatabindContext context, String id) throws IOException {
        // Builds a simple (non-generic) type for the class registered under this id.
        return factory.constructSimpleType(ReconConfig.getClassFromMode(id), new JavaType[0]);
    }
}

View File

@ -35,13 +35,13 @@ package com.google.refine.browsing.facets;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import java.util.stream.Collectors;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.ProjectManager;
import com.google.refine.browsing.DecoratedValue;
import com.google.refine.browsing.FilteredRecords;
@ -57,84 +57,67 @@ import com.google.refine.expr.MetaParser;
import com.google.refine.expr.ParsingException;
import com.google.refine.model.Column;
import com.google.refine.model.Project;
import com.google.refine.util.JSONUtilities;
public class ListFacet implements Facet {
public static final String ERR_TOO_MANY_CHOICES = "Too many choices";
/**
* Wrapper to respect the serialization format
*/
/**
 * Wrapper to respect the serialization format: each selected choice is
 * serialized as {"v": {...decorated value...}} rather than as the decorated
 * value directly, preserving the legacy JSON layout expected by the UI.
 */
public static class DecoratedValueWrapper {
    @JsonProperty("v")
    public final DecoratedValue value;

    @JsonCreator
    public DecoratedValueWrapper(
            @JsonProperty("v") DecoratedValue value) {
        this.value = value;
    }
}
/*
* Configuration
*/
public static class ListFacetConfig implements FacetConfig {
@JsonProperty("name")
public String name;
@JsonProperty("expression")
public String expression;
@JsonProperty("columnName")
public String columnName;
@JsonProperty("invert")
public boolean invert;
// If true, then facet won't show the blank and error choices
@JsonProperty("omitBlank")
public boolean omitBlank;
@JsonProperty("omitError")
public boolean omitError;
@JsonIgnore
public List<DecoratedValue> selection = new LinkedList<>();
@JsonProperty("selectNumber")
public boolean selectNumber;
@JsonProperty("selectDateTime")
public boolean selectDateTime;
@JsonProperty("selectBoolean")
public boolean selectBoolean;
@JsonProperty("selectBlank")
public boolean selectBlank;
@JsonProperty("selectError")
public boolean selectError;
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("type"); writer.value("list");
writer.key("name"); writer.value(name);
writer.key("expression"); writer.value(expression);
writer.key("columnName"); writer.value(columnName);
writer.key("invert"); writer.value(invert);
writer.key("selection"); writer.array();
for (DecoratedValue choice : selection) {
writer.object();
writer.key("v");
choice.write(writer, options);
writer.endObject();
}
writer.endArray();
writer.key("selectNumber"); writer.value(selectNumber);
writer.key("selectDateTime"); writer.value(selectDateTime);
writer.key("selectBoolean"); writer.value(selectBoolean);
writer.key("omitBlank"); writer.value(omitBlank);
writer.key("selectBlank"); writer.value(selectBlank);
writer.key("omitError"); writer.value(omitError);
writer.key("selectError"); writer.value(selectError);
writer.endObject();
@JsonProperty("selection")
public List<DecoratedValueWrapper> getWrappedSelection() {
return selection.stream()
.map(e -> new DecoratedValueWrapper(e))
.collect(Collectors.toList());
}
@Override
public void initializeFromJSON(JSONObject o) {
name = o.getString("name");
expression = o.getString("expression");
columnName = o.getString("columnName");
invert = o.has("invert") && o.getBoolean("invert");
JSONArray a = o.getJSONArray("selection");
int length = a.length();
for (int i = 0; i < length; i++) {
JSONObject oc = a.getJSONObject(i);
JSONObject ocv = oc.getJSONObject("v");
DecoratedValue decoratedValue = new DecoratedValue(
ocv.get("v"), ocv.getString("l"));
selection.add(decoratedValue);
}
omitBlank = JSONUtilities.getBoolean(o, "omitBlank", false);
omitError = JSONUtilities.getBoolean(o, "omitError", false);
selectNumber = JSONUtilities.getBoolean(o, "selectNumber", false);
selectDateTime = JSONUtilities.getBoolean(o, "selectDateTime", false);
selectBoolean = JSONUtilities.getBoolean(o, "selectBoolean", false);
selectBlank = JSONUtilities.getBoolean(o, "selectBlank", false);
selectError = JSONUtilities.getBoolean(o, "selectError", false);
@JsonProperty("selection")
public void setSelection(List<DecoratedValueWrapper> wrapped) {
selection = wrapped.stream()
.map(e -> e.value)
.collect(Collectors.toList());
}
@Override
@ -143,6 +126,27 @@ public class ListFacet implements Facet {
facet.initializeFromConfig(this, project);
return facet;
}
@Override
public String getJsonType() {
return "list";
}
}
/**
* Wrapper class for choice counts and selection status for blank and error
*/
public static class OtherChoice {
@JsonProperty("s")
boolean selected;
@JsonProperty("c")
int count;
public OtherChoice(
@JsonProperty("s") boolean selected,
@JsonProperty("c") int count) {
this.selected = selected;
this.count = count;
}
}
ListFacetConfig _config = new ListFacetConfig();
@ -166,66 +170,70 @@ public class ListFacet implements Facet {
public ListFacet() {
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("name"); writer.value(_config.name);
writer.key("expression"); writer.value(_config.expression);
writer.key("columnName"); writer.value(_config.columnName);
writer.key("invert"); writer.value(_config.invert);
if (_errorMessage != null) {
writer.key("error"); writer.value(_errorMessage);
} else if (_choices.size() > getLimit()) {
writer.key("error"); writer.value("Too many choices");
writer.key("choiceCount"); writer.value(_choices.size());
} else {
writer.key("choices"); writer.array();
for (NominalFacetChoice choice : _choices) {
choice.write(writer, options);
}
writer.endArray();
if (_config.selectNumber || _numberCount > 0) {
writer.key("numberChoice");
writer.object();
writer.key("s"); writer.value(_config.selectNumber);
writer.key("c"); writer.value(_numberCount);
writer.endObject();
}
if (_config.selectDateTime || _datetimeCount > 0) {
writer.key("datetimeChoice");
writer.object();
writer.key("s"); writer.value(_config.selectDateTime);
writer.key("c"); writer.value(_datetimeCount);
writer.endObject();
}
if (_config.selectBoolean || _booleanCount > 0) {
writer.key("booleanChoice");
writer.object();
writer.key("s"); writer.value(_config.selectBoolean);
writer.key("c"); writer.value(_booleanCount);
writer.endObject();
}
if (!_config.omitBlank && (_config.selectBlank || _blankCount > 0)) {
writer.key("blankChoice");
writer.object();
writer.key("s"); writer.value(_config.selectBlank);
writer.key("c"); writer.value(_blankCount);
writer.endObject();
}
if (!_config.omitError && (_config.selectError || _errorCount > 0)) {
writer.key("errorChoice");
writer.object();
writer.key("s"); writer.value(_config.selectError);
writer.key("c"); writer.value(_errorCount);
writer.endObject();
}
@JsonProperty("name")
public String getName() {
return _config.name;
}
@JsonProperty("columnName")
public String getColumnName() {
return _config.columnName;
}
@JsonProperty("expression")
public String getExpression() {
return _config.expression;
}
@JsonProperty("invert")
public boolean getInvert() {
return _config.invert;
}
@JsonProperty("error")
@JsonInclude(Include.NON_NULL)
public String getError() {
if (_errorMessage == null && _choices.size() > getLimit()) {
return ERR_TOO_MANY_CHOICES;
}
writer.endObject();
return _errorMessage;
}
@JsonProperty("choiceCount")
@JsonInclude(Include.NON_NULL)
public Integer getChoiceCount() {
if (_errorMessage == null && _choices.size() > getLimit()) {
return _choices.size();
}
return null;
}
@JsonProperty("choices")
@JsonInclude(Include.NON_NULL)
public List<NominalFacetChoice> getChoices() {
if (getError() == null) {
return _choices;
}
return null;
}
@JsonProperty("blankChoice")
@JsonInclude(Include.NON_NULL)
public OtherChoice getBlankChoice() {
if (getError() == null && !_config.omitBlank && (_config.selectBlank || _blankCount > 0)) {
return new OtherChoice(_config.selectBlank, _blankCount);
}
return null;
}
@JsonProperty("errorChoice")
@JsonInclude(Include.NON_NULL)
public OtherChoice getErrorChoice() {
if (getError() == null && !_config.omitError && (_config.selectError || _errorCount > 0)) {
return new OtherChoice(_config.selectError, _errorCount);
}
return null;
}
protected int getLimit() {
@ -359,4 +367,4 @@ public class ListFacet implements Facet {
}
return a;
}
}
}

View File

@ -33,34 +33,22 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.browsing.facets;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONWriter;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.browsing.DecoratedValue;
/**
* Store a facet choice that has a decorated value, a count of matched rows,
* and a flag of whether it has been selected.
*/
public class NominalFacetChoice implements Jsonizable {
public class NominalFacetChoice {
@JsonProperty("v")
final public DecoratedValue decoratedValue;
@JsonProperty("c")
public int count;
@JsonProperty("s")
public boolean selected;
public NominalFacetChoice(DecoratedValue decoratedValue) {
this.decoratedValue = decoratedValue;
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("v"); decoratedValue.write(writer, options);
writer.key("c"); writer.value(count);
writer.key("s"); writer.value(selected);
writer.endObject();
}
}

View File

@ -33,12 +33,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.browsing.facets;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.browsing.FilteredRecords;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RecordFilter;
@ -56,64 +55,70 @@ import com.google.refine.expr.MetaParser;
import com.google.refine.expr.ParsingException;
import com.google.refine.model.Column;
import com.google.refine.model.Project;
import com.google.refine.util.JSONUtilities;
public class RangeFacet implements Facet {
public static final String ERR_NO_NUMERIC_VALUE_PRESENT = "No numeric value present.";
/*
* Configuration, from the client side
*/
public static class RangeFacetConfig implements FacetConfig {
@JsonProperty("name")
protected String _name; // name of facet
@JsonProperty("expression")
protected String _expression; // expression to compute numeric value(s) per row
@JsonProperty("columnName")
protected String _columnName; // column to base expression on, if any
@JsonProperty(FROM)
protected double _from; // the numeric selection
@JsonProperty(TO)
protected double _to;
@JsonProperty("selectNumeric")
protected boolean _selectNumeric; // whether the numeric selection applies, default true
@JsonProperty("selectNonNumeric")
protected boolean _selectNonNumeric;
@JsonProperty("selectBlank")
protected boolean _selectBlank;
@JsonProperty("selectError")
protected boolean _selectError;
@JsonIgnore
protected boolean _selected; // false if we're certain that all rows will match
// and there isn't any filtering to do
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("type"); writer.value("range");
writer.key("name"); writer.value(_name);
writer.key("expression"); writer.value(_expression);
writer.key("columnName"); writer.value(_columnName);
writer.key(FROM); writer.value(_from);
writer.key(TO); writer.value(_to);
writer.key("selectNumeric"); writer.value(_selectNumeric);
writer.key("selectNonNumeric"); writer.value(_selectNonNumeric);
writer.key("selectError"); writer.value(_selectError);
writer.key("selectBlank"); writer.value(_selectBlank);
writer.endObject();
}
@Override
public void initializeFromJSON(JSONObject o) {
_name = o.getString("name");
_expression = o.getString("expression");
_columnName = o.getString("columnName");
if (o.has(FROM) || o.has(TO)) {
_from = o.has(FROM) ? o.getDouble(FROM) : 0;
_to = o.has(TO) ? o.getDouble(TO) : 0;
_selected = true;
}
_selectNumeric = JSONUtilities.getBoolean(o, "selectNumeric", true);
_selectNonNumeric = JSONUtilities.getBoolean(o, "selectNonNumeric", true);
_selectBlank = JSONUtilities.getBoolean(o, "selectBlank", true);
_selectError = JSONUtilities.getBoolean(o, "selectError", true);
if (!_selectNumeric || !_selectNonNumeric || !_selectBlank || !_selectError) {
_selected = true;
}
@JsonCreator
public RangeFacetConfig(
@JsonProperty("name")
String name,
@JsonProperty("expression")
String expression,
@JsonProperty("columnName")
String columnName,
@JsonProperty(FROM)
Double from,
@JsonProperty(TO)
Double to,
@JsonProperty("selectNumeric")
Boolean selectNumeric,
@JsonProperty("selectNonNumeric")
Boolean selectNonNumeric,
@JsonProperty("selectBlank")
Boolean selectBlank,
@JsonProperty("selectError")
Boolean selectError) {
_name = name;
_expression = expression;
_columnName = columnName;
_from = from == null ? 0 : from;
_to = to == null ? 0 : to;
_selectNumeric = selectNumeric == null ? true : selectNumeric;
_selectNonNumeric = selectNonNumeric == null ? true : selectNonNumeric;
_selectBlank = selectBlank == null ? true : selectBlank;
_selectError = selectError == null ? true : selectError;
_selected = !_selectNumeric || !_selectNonNumeric || !_selectBlank || !_selectError || from != null || to != null;
}
@Override
@ -122,8 +127,13 @@ public class RangeFacet implements Facet {
facet.initializeFromConfig(this, project);
return facet;
}
@Override
public String getJsonType() {
return "range";
}
}
RangeFacetConfig _config = new RangeFacetConfig();
RangeFacetConfig _config = null;
/*
* Derived configuration data
@ -141,16 +151,24 @@ public class RangeFacet implements Facet {
protected int[] _baseBins;
protected int[] _bins;
@JsonProperty("baseNumericCount")
protected int _baseNumericCount;
@JsonProperty("baseNonNumericCount")
protected int _baseNonNumericCount;
@JsonProperty("baseBlankCount")
protected int _baseBlankCount;
@JsonProperty("baseErrorCount")
protected int _baseErrorCount;
@JsonProperty("numericCount")
protected int _numericCount;
@JsonProperty("nonNumericCount")
protected int _nonNumericCount;
@JsonProperty("blankCount")
protected int _blankCount;
@JsonProperty("errorCount")
protected int _errorCount;
public RangeFacet() {
}
@ -159,54 +177,100 @@ public class RangeFacet implements Facet {
protected static final String TO = "to";
protected static final String FROM = "from";
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("name"); writer.value(_config._name);
writer.key("expression"); writer.value(_config._expression);
writer.key("columnName"); writer.value(_config._columnName);
if (_errorMessage != null) {
writer.key("error"); writer.value(_errorMessage);
} else {
if (!Double.isInfinite(_min) && !Double.isInfinite(_max)) {
writer.key(MIN); writer.value(_min);
writer.key(MAX); writer.value(_max);
writer.key("step"); writer.value(_step);
writer.key("bins"); writer.array();
for (int b : _bins) {
writer.value(b);
}
writer.endArray();
writer.key("baseBins"); writer.array();
for (int b : _baseBins) {
writer.value(b);
}
writer.endArray();
writer.key(FROM); writer.value(_config._from);
writer.key(TO); writer.value(_config._to);
} else {
writer.key("error"); writer.value("No numeric value present.");
}
writer.key("baseNumericCount"); writer.value(_baseNumericCount);
writer.key("baseNonNumericCount"); writer.value(_baseNonNumericCount);
writer.key("baseBlankCount"); writer.value(_baseBlankCount);
writer.key("baseErrorCount"); writer.value(_baseErrorCount);
writer.key("numericCount"); writer.value(_numericCount);
writer.key("nonNumericCount"); writer.value(_nonNumericCount);
writer.key("blankCount"); writer.value(_blankCount);
writer.key("errorCount"); writer.value(_errorCount);
}
writer.endObject();
@JsonProperty("name")
public String getName() {
return _config._name;
}
@JsonProperty("expression")
public String getExpression() {
return _config._expression;
}
@JsonProperty("columnName")
public String getColumnName() {
return _config._columnName;
}
@JsonProperty("error")
@JsonInclude(Include.NON_NULL)
public String getError() {
if (_errorMessage != null) {
return _errorMessage;
} else if (!isFiniteRange()) {
return ERR_NO_NUMERIC_VALUE_PRESENT;
}
return null;
}
@JsonIgnore
public boolean isFiniteRange() {
return !Double.isInfinite(_min) && !Double.isInfinite(_max);
}
@JsonProperty(MIN)
@JsonInclude(Include.NON_NULL)
public Double getMin() {
if (getError() == null) {
return _min;
}
return null;
}
@JsonProperty(MAX)
@JsonInclude(Include.NON_NULL)
public Double getMax() {
if (getError() == null) {
return _max;
}
return null;
}
@JsonProperty("step")
@JsonInclude(Include.NON_NULL)
public Double getStep() {
if (getError() == null) {
return _step;
}
return null;
}
@JsonProperty("bins")
@JsonInclude(Include.NON_NULL)
public int[] getBins() {
if (getError() == null) {
return _bins;
}
return null;
}
@JsonProperty("baseBins")
@JsonInclude(Include.NON_NULL)
public int[] getBaseBins() {
if (getError() == null) {
return _baseBins;
}
return null;
}
@JsonProperty(FROM)
@JsonInclude(Include.NON_NULL)
public Double getFrom() {
if (getError() == null) {
return _config._from;
}
return null;
}
@JsonProperty(TO)
@JsonInclude(Include.NON_NULL)
public Double getTo() {
if (getError() == null) {
return _config._to;
}
return null;
}
public void initializeFromConfig(RangeFacetConfig config, Project project) {
_config = config;

View File

@ -40,17 +40,17 @@ import java.awt.image.BufferedImage;
import java.awt.image.RenderedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Properties;
import javax.imageio.ImageIO;
import org.apache.commons.codec.binary.Base64;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.browsing.FilteredRecords;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RecordFilter;
@ -80,61 +80,64 @@ public class ScatterplotFacet implements Facet {
* Configuration, from the client side
*/
public static class ScatterplotFacetConfig implements FacetConfig {
@JsonProperty("name")
protected String name; // name of facet
@JsonProperty(X_EXPRESSION)
protected String expression_x; // expression to compute the x numeric value(s) per row
@JsonProperty(Y_EXPRESSION)
protected String expression_y; // expression to compute the y numeric value(s) per row
@JsonProperty(X_COLUMN_NAME)
protected String columnName_x; // column to base the x expression on, if any
@JsonProperty(Y_COLUMN_NAME)
protected String columnName_y; // column to base the y expression on, if any
@JsonProperty(SIZE)
protected int size;
@JsonIgnore
protected int dim_x;
@JsonIgnore
protected int dim_y;
@JsonIgnore
protected String rotation_str;
@JsonIgnore
protected int rotation;
@JsonIgnore
protected double l;
@JsonProperty(DOT)
protected double dot;
protected String color_str;
protected Color color;
@JsonIgnore
protected String color_str = "000000";
@JsonIgnore
protected Color getColor() {
return new Color(Integer.parseInt(color_str,16));
}
@JsonProperty(FROM_X)
protected double from_x; // the numeric selection for the x axis, from 0 to 1
@JsonProperty(TO_X)
protected double to_x;
@JsonProperty(FROM_Y)
protected double from_y; // the numeric selection for the y axis, from 0 to 1
@JsonProperty(TO_Y)
protected double to_y;
protected boolean selected; // false if we're certain that all rows will match
// false if we're certain that all rows will match
// and there isn't any filtering to do
protected boolean isSelected() {
return from_x > 0 || to_x < 1 || from_y > 0 || to_y < 1;
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("type"); writer.value("scatterplot");
writer.key(NAME); writer.value(name);
writer.key(X_COLUMN_NAME); writer.value(columnName_x);
writer.key(X_EXPRESSION); writer.value(expression_x);
writer.key(Y_COLUMN_NAME); writer.value(columnName_y);
writer.key(Y_EXPRESSION); writer.value(expression_y);
writer.key(SIZE); writer.value(size);
writer.key(DOT); writer.value(dot);
if(!rotation_str.isEmpty()) {
writer.key(ROTATION); writer.value(rotation_str);
}
writer.key(DIM_X); writer.value(dim_x == LIN ? "lin" : "log");
writer.key(DIM_Y); writer.value(dim_y == LIN ? "lin" : "log");
if(!"000000".equals(color_str)) {
writer.key(COLOR); writer.value(color_str);
}
writer.key(FROM_X); writer.value(from_x);
writer.key(TO_X); writer.value(to_x);
writer.key(FROM_Y); writer.value(from_y);
writer.key(TO_Y); writer.value(to_y);
writer.endObject();
@JsonProperty(DIM_X)
public String getDimX() {
return dim_x == LIN ? "lin" : "log";
}
@JsonProperty(DIM_Y)
public String getDimY() {
return dim_y == LIN ? "lin" : "log";
}
@Override
@ -144,45 +147,6 @@ public class ScatterplotFacet implements Facet {
return facet;
}
@Override
public void initializeFromJSON(JSONObject o) {
name = o.getString(NAME);
l = size = (o.has(SIZE)) ? o.getInt(SIZE) : 100;
dot = (o.has(DOT)) ? o.getInt(DOT) : 0.5d;
dim_x = (o.has(DIM_X)) ? getAxisDim(o.getString(DIM_X)) : LIN;
if (o.has(FROM_X) && o.has(TO_X)) {
from_x = o.getDouble(FROM_X);
to_x = o.getDouble(TO_X);
selected = true;
} else {
from_x = 0;
to_x = 1;
}
dim_y = (o.has(DIM_Y)) ? getAxisDim(o.getString(DIM_Y)) : LIN;
if (o.has(FROM_Y) && o.has(TO_Y)) {
from_y = o.getDouble(FROM_Y);
to_y = o.getDouble(TO_Y);
selected = true;
} else {
from_y = 0;
to_y = 1;
}
rotation_str = (o.has(ROTATION) ? o.getString(ROTATION) : "");
rotation = getRotation(rotation_str);
color_str = (o.has(COLOR)) ? o.getString(COLOR) : "000000";
color = new Color(Integer.parseInt(color_str,16));
columnName_x = o.getString(X_COLUMN_NAME);
expression_x = o.getString(X_EXPRESSION);
columnName_y = o.getString(Y_COLUMN_NAME);
expression_y = o.getString(Y_EXPRESSION);
}
public static int getRotation(String rotation) {
rotation = rotation.toLowerCase();
if ("cw".equals(rotation) || "right".equals(rotation)) {
@ -193,6 +157,11 @@ public class ScatterplotFacet implements Facet {
return NO_ROTATION;
}
}
@Override
public String getJsonType() {
return "scatterplot";
}
}
ScatterplotFacetConfig config;
@ -255,46 +224,116 @@ public class ScatterplotFacet implements Facet {
}
}
@Override
public void write(JSONWriter writer, Properties options) throws JSONException {
writer.object();
writer.key(NAME); writer.value(config.name);
writer.key(X_COLUMN_NAME); writer.value(config.columnName_x);
writer.key(X_EXPRESSION); writer.value(config.expression_x);
writer.key(Y_COLUMN_NAME); writer.value(config.columnName_y);
writer.key(Y_EXPRESSION); writer.value(config.expression_y);
writer.key(SIZE); writer.value(config.size);
writer.key(DOT); writer.value(config.dot);
writer.key(ROTATION); writer.value(config.rotation);
writer.key(DIM_X); writer.value(config.dim_x);
writer.key(DIM_Y); writer.value(config.dim_y);
writer.key(COLOR); writer.value(config.color_str);
if (IMAGE_URI) {
writer.key(IMAGE); writer.value(image);
@JsonProperty(NAME)
public String getName() {
return config.name;
}
@JsonProperty(X_COLUMN_NAME)
public String getXColumnName() {
return config.columnName_x;
}
@JsonProperty(X_EXPRESSION)
public String getXExpression() {
return config.expression_x;
}
@JsonProperty(Y_COLUMN_NAME)
public String getYColumnName() {
return config.columnName_y;
}
@JsonProperty(Y_EXPRESSION)
public String getYExpression() {
return config.expression_y;
}
@JsonProperty(SIZE)
public int getSize() {
return config.size;
}
@JsonProperty(DIM_X)
public int getDimX() {
return config.dim_x;
}
@JsonProperty(DIM_Y)
public int getDimY() {
return config.dim_y;
}
@JsonProperty(DOT)
public double getDot() {
return config.dot;
}
@JsonProperty(ROTATION)
public double getRotation() {
return config.rotation;
}
@JsonProperty(COLOR)
public String getColorString() {
return config.color_str;
}
@JsonProperty(IMAGE)
@JsonInclude(Include.NON_NULL)
public String getImage() {
if(IMAGE_URI) {
return image;
}
if (errorMessage_x != null) {
writer.key(ERROR_X); writer.value(errorMessage_x);
} else {
if (!Double.isInfinite(min_x) && !Double.isInfinite(max_x)) {
writer.key(FROM_X); writer.value(config.from_x);
writer.key(TO_X); writer.value(config.to_x);
}
return null;
}
@JsonProperty(ERROR_X)
@JsonInclude(Include.NON_NULL)
public String getErrorX() {
return errorMessage_x;
}
@JsonProperty(FROM_X)
@JsonInclude(Include.NON_NULL)
public Double getFromX() {
if (errorMessage_x == null && !Double.isInfinite(min_x) && !Double.isInfinite(max_x)) {
return config.from_x;
}
if (errorMessage_y != null) {
writer.key(ERROR_Y); writer.value(errorMessage_y);
} else {
if (!Double.isInfinite(min_y) && !Double.isInfinite(max_y)) {
writer.key(FROM_Y); writer.value(config.from_y);
writer.key(TO_Y); writer.value(config.to_y);
}
return null;
}
@JsonProperty(TO_X)
@JsonInclude(Include.NON_NULL)
public Double getToX() {
if (errorMessage_x == null && !Double.isInfinite(min_x) && !Double.isInfinite(max_x)) {
return config.to_x;
}
writer.endObject();
return null;
}
@JsonProperty(ERROR_Y)
@JsonInclude(Include.NON_NULL)
public String getErrorY() {
return errorMessage_y;
}
@JsonProperty(FROM_Y)
@JsonInclude(Include.NON_NULL)
public Double getFromY() {
if (errorMessage_y == null && !Double.isInfinite(min_y) && !Double.isInfinite(max_y)) {
return config.from_y;
}
return null;
}
@JsonProperty(TO_Y)
@JsonInclude(Include.NON_NULL)
public Double getToY() {
if (errorMessage_y == null && !Double.isInfinite(min_y) && !Double.isInfinite(max_y)) {
return config.to_y;
}
return null;
}
public void initializeFromConfig(ScatterplotFacetConfig configuration, Project project) {
@ -348,7 +387,7 @@ public class ScatterplotFacet implements Facet {
@Override
public RowFilter getRowFilter(Project project) {
if (config.selected &&
if (config.isSelected() &&
eval_x != null && errorMessage_x == null &&
eval_y != null && errorMessage_y == null)
{
@ -393,7 +432,7 @@ public class ScatterplotFacet implements Facet {
if (index_x.isNumeric() && index_y.isNumeric()) {
ScatterplotDrawingRowVisitor drawer = new ScatterplotDrawingRowVisitor(
columnIndex_x, columnIndex_y, min_x, max_x, min_y, max_y,
config.size, config.dim_x, config.dim_y, config.rotation, config.dot, config.color
config.size, config.dim_x, config.dim_y, config.rotation, config.dot, config.getColor()
);
filteredRows.accept(project, drawer);
@ -424,7 +463,7 @@ public class ScatterplotFacet implements Facet {
if (index_x.isNumeric() && index_y.isNumeric()) {
ScatterplotDrawingRowVisitor drawer = new ScatterplotDrawingRowVisitor(
columnIndex_x, columnIndex_y, min_x, max_x, min_y, max_y,
config.size, config.dim_x, config.dim_y, config.rotation, config.dot, config.color
config.size, config.dim_x, config.dim_y, config.rotation, config.dot, config.getColor()
);
filteredRecords.accept(project, drawer);

View File

@ -33,13 +33,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.browsing.facets;
import java.util.Properties;
import java.util.regex.Pattern;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.browsing.FilteredRecords;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RecordFilter;
@ -58,27 +54,19 @@ public class TextSearchFacet implements Facet {
* Configuration
*/
public static class TextSearchFacetConfig implements FacetConfig {
@JsonProperty("name")
protected String _name;
@JsonProperty("columnName")
protected String _columnName;
@JsonProperty("query")
protected String _query = null;
@JsonProperty("mode")
protected String _mode;
@JsonProperty("caseSensitive")
protected boolean _caseSensitive;
@JsonProperty("invert")
protected boolean _invert;
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("name"); writer.value(_name);
writer.key("columnName"); writer.value(_columnName);
writer.key("query"); writer.value(_query);
writer.key("mode"); writer.value(_mode);
writer.key("caseSensitive"); writer.value(_caseSensitive);
writer.key("invert"); writer.value(_invert);
writer.key("type"); writer.value("text");
writer.endObject();
}
@Override
public TextSearchFacet apply(Project project) {
TextSearchFacet facet = new TextSearchFacet();
@ -87,15 +75,8 @@ public class TextSearchFacet implements Facet {
}
@Override
public void initializeFromJSON(JSONObject o) {
_name = o.getString("name");
_columnName = o.getString("columnName");
_mode = o.getString("mode");
_caseSensitive = o.getBoolean("caseSensitive");
if (!o.isNull("query")) {
_query = o.getString("query");
}
_invert = o.has("invert") && o.getBoolean("invert");
public String getJsonType() {
return "text";
}
}
TextSearchFacetConfig _config = new TextSearchFacetConfig();
@ -109,21 +90,37 @@ public class TextSearchFacet implements Facet {
public TextSearchFacet() {
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("name"); writer.value(_config._name);
writer.key("columnName"); writer.value(_config._columnName);
writer.key("query"); writer.value(_config._query);
writer.key("mode"); writer.value(_config._mode);
writer.key("caseSensitive"); writer.value(_config._caseSensitive);
writer.key("invert"); writer.value(_config._invert);
writer.endObject();
@JsonProperty("name")
public String getName() {
return _config._name;
}
@JsonProperty("columnName")
public String getColumnName() {
return _config._columnName;
}
@JsonProperty("query")
public String getQuery() {
return _config._query;
}
@JsonProperty("mode")
public String getMode() {
return _config._mode;
}
@JsonProperty("caseSensitive")
public boolean isCaseSensitive() {
return _config._caseSensitive;
}
@JsonProperty("invert")
public boolean isInverted() {
return _config._invert;
}
public void initializeFromConfig(TextSearchFacetConfig config, Project project) {
_config = config;
@ -139,7 +136,7 @@ public class TextSearchFacet implements Facet {
_config._caseSensitive ? 0 : Pattern.CASE_INSENSITIVE);
} catch (java.util.regex.PatternSyntaxException e) {
PatternSyntaxExceptionParser err = new PatternSyntaxExceptionParser(e);
throw new JSONException(err.getUserMessage());
throw new IllegalArgumentException(err.getUserMessage());
}
} else if (!_config._caseSensitive) {
_query = _query.toLowerCase();

View File

@ -33,12 +33,10 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.browsing.facets;
import java.util.Properties;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.browsing.FilteredRecords;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RecordFilter;
@ -56,66 +54,39 @@ import com.google.refine.expr.MetaParser;
import com.google.refine.expr.ParsingException;
import com.google.refine.model.Column;
import com.google.refine.model.Project;
import com.google.refine.util.JSONUtilities;
public class TimeRangeFacet implements Facet {
/*
* Configuration, from the client side
*/
public static class TimeRangeFacetConfig implements FacetConfig {
@JsonProperty("name")
protected String _name; // name of facet
@JsonProperty("expression")
protected String _expression; // expression to compute numeric value(s) per row
@JsonProperty("columnName")
protected String _columnName; // column to base expression on, if any
protected double _from; // the numeric selection
protected double _to;
@JsonProperty(FROM)
protected double _from = 0; // the numeric selection
@JsonProperty(TO)
protected double _to = 0;
@JsonProperty("selectTime")
protected boolean _selectTime; // whether the time selection applies, default true
@JsonProperty("selectNonTime")
protected boolean _selectNonTime;
@JsonProperty("selectBlank")
protected boolean _selectBlank;
@JsonProperty("selectError")
protected boolean _selectError;
protected boolean _selected; // false if we're certain that all rows will match
// and there isn't any filtering to do
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("type"); writer.value("timerange");
writer.key("name"); writer.value(_name);
writer.key("expression"); writer.value(_expression);
writer.key("columnName"); writer.value(_columnName);
writer.key("selectTime"); writer.value(_selectTime);
writer.key("selectNonTime"); writer.value(_selectNonTime);
writer.key("selectBlank"); writer.value(_selectBlank);
writer.key("selectError"); writer.value(_selectError);
writer.key(FROM); writer.value((long)_from);
writer.key(TO); writer.value((long)_to);
writer.endObject();
}
@Override
public void initializeFromJSON(JSONObject o) throws JSONException {
_name = o.getString("name");
_expression = o.getString("expression");
_columnName = o.getString("columnName");
if (o.has(FROM) || o.has(TO)) {
_from = o.has(FROM) ? o.getDouble(FROM) : 0;
_to = o.has(TO) ? o.getDouble(TO) : 0;
_selected = true;
}
_selectTime = JSONUtilities.getBoolean(o, "selectTime", true);
_selectNonTime = JSONUtilities.getBoolean(o, "selectNonTime", true);
_selectBlank = JSONUtilities.getBoolean(o, "selectBlank", true);
_selectError = JSONUtilities.getBoolean(o, "selectError", true);
if (!_selectTime || !_selectNonTime || !_selectBlank || !_selectError) {
_selected = true;
}
}
// false if we're certain that all rows will match
// and there isn't any filtering to do
@JsonIgnore
protected boolean isSelected() {
return _from != 0 || _to != 0 || !_selectTime || !_selectNonTime || !_selectBlank || !_selectError;
};
@Override
public TimeRangeFacet apply(Project project) {
@ -123,6 +94,11 @@ public class TimeRangeFacet implements Facet {
facet.initializeFromConfig(this, project);
return facet;
}
@Override
public String getJsonType() {
return "timerange";
}
}
protected TimeRangeFacetConfig _config;
@ -142,14 +118,22 @@ public class TimeRangeFacet implements Facet {
/*
* Computed data
*/
@JsonProperty("baseTimeCount")
protected int _baseTimeCount;
@JsonProperty("baseNonTimeCount")
protected int _baseNonTimeCount;
@JsonProperty("baseBlankCount")
protected int _baseBlankCount;
@JsonProperty("baseErrorCount")
protected int _baseErrorCount;
@JsonProperty("timeCount")
protected int _timeCount;
@JsonProperty("nonTimeCount")
protected int _nonTimeCount;
@JsonProperty("blankCount")
protected int _blankCount;
@JsonProperty("errorCount")
protected int _errorCount;
protected static final String MIN = "min";
@ -157,49 +141,85 @@ public class TimeRangeFacet implements Facet {
protected static final String TO = "to";
protected static final String FROM = "from";
@Override
public void write(JSONWriter writer, Properties options) throws JSONException {
writer.object();
writer.key("name"); writer.value(_config._name);
writer.key("expression"); writer.value(_config._expression);
writer.key("columnName"); writer.value(_config._columnName);
if (_errorMessage != null) {
writer.key("error"); writer.value(_errorMessage);
} else {
if (!Double.isInfinite(_min) && !Double.isInfinite(_max)) {
writer.key(MIN); writer.value(_min);
writer.key(MAX); writer.value(_max);
writer.key("step"); writer.value(_step);
writer.key("bins"); writer.array();
for (int b : _bins) {
writer.value(b);
}
writer.endArray();
writer.key("baseBins"); writer.array();
for (int b : _baseBins) {
writer.value(b);
}
writer.endArray();
writer.key(FROM); writer.value(_config._from);
writer.key(TO); writer.value(_config._to);
}
writer.key("baseTimeCount"); writer.value(_baseTimeCount);
writer.key("baseNonTimeCount"); writer.value(_baseNonTimeCount);
writer.key("baseBlankCount"); writer.value(_baseBlankCount);
writer.key("baseErrorCount"); writer.value(_baseErrorCount);
writer.key("timeCount"); writer.value(_timeCount);
writer.key("nonTimeCount"); writer.value(_nonTimeCount);
writer.key("blankCount"); writer.value(_blankCount);
writer.key("errorCount"); writer.value(_errorCount);
@JsonProperty("name")
public String getName() {
return _config._name;
}
@JsonProperty("expression")
public String getExpression() {
return _config._expression;
}
@JsonProperty("columnName")
public String getColumnName() {
return _config._columnName;
}
@JsonProperty("error")
@JsonInclude(Include.NON_NULL)
public String getError() {
return _errorMessage;
}
@JsonProperty(MIN)
@JsonInclude(Include.NON_NULL)
public Double getMin() {
if(getError() == null) {
return _min;
}
writer.endObject();
return null;
}
@JsonProperty(MAX)
@JsonInclude(Include.NON_NULL)
public Double getMax() {
if(getError() == null) {
return _max;
}
return null;
}
@JsonProperty("step")
@JsonInclude(Include.NON_NULL)
public Double getStep() {
return _step;
}
@JsonProperty("bins")
@JsonInclude(Include.NON_NULL)
public int[] getBins() {
if (getError() == null) {
return _bins;
}
return null;
}
@JsonProperty("baseBins")
@JsonInclude(Include.NON_NULL)
public int[] getBaseBins() {
if (getError() == null) {
return _baseBins;
}
return null;
}
@JsonProperty(FROM)
@JsonInclude(Include.NON_NULL)
public Double getFrom() {
if (getError() == null) {
return _config._from;
}
return null;
}
@JsonProperty(TO)
@JsonInclude(Include.NON_NULL)
public Double getTo() {
if (getError() == null) {
return _config._to;
}
return null;
}
public void initializeFromConfig(TimeRangeFacetConfig config, Project project) {
@ -224,7 +244,7 @@ public class TimeRangeFacet implements Facet {
@Override
public RowFilter getRowFilter(Project project) {
if (_eval != null && _errorMessage == null && _config._selected) {
if (_eval != null && _errorMessage == null && _config.isSelected()) {
return new ExpressionTimeComparisonRowFilter(
getRowEvaluable(project), _config._selectTime, _config._selectNonTime, _config._selectBlank, _config._selectError) {
@ -294,7 +314,7 @@ public class TimeRangeFacet implements Facet {
_baseBlankCount = index.getBlankRowCount();
_baseErrorCount = index.getErrorRowCount();
if (_config._selected) {
if (_config.isSelected()) {
_config._from = Math.max(_config._from, _min);
_config._to = Math.min(_config._to, _max);
} else {

View File

@ -38,12 +38,11 @@ import java.time.OffsetDateTime;
import java.util.Collection;
import java.util.Properties;
import org.json.JSONArray;
import org.json.JSONException;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.google.refine.browsing.RowFilter;
import com.google.refine.expr.Evaluable;
import com.google.refine.expr.ExpressionUtils;
import com.google.refine.expr.util.JsonValueConverter;
import com.google.refine.model.Cell;
import com.google.refine.model.Project;
import com.google.refine.model.Row;
@ -122,17 +121,13 @@ public class ExpressionEqualRowFilter implements RowFilter {
}
}
return false;
} else if (value instanceof JSONArray) {
JSONArray a = (JSONArray) value;
int l = a.length();
} else if (value instanceof ArrayNode) {
ArrayNode a = (ArrayNode) value;
int l = a.size();
for (int i = 0; i < l; i++) {
try {
if (testValue(a.get(i))) {
return true;
}
} catch (JSONException e) {
// ignore
if (testValue(JsonValueConverter.convert(a.get(i)))) {
return true;
}
}
return false;
@ -165,17 +160,13 @@ public class ExpressionEqualRowFilter implements RowFilter {
}
}
return true;
} else if (value instanceof JSONArray) {
JSONArray a = (JSONArray) value;
int l = a.length();
} else if (value instanceof ArrayNode) {
ArrayNode a = (ArrayNode) value;
int l = a.size();
for (int i = 0; i < l; i++) {
try {
if (testValue(a.get(i))) {
return false;
}
} catch (JSONException e) {
// ignore
if (testValue(JsonValueConverter.convert(a.get(i)))) {
return false;
}
}
return true;

View File

@ -36,12 +36,11 @@ package com.google.refine.browsing.filters;
import java.util.Collection;
import java.util.Properties;
import org.json.JSONArray;
import org.json.JSONException;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.google.refine.browsing.RowFilter;
import com.google.refine.browsing.util.RowEvaluable;
import com.google.refine.expr.ExpressionUtils;
import com.google.refine.expr.util.JsonValueConverter;
import com.google.refine.model.Project;
import com.google.refine.model.Row;
@ -93,17 +92,13 @@ abstract public class ExpressionNumberComparisonRowFilter implements RowFilter {
}
}
return false;
} else if (value instanceof JSONArray) {
JSONArray a = (JSONArray) value;
int l = a.length();
} else if (value instanceof ArrayNode) {
ArrayNode a = (ArrayNode) value;
int l = a.size();
for (int i = 0; i < l; i++) {
try {
if (checkValue(a.get(i))) {
return true;
}
} catch (JSONException e) {
// ignore
if (checkValue(JsonValueConverter.convert(a.get(i)))) {
return true;
}
}
return false;

View File

@ -36,12 +36,11 @@ package com.google.refine.browsing.filters;
import java.util.Collection;
import java.util.Properties;
import org.json.JSONArray;
import org.json.JSONException;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.google.refine.browsing.RowFilter;
import com.google.refine.expr.Evaluable;
import com.google.refine.expr.ExpressionUtils;
import com.google.refine.expr.util.JsonValueConverter;
import com.google.refine.model.Cell;
import com.google.refine.model.Project;
import com.google.refine.model.Row;
@ -86,17 +85,13 @@ abstract public class ExpressionStringComparisonRowFilter implements RowFilter {
}
}
return invert;
} else if (value instanceof JSONArray) {
JSONArray a = (JSONArray) value;
int l = a.length();
} else if (value instanceof ArrayNode) {
ArrayNode a = (ArrayNode) value;
int l = a.size();
for (int i = 0; i < l; i++) {
try {
if (checkValue(a.get(i).toString())) {
return !invert;
}
} catch (JSONException e) {
// ignore
if (checkValue(JsonValueConverter.convert(a.get(i)).toString())) {
return !invert;
}
}
return invert;

View File

@ -0,0 +1,27 @@
package com.google.refine.clustering;
import java.io.Serializable;
import java.util.Comparator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class ClusteredEntry {
@JsonProperty("v")
protected final Serializable value;
@JsonProperty("c")
protected final int count;
public ClusteredEntry(
Serializable value,
int count) {
this.value = value;
this.count = count;
}
public static Comparator<ClusteredEntry> comparator = new Comparator<ClusteredEntry>() {
@Override
public int compare(ClusteredEntry o1, ClusteredEntry o2) {
return o2.count - o1.count;
}
};
}

View File

@ -33,12 +33,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.clustering;
import com.google.refine.Jsonizable;
import com.google.refine.browsing.Engine;
import com.google.refine.model.Column;
import com.google.refine.model.Project;
public abstract class Clusterer implements Jsonizable {
public abstract class Clusterer {
protected Project _project;
protected int _colindex;

View File

@ -1,8 +1,11 @@
package com.google.refine.clustering;
import org.json.JSONObject;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.google.refine.clustering.binning.BinningClusterer.BinningClustererConfig;
import com.google.refine.clustering.knn.kNNClusterer.kNNClustererConfig;
import com.google.refine.model.Project;
/**
@ -10,26 +13,37 @@ import com.google.refine.model.Project;
* @author Antonin Delpeuch
*
*/
public abstract class ClustererConfig implements Jsonizable {
@JsonTypeInfo(
use=JsonTypeInfo.Id.NAME,
include=JsonTypeInfo.As.PROPERTY,
property="type")
@JsonSubTypes({
@Type(value = kNNClustererConfig.class, name = "knn"),
@Type(value = BinningClustererConfig.class, name = "binning") })
public abstract class ClustererConfig {
protected String columnName;
/**
* Reads the configuration from a JSON payload (TODO: delete)
* @param o
*/
public void initializeFromJSON(JSONObject o) {
columnName = o.getString("column");
}
@JsonProperty("column")
public String getColumnName() {
return columnName;
}
@JsonProperty("column")
public void setColumnName(String name) {
columnName = name;
}
/**
* Instantiate the configuration on a particular project.
* @param project
* @return
*/
public abstract Clusterer apply(Project project);
/**
* Type string used in Json serialization
*/
@JsonProperty("type")
public abstract String getType();
}

View File

@ -41,19 +41,21 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.TreeMap;
import java.util.stream.Collectors;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
import com.google.refine.browsing.Engine;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RowVisitor;
import com.google.refine.clustering.ClusteredEntry;
import com.google.refine.clustering.Clusterer;
import com.google.refine.clustering.ClustererConfig;
import com.google.refine.model.Cell;
@ -63,43 +65,39 @@ import com.google.refine.model.Row;
public class BinningClusterer extends Clusterer {
public static class BinningClustererConfig extends ClustererConfig {
@JsonIgnore
private String _keyerName;
@JsonIgnore
private Keyer _keyer;
private BinningParameters _parameters;
@Override
public void initializeFromJSON(JSONObject o) {
super.initializeFromJSON(o);
_keyerName = o.getString("function");
_keyer = _keyers.get(_keyerName.toLowerCase());
if(o.has("params")) {
_parameters = BinningParameters.reconstruct(o.getJSONObject("params"));
} else {
_parameters = null;
}
}
@JsonIgnore
private BinningParameters _parameters = null;
@JsonIgnore
public Keyer getKeyer() {
return _keyer;
}
@JsonProperty("function")
public void setKeyer(String keyerName) {
_keyerName = keyerName;
_keyer = _keyers.get(_keyerName.toLowerCase());
}
@JsonProperty("function")
public String getKeyerName() {
return _keyerName;
}
@JsonProperty("params")
@JsonInclude(Include.NON_NULL)
public BinningParameters getParameters() {
return _parameters;
}
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("function"); writer.value(_keyerName);
writer.key("type"); writer.value("binning");
writer.key("column"); writer.value(getColumnName());
if(_parameters != null) {
writer.key("params");
_parameters.write(writer, options);
}
writer.endObject();
@JsonProperty("params")
public void setParameters(BinningParameters params) {
_parameters = params;
}
@Override
@ -108,28 +106,18 @@ public class BinningClusterer extends Clusterer {
clusterer.initializeFromConfig(project, this);
return clusterer;
}
@Override
public String getType() {
return "binning";
}
}
public static class BinningParameters implements Jsonizable {
public int ngramSize;
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
if(ngramSize > 0) {
writer.key("ngram-size");
writer.value(ngramSize);
}
writer.endObject();
}
public static BinningParameters reconstruct(JSONObject o) {
BinningParameters parameters = new BinningParameters();
parameters.ngramSize = o.has("ngram-size") ? o.getInt("ngram-size") : 0;
return parameters;
}
public static class BinningParameters {
@JsonProperty("ngram-size")
@JsonInclude(Include.NON_DEFAULT)
public int ngramSize = 0;
}
protected Keyer _keyer;
@ -255,25 +243,21 @@ public class BinningClusterer extends Clusterer {
Collections.sort(_clusters, new SizeComparator());
}
@Override
public void write(JSONWriter writer, Properties options) throws JSONException {
protected static Map<String,Object> entryToMap(Entry<String,Integer> entry) {
Map<String,Object> map = new HashMap<>();
map.put("v", entry.getKey());
map.put("c", entry.getValue());
return map;
}
@JsonValue
public List<List<ClusteredEntry>> getJsonRepresentation() {
EntriesComparator c = new EntriesComparator();
writer.array();
for (Map<String,Integer> m : _clusters) {
if (m.size() > 1) {
writer.array();
List<Entry<String,Integer>> entries = new ArrayList<Entry<String,Integer>>(m.entrySet());
Collections.sort(entries,c);
for (Entry<String,Integer> e : entries) {
writer.object();
writer.key("v"); writer.value(e.getKey());
writer.key("c"); writer.value(e.getValue());
writer.endObject();
}
writer.endArray();
}
}
writer.endArray();
return _clusters.stream()
.map(m -> m.entrySet().stream()
.sorted(c)
.map(e -> new ClusteredEntry(e.getKey(), e.getValue()))
.collect(Collectors.toList()))
.collect(Collectors.toList());
}
}

View File

@ -37,7 +37,7 @@ import java.util.Iterator;
import java.util.TreeSet;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
public class FingerprintKeyer extends Keyer {

View File

@ -34,27 +34,25 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.clustering.knn;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
import com.google.refine.browsing.Engine;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RowVisitor;
import com.google.refine.clustering.ClusteredEntry;
import com.google.refine.clustering.Clusterer;
import com.google.refine.clustering.ClustererConfig;
import com.google.refine.model.Cell;
@ -76,42 +74,38 @@ import edu.mit.simile.vicino.distances.PPMDistance;
public class kNNClusterer extends Clusterer {
public static class kNNClustererConfig extends ClustererConfig {
@JsonIgnore
private String _distanceStr;
@JsonIgnore
private Distance _distance;
private kNNClustererConfigParameters _parameters;
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("function"); writer.value(_distanceStr);
writer.key("type"); writer.value("knn");
writer.key("column"); writer.value(getColumnName());
if(_parameters != null) {
writer.key("params");
_parameters.write(writer, options);
}
writer.endObject();
}
public void initializeFromJSON(JSONObject o) {
super.initializeFromJSON(o);
_distanceStr = o.getString("function");
_distance = _distances.get(_distanceStr.toLowerCase());
if(o.has("params")) {
_parameters = kNNClustererConfigParameters.reconstruct(o.getJSONObject("params"));
} else {
_parameters = null;
}
}
@JsonIgnore
private kNNClustererConfigParameters _parameters = null;
@JsonIgnore
public Distance getDistance() {
return _distance;
}
@JsonProperty("function")
public void setDistance(String distanceStr) {
_distanceStr = distanceStr;
_distance = _distances.get(_distanceStr.toLowerCase());
}
@JsonProperty("function")
public String getDistanceStr() {
return _distanceStr;
}
@JsonProperty("params")
public kNNClustererConfigParameters getParameters() {
return _parameters;
}
@JsonProperty("params")
public void setParameters(kNNClustererConfigParameters params) {
_parameters = params;
}
@Override
public kNNClusterer apply(Project project) {
@ -119,35 +113,21 @@ public class kNNClusterer extends Clusterer {
clusterer.initializeFromConfig(project, this);
return clusterer;
}
@Override
public String getType() {
return "knn";
}
}
public static class kNNClustererConfigParameters implements Jsonizable {
public static class kNNClustererConfigParameters {
public static final double defaultRadius = 1.0d;
public static final int defaultBlockingNgramSize = 6;
@JsonProperty("radius")
public double radius = defaultRadius;
@JsonProperty("blocking-ngram-size")
public int blockingNgramSize = defaultBlockingNgramSize;
@Override
public void write(JSONWriter writer, Properties options)
throws JSONException {
writer.object();
writer.key("radius"); writer.value(radius);
writer.key("blocking-ngram-size");
writer.value(blockingNgramSize);
writer.endObject();
}
public static kNNClustererConfigParameters reconstruct(JSONObject o) {
kNNClustererConfigParameters params = new kNNClustererConfigParameters();
if(o.has("radius")) {
params.radius = o.getDouble("radius");
}
if(o.has("blocking-ngram-size")) {
params.blockingNgramSize = o.getInt("blocking-ngram-size");
}
return params;
}
}
private Distance _distance;
@ -278,28 +258,19 @@ public class kNNClusterer extends Clusterer {
}
}
@Override
public void write(JSONWriter writer, Properties options) throws JSONException {
writer.array();
for (Set<Serializable> m : _clusters) {
if (m.size() > 1) {
Map<Serializable,Integer> internal_counts = new HashMap<Serializable,Integer>();
for (Serializable s : m) {
internal_counts.put(s,_counts.get(s));
}
List<Entry<Serializable,Integer>> values = new ArrayList<Entry<Serializable,Integer>>(internal_counts.entrySet());
Collections.sort(values, new ValuesComparator());
writer.array();
for (Entry<Serializable,Integer> e : values) {
writer.object();
writer.key("v"); writer.value(e.getKey());
writer.key("c"); writer.value(e.getValue());
writer.endObject();
}
writer.endArray();
}
}
writer.endArray();
protected List<ClusteredEntry> getClusteredEntries(Set<Serializable> s) {
return s.stream()
.map(e -> new ClusteredEntry(e, _counts.get(e)))
.sorted(ClusteredEntry.comparator)
.collect(Collectors.toList());
}
@JsonValue
public List<List<ClusteredEntry>> getJsonRepresentation() {
return _clusters.stream()
.filter(m -> m.size() > 1)
.map(m -> getClusteredEntries(m))
.collect(Collectors.toList());
}
private void count(Serializable s) {

View File

@ -44,20 +44,18 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.VelocityContext;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonGenerator;
import com.google.refine.ProjectManager;
import com.google.refine.ProjectMetadata;
import com.google.refine.RefineServlet;
import com.google.refine.browsing.Engine;
import com.google.refine.browsing.EngineConfig;
import com.google.refine.history.HistoryEntry;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.process.Process;
import com.google.refine.util.ParsingUtilities;
@ -116,20 +114,14 @@ public abstract class Command {
* @return
* @throws JSONException
*/
static protected EngineConfig getEngineConfig(HttpServletRequest request)
throws JSONException {
static protected EngineConfig getEngineConfig(HttpServletRequest request) {
if (request == null) {
throw new IllegalArgumentException("parameter 'request' should not be null");
}
String json = request.getParameter("engine");
try{
return (json == null) ? null :
EngineConfig.reconstruct(ParsingUtilities.evaluateJsonStringToObject(json));
} catch (JSONException e){
logger.debug( json + " could not be parsed to JSON");
return null;
}
return (json == null) ? null :
EngineConfig.reconstruct(json);
}
/**
@ -196,7 +188,7 @@ public abstract class Command {
* @return
* @throws ServletException
*/
protected ProjectMetadata getMetadata(HttpServletRequest request) throws ServletException {
protected ProjectMetadata getProjectMetadata(HttpServletRequest request) throws ServletException {
if (request == null) {
throw new IllegalArgumentException("parameter 'request' should not be null");
}
@ -222,20 +214,15 @@ public abstract class Command {
}
return def;
}
static protected JSONObject getJsonParameter(HttpServletRequest request, String name) {
if (request == null) {
throw new IllegalArgumentException("parameter 'request' should not be null");
protected static class HistoryEntryResponse {
@JsonProperty("code")
protected String getCode() { return "ok"; }
@JsonProperty("historyEntry")
protected HistoryEntry historyEntry;
protected HistoryEntryResponse(HistoryEntry entry) {
historyEntry = entry;
}
String value = request.getParameter(name);
if (value != null) {
try {
return ParsingUtilities.evaluateJsonStringToObject(value);
} catch (JSONException e) {
logger.warn("error getting json parameter",e);
}
}
return null;
}
static protected void performProcessAndRespond(
@ -244,23 +231,18 @@ public abstract class Command {
Project project,
Process process
) throws Exception {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
HistoryEntry historyEntry = project.processManager.queueProcess(process);
if (historyEntry != null) {
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
Properties options = new Properties();
writer.object();
writer.key("code"); writer.value("ok");
writer.key("historyEntry"); historyEntry.write(writer, options);
writer.endObject();
ParsingUtilities.defaultWriter.writeValue(w, new HistoryEntryResponse(historyEntry));
w.flush();
w.close();
} else {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
respond(response, "{ \"code\" : \"pending\" }");
}
}
@ -281,53 +263,41 @@ public abstract class Command {
}
static protected void respond(HttpServletResponse response, String status, String message)
throws IOException, JSONException {
throws IOException {
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
writer.object();
writer.key("status"); writer.value(status);
writer.key("message"); writer.value(message);
writer.endObject();
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
writer.writeStartObject();
writer.writeStringField("status", status);
writer.writeStringField("message", message);
writer.writeEndObject();
writer.flush();
writer.close();
w.flush();
w.close();
}
static protected void respondJSON(HttpServletResponse response, Jsonizable o)
throws IOException, JSONException {
static protected void respondJSON(HttpServletResponse response, Object o)
throws IOException {
respondJSON(response, o, new Properties());
}
static protected void respondJSON(
HttpServletResponse response, Jsonizable o, Properties options)
throws IOException, JSONException {
HttpServletResponse response, Object o, Properties options)
throws IOException {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
response.setHeader("Cache-Control", "no-cache");
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
ParsingUtilities.defaultWriter.writeValue(w, o);
o.write(writer, options);
w.flush();
w.close();
}
static protected void respondJSONObject(
HttpServletResponse response, JSONObject o)
throws IOException, JSONException {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
response.setHeader("Cache-Control", "no-cache");
Writer w = response.getWriter();
w.append(o.toString());
w.flush();
w.close();
}
static protected void respondException(HttpServletResponse response, Exception e)
throws IOException, ServletException {
@ -337,25 +307,26 @@ public abstract class Command {
throw new ServletException("Response object can't be null");
}
try {
JSONObject o = new JSONObject();
o.put("code", "error");
o.put("message", e.getMessage());
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
pw.flush();
sw.flush();
o.put("stack", sw.toString());
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
respond(response, o.toString());
} catch (JSONException e1) {
e.printStackTrace(response.getWriter());
}
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
pw.flush();
sw.flush();
Writer w = response.getWriter();
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
writer.writeStartObject();
writer.writeStringField("code", "error");
writer.writeStringField("message", e.getMessage());
writer.writeStringField("stack", sw.toString());
writer.writeEndObject();
writer.flush();
writer.close();
w.flush();
w.close();
}
protected void respondWithErrorPage(

View File

@ -34,14 +34,13 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.commands;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONWriter;
import com.google.refine.ProjectManager;
import com.google.refine.model.Project;
import com.google.refine.preference.PreferenceStore;
@ -56,26 +55,16 @@ public class GetAllPreferencesCommand extends Command {
project.getMetadata().getPreferenceStore() :
ProjectManager.singleton.getPreferenceStore();
try {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
JSONWriter writer = new JSONWriter(response.getWriter());
writer.object();
for (String key : ps.getKeys()) {
Object pref = ps.get(key);
if (pref == null || pref instanceof String || pref instanceof Number || pref instanceof Boolean) {
writer.key(key);
writer.value(pref);
}
Map<String, Object> map = new HashMap<>();
for (String key : ps.getKeys()) {
Object pref = ps.get(key);
if (pref == null || pref instanceof String || pref instanceof Number || pref instanceof Boolean) {
map.put(key, pref);
}
writer.endObject();
} catch (JSONException e) {
respondException(response, e);
}
respondJSON(response, map);
}
}

View File

@ -34,52 +34,40 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.commands;
import java.io.IOException;
import java.util.Properties;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONWriter;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.ProjectManager;
import com.google.refine.model.Project;
import com.google.refine.preference.PreferenceStore;
import com.google.refine.preference.TopList;
public class GetPreferenceCommand extends Command {
protected static class PreferenceValue {
@JsonProperty("value")
protected Object value;
protected PreferenceValue(Object pref) {
if (pref == null || pref instanceof String || pref instanceof Number || pref instanceof Boolean || pref instanceof TopList) {
value = pref;
} else {
value = pref.toString();
}
}
}
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
Project project = request.getParameter("project") != null ? getProject(request) : null;
PreferenceStore ps = ProjectManager.singleton.getPreferenceStore();
String prefName = request.getParameter("name");
Object pref = ps.get(prefName);
try {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
JSONWriter writer = new JSONWriter(response.getWriter());
writer.object();
writer.key("value");
if (pref == null || pref instanceof String || pref instanceof Number || pref instanceof Boolean) {
writer.value(pref);
} else if (pref instanceof TopList) {
TopList tl = (TopList) pref;
tl.write(writer, new Properties());
} else {
writer.value(pref.toString());
}
writer.endObject();
} catch (JSONException e) {
respondException(response, e);
}
respondJSON(response, new PreferenceValue(pref));
}
}

View File

@ -39,27 +39,24 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.RefineServlet;
public class GetVersionCommand extends Command {
    /**
     * Bean serialized by Jackson to report the running OpenRefine version.
     * Declared static: it reads only RefineServlet constants, never the
     * enclosing command instance.
     */
    protected static class VersionResponse {
        @JsonProperty("version")
        public String version = RefineServlet.VERSION;
        @JsonProperty("revision")
        public String revision = RefineServlet.REVISION;
        @JsonProperty("full_version")
        public String full_version = RefineServlet.FULL_VERSION;
        @JsonProperty("full_name")
        public String full_name = RefineServlet.FULLNAME;
    }

    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // Single Jackson-based response; the legacy org.json JSONObject path was
        // removed (it duplicated the response body and depended on org.json).
        // respondJSON sets the UTF-8 charset and JSON content type itself.
        respondJSON(response, new VersionResponse());
    }
}

View File

@ -37,6 +37,8 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.RefineServlet;
abstract public class HttpHeadersSupport {
@ -44,8 +46,11 @@ abstract public class HttpHeadersSupport {
static final protected Map<String, HttpHeaderInfo> s_headers = new HashMap<String, HttpHeaderInfo>();
static public class HttpHeaderInfo {
@JsonIgnore
final public String name;
@JsonProperty("header")
final public String header;
@JsonProperty("defaultValue")
final public String defaultValue;
HttpHeaderInfo(String header, String defaultValue) {

View File

@ -11,13 +11,10 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.VelocityContext;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.refine.Jsonizable;
import com.fasterxml.jackson.core.JsonGenerator;
import com.google.refine.RefineServlet;
import com.google.refine.util.ParsingUtilities;
@ -43,36 +40,32 @@ abstract public class HttpUtilities {
throws IOException {
Writer w = response.getWriter();
try {
JSONWriter writer = new JSONWriter(w);
writer.object();
writer.key("status"); writer.value(status);
writer.key("message"); writer.value(message);
writer.endObject();
w.flush();
w.close();
} catch (JSONException e) {
// This can never occur
}
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
writer.writeStartObject();
writer.writeStringField("status", status);
writer.writeStringField("message", message);
writer.writeEndObject();
writer.flush();
writer.close();
w.flush();
w.close();
}
static public void respondJSON(HttpServletResponse response, Jsonizable o)
throws IOException, JSONException {
static public void respondJSON(HttpServletResponse response, Object o)
throws IOException {
respondJSON(response, o, new Properties());
}
static public void respondJSON(
HttpServletResponse response, Jsonizable o, Properties options)
throws IOException, JSONException {
HttpServletResponse response, Object o, Properties options)
throws IOException {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
Writer w = response.getWriter();
JSONWriter writer = new JSONWriter(w);
o.write(writer, options);
ParsingUtilities.defaultWriter.writeValue(w, o);
w.flush();
w.close();
}
@ -86,25 +79,26 @@ abstract public class HttpUtilities {
throw new ServletException("Response object can't be null");
}
try {
JSONObject o = new JSONObject();
o.put("code", "error");
o.put("message", e.getMessage());
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
pw.flush();
sw.flush();
o.put("stack", sw.toString());
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
respond(response, o.toString());
} catch (JSONException e1) {
e.printStackTrace(response.getWriter());
}
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
pw.flush();
sw.flush();
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
Writer w = response.getWriter();
JsonGenerator writer = ParsingUtilities.mapper.getFactory().createGenerator(w);
writer.writeStartObject();
writer.writeStringField("code", "error");
writer.writeStringField("message", e.getMessage());
writer.writeStringField("stack", sw.toString());
writer.writeEndObject();
writer.flush();
writer.close();
w.flush();
w.close();
}
static public void redirect(HttpServletResponse response, String url) throws IOException {
@ -123,21 +117,6 @@ abstract public class HttpUtilities {
return def;
}
static public JSONObject getJsonParameter(HttpServletRequest request, String name) {
if (request == null) {
throw new IllegalArgumentException("parameter 'request' should not be null");
}
String value = request.getParameter(name);
if (value != null) {
try {
return ParsingUtilities.evaluateJsonStringToObject(value);
} catch (JSONException e) {
logger.warn("Error getting json parameter", e);
}
}
return null;
}
static public void respondWithErrorPage(
RefineServlet servlet,
HttpServletRequest request,

View File

@ -39,12 +39,11 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONTokener;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.refine.ProjectManager;
import com.google.refine.model.Project;
import com.google.refine.preference.PreferenceStore;
import com.google.refine.util.ParsingUtilities;
public class SetPreferenceCommand extends Command {
@Override
@ -52,18 +51,20 @@ public class SetPreferenceCommand extends Command {
throws ServletException, IOException {
Project project = request.getParameter("project") != null ? getProject(request) : null;
PreferenceStore ps = ProjectManager.singleton.getPreferenceStore();
PreferenceStore ps = project != null ?
project.getMetadata().getPreferenceStore() :
ProjectManager.singleton.getPreferenceStore();
String prefName = request.getParameter("name");
String valueString = request.getParameter("value");
try {
Object o = valueString == null ? null : new JSONTokener(valueString).nextValue();
JsonNode o = valueString == null ? null : ParsingUtilities.mapper.readTree(valueString);
ps.put(prefName, PreferenceStore.loadObject(o));
respond(response, "{ \"code\" : \"ok\" }");
} catch (JSONException e) {
} catch (IOException e) {
respondException(response, e);
}
}

View File

@ -42,14 +42,14 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import oauth.signpost.OAuthConsumer;
import oauth.signpost.OAuthProvider;
import com.google.refine.commands.Command;
import com.google.refine.oauth.Credentials;
import com.google.refine.oauth.OAuthUtilities;
import com.google.refine.oauth.Provider;
import oauth.signpost.OAuthConsumer;
import oauth.signpost.OAuthProvider;
public class AuthorizeCommand extends Command {
private static final String OAUTH_VERIFIER_PARAM = "oauth_verifier";

View File

@ -39,17 +39,15 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.refine.browsing.Engine;
import com.google.refine.clustering.Clusterer;
import com.google.refine.clustering.ClustererConfig;
import com.google.refine.clustering.binning.BinningClusterer.BinningClustererConfig;
import com.google.refine.clustering.knn.kNNClusterer.kNNClustererConfig;
import com.google.refine.commands.Command;
import com.google.refine.model.Project;
import com.google.refine.util.ParsingUtilities;
public class ComputeClustersCommand extends Command {
@ -63,24 +61,15 @@ public class ComputeClustersCommand extends Command {
long start = System.currentTimeMillis();
Project project = getProject(request);
Engine engine = getEngine(request, project);
JSONObject clusterer_conf = getJsonParameter(request,"clusterer");
String clusterer_conf = request.getParameter("clusterer");
ClustererConfig clustererConfig = ParsingUtilities.mapper.readValue(clusterer_conf, ClustererConfig.class);
String type = clusterer_conf.has("type") ? clusterer_conf.getString("type") : "binning";
ClustererConfig clustererConfig = null;
if ("knn".equals(type)) {
clustererConfig = new kNNClustererConfig();
} else {
clustererConfig = new BinningClustererConfig();
}
clustererConfig.initializeFromJSON(clusterer_conf);
Clusterer clusterer = clustererConfig.apply(project);
clusterer.computeClusters(engine);
respondJSON(response, clusterer);
logger.info("computed clusters [{},{}] in {}ms", new Object[] { type, clusterer_conf.getString("function"), Long.toString(System.currentTimeMillis() - start) });
logger.info("computed clusters [{}] in {}ms", new Object[] { clustererConfig.getType(), Long.toString(System.currentTimeMillis() - start) });
} catch (Exception e) {
respondException(response, e);
}

View File

@ -44,16 +44,14 @@ import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.browsing.Engine;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.facets.ScatterplotDrawingRowVisitor;
import com.google.refine.browsing.facets.ScatterplotFacet;
import com.google.refine.browsing.facets.ScatterplotFacet.ScatterplotFacetConfig;
import com.google.refine.browsing.util.NumericBinIndex;
import com.google.refine.commands.Command;
import com.google.refine.expr.Evaluable;
@ -61,6 +59,7 @@ import com.google.refine.expr.MetaParser;
import com.google.refine.expr.ParsingException;
import com.google.refine.model.Column;
import com.google.refine.model.Project;
import com.google.refine.util.ParsingUtilities;
public class GetScatterplotCommand extends Command {
@ -75,7 +74,9 @@ public class GetScatterplotCommand extends Command {
Project project = getProject(request);
Engine engine = getEngine(request, project);
JSONObject conf = getJsonParameter(request,"plotter");
PlotterConfig conf = ParsingUtilities.mapper.readValue(
request.getParameter("plotter"),
PlotterConfig.class);
response.setHeader("Content-Type", "image/png");
@ -95,7 +96,32 @@ public class GetScatterplotCommand extends Command {
}
}
public void draw(OutputStream output, Project project, Engine engine, JSONObject o) throws IOException, JSONException {
// Jackson-bound bean for the "plotter" request parameter. Each field's
// initializer is the default used when the corresponding key is absent
// from the JSON payload. Property names are the ScatterplotFacet constants.
protected static class PlotterConfig {
// output image edge length, in pixels
@JsonProperty(ScatterplotFacet.SIZE)
public int size = 100;
// dot size passed to the drawing visitor — units relative to the plot; TODO confirm
@JsonProperty(ScatterplotFacet.DOT)
double dot = 100;
// x-axis dimension mode; defaults to ScatterplotFacet.LIN (linear)
@JsonProperty(ScatterplotFacet.DIM_X)
public int dim_x = ScatterplotFacet.LIN;
// y-axis dimension mode; defaults to ScatterplotFacet.LIN (linear)
@JsonProperty(ScatterplotFacet.DIM_Y)
public int dim_y = ScatterplotFacet.LIN;
@JsonProperty(ScatterplotFacet.ROTATION)
public int rotation = ScatterplotFacet.NO_ROTATION;
// dot color as a hex RGB string (parsed with Integer.parseInt(..., 16))
@JsonProperty(ScatterplotFacet.COLOR)
public String color_str = "000000";
// optional base-layer color (hex RGB); null means no base layer is drawn
@JsonProperty(ScatterplotFacet.BASE_COLOR)
public String base_color_str = null;
// empty column name means "no column selected" for that axis
@JsonProperty(ScatterplotFacet.X_COLUMN_NAME)
public String columnName_x = "";
// GREL expression evaluated per cell to obtain the x coordinate
@JsonProperty(ScatterplotFacet.X_EXPRESSION)
public String expression_x = "value";
@JsonProperty(ScatterplotFacet.Y_COLUMN_NAME)
public String columnName_y = "";
// GREL expression evaluated per cell to obtain the y coordinate
@JsonProperty(ScatterplotFacet.Y_EXPRESSION)
public String expression_y = "value";
}
public void draw(OutputStream output, Project project, Engine engine, PlotterConfig o) throws IOException {
double min_x = 0;
double min_y = 0;
@ -108,26 +134,12 @@ public class GetScatterplotCommand extends Command {
Evaluable eval_x = null;
Evaluable eval_y = null;
int size = (o.has(ScatterplotFacet.SIZE)) ? o.getInt(ScatterplotFacet.SIZE) : 100;
double dot = (o.has(ScatterplotFacet.DOT)) ? o.getDouble(ScatterplotFacet.DOT) : 100;
Color color = new Color(Integer.parseInt(o.color_str,16));
int dim_x = (o.has(ScatterplotFacet.DIM_X)) ? ScatterplotFacet.getAxisDim(o.getString(ScatterplotFacet.DIM_X)) : ScatterplotFacet.LIN;
int dim_y = (o.has(ScatterplotFacet.DIM_Y)) ? ScatterplotFacet.getAxisDim(o.getString(ScatterplotFacet.DIM_Y)) : ScatterplotFacet.LIN;
int rotation = (o.has(ScatterplotFacet.ROTATION)) ? ScatterplotFacetConfig.getRotation(o.getString(ScatterplotFacet.ROTATION)) : ScatterplotFacet.NO_ROTATION;
Color base_color = o.base_color_str != null ? new Color(Integer.parseInt(o.base_color_str,16)) : null;
String color_str = (o.has(ScatterplotFacet.COLOR)) ? o.getString(ScatterplotFacet.COLOR) : "000000";
Color color = new Color(Integer.parseInt(color_str,16));
String base_color_str = (o.has(ScatterplotFacet.BASE_COLOR)) ? o.getString(ScatterplotFacet.BASE_COLOR) : null;
Color base_color = base_color_str != null ? new Color(Integer.parseInt(base_color_str,16)) : null;
String columnName_x = o.getString(ScatterplotFacet.X_COLUMN_NAME);
String expression_x = (o.has(ScatterplotFacet.X_EXPRESSION)) ? o.getString(ScatterplotFacet.X_EXPRESSION) : "value";
if (columnName_x.length() > 0) {
Column x_column = project.columnModel.getColumnByName(columnName_x);
if (o.columnName_x.length() > 0) {
Column x_column = project.columnModel.getColumnByName(o.columnName_x);
if (x_column != null) {
columnIndex_x = x_column.getCellIndex();
}
@ -136,16 +148,13 @@ public class GetScatterplotCommand extends Command {
}
try {
eval_x = MetaParser.parse(expression_x);
eval_x = MetaParser.parse(o.expression_x);
} catch (ParsingException e) {
logger.warn("error parsing expression", e);
}
String columnName_y = o.getString(ScatterplotFacet.Y_COLUMN_NAME);
String expression_y = (o.has(ScatterplotFacet.Y_EXPRESSION)) ? o.getString(ScatterplotFacet.Y_EXPRESSION) : "value";
if (columnName_y.length() > 0) {
Column y_column = project.columnModel.getColumnByName(columnName_y);
if (o.columnName_y.length() > 0) {
Column y_column = project.columnModel.getColumnByName(o.columnName_y);
if (y_column != null) {
columnIndex_y = y_column.getCellIndex();
}
@ -154,7 +163,7 @@ public class GetScatterplotCommand extends Command {
}
try {
eval_y = MetaParser.parse(expression_y);
eval_y = MetaParser.parse(o.expression_y);
} catch (ParsingException e) {
logger.warn("error parsing expression", e);
}
@ -162,20 +171,18 @@ public class GetScatterplotCommand extends Command {
NumericBinIndex index_x = null;
NumericBinIndex index_y = null;
String col_x_name = o.getString(ScatterplotFacet.X_COLUMN_NAME);
Column column_x = project.columnModel.getColumnByName(col_x_name);
Column column_x = project.columnModel.getColumnByName(o.columnName_x);
if (column_x != null) {
columnIndex_x = column_x.getCellIndex();
index_x = ScatterplotFacet.getBinIndex(project, column_x, eval_x, expression_x);
index_x = ScatterplotFacet.getBinIndex(project, column_x, eval_x, o.expression_x);
min_x = index_x.getMin();
max_x = index_x.getMax();
}
String col_y_name = o.getString(ScatterplotFacet.Y_COLUMN_NAME);
Column column_y = project.columnModel.getColumnByName(col_y_name);
Column column_y = project.columnModel.getColumnByName(o.columnName_y);
if (column_y != null) {
columnIndex_y = column_y.getCellIndex();
index_y = ScatterplotFacet.getBinIndex(project, column_y, eval_y, expression_y);
index_y = ScatterplotFacet.getBinIndex(project, column_y, eval_y, o.expression_y);
min_y = index_y.getMin();
max_y = index_y.getMax();
}
@ -183,7 +190,7 @@ public class GetScatterplotCommand extends Command {
if (index_x != null && index_y != null && index_x.isNumeric() && index_y.isNumeric()) {
ScatterplotDrawingRowVisitor drawer = new ScatterplotDrawingRowVisitor(
columnIndex_x, columnIndex_y, min_x, max_x, min_y, max_y,
size, dim_x, dim_y, rotation, dot, color
o.size, o.dim_x, o.dim_y, o.rotation, o.dot, color
);
if (base_color != null) {

View File

@ -35,14 +35,14 @@ package com.google.refine.commands.cell;
import java.io.IOException;
import java.io.Serializable;
import java.util.Properties;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONWriter;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.refine.commands.Command;
import com.google.refine.history.Change;
import com.google.refine.history.HistoryEntry;
@ -55,6 +55,32 @@ import com.google.refine.util.ParsingUtilities;
import com.google.refine.util.Pool;
public class EditOneCellCommand extends Command {
// Jackson-serialized response for a completed single-cell edit. Optional
// fields (historyEntry, cell, pool) are omitted from the JSON when null
// via @JsonInclude(Include.NON_NULL).
protected static class EditResult {
// status string, e.g. "ok" for a completed edit
@JsonProperty("code")
protected String code;
// history entry recording the edit; omitted when null
@JsonProperty("historyEntry")
@JsonInclude(Include.NON_NULL)
protected HistoryEntry historyEntry;
// the new cell contents, so the client can re-render immediately; omitted when null
@JsonProperty("cell")
@JsonInclude(Include.NON_NULL)
protected Cell cell;
// recon pool referenced by the cell; omitted when null
@JsonProperty("pool")
@JsonInclude(Include.NON_NULL)
protected Pool pool;
protected EditResult(
String code,
HistoryEntry historyEntry,
Cell cell,
Pool pool) {
this.code = code;
this.historyEntry = historyEntry;
this.cell = cell;
this.pool = pool;
}
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
@ -96,18 +122,11 @@ public class EditOneCellCommand extends Command {
* If the operation has been done, return the new cell's data
* so the client side can update the cell's rendering right away.
*/
JSONWriter writer = new JSONWriter(response.getWriter());
Pool pool = new Pool();
Properties options = new Properties();
options.put("pool", pool);
writer.object();
writer.key("code"); writer.value("ok");
writer.key("historyEntry"); historyEntry.write(writer, options);
writer.key("cell"); process.newCell.write(writer, options);
writer.key("pool"); pool.write(writer, options);
writer.endObject();
if(process.newCell != null && process.newCell.recon != null) {
pool.pool(process.newCell.recon);
}
respondJSON(response, new EditResult("ok", historyEntry, process.newCell, pool));
} else {
respond(response, "{ \"code\" : \"pending\" }");
}

View File

@ -33,13 +33,17 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.commands.cell;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.refine.browsing.EngineConfig;
import com.google.refine.commands.EngineDependentCommand;
import com.google.refine.model.AbstractOperation;
import com.google.refine.model.Project;
import com.google.refine.operations.cell.MassEditOperation;
import com.google.refine.operations.cell.MassEditOperation.Edit;
import com.google.refine.util.ParsingUtilities;
public class MassEditCommand extends EngineDependentCommand {
@ -55,7 +59,7 @@ public class MassEditCommand extends EngineDependentCommand {
engineConfig,
columnName,
expression,
MassEditOperation.reconstructEdits(ParsingUtilities.evaluateJsonStringToArray(editsString))
ParsingUtilities.mapper.readValue(editsString, new TypeReference<List<Edit>>() {})
);
}
}

View File

@ -40,14 +40,13 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.json.JSONArray;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.refine.commands.Command;
import com.google.refine.model.AbstractOperation;
import com.google.refine.model.Project;
import com.google.refine.operations.cell.MultiValuedCellSplitOperation;
import com.google.refine.util.ParsingUtilities;
import com.google.refine.process.Process;
import com.google.refine.util.ParsingUtilities;
public class SplitMultiValueCellsCommand extends Command {
@Override
@ -74,12 +73,7 @@ public class SplitMultiValueCellsCommand extends Command {
} else {
String s = request.getParameter("fieldLengths");
JSONArray a = ParsingUtilities.evaluateJsonStringToArray(s);
int[] fieldLengths = new int[a.length()];
for (int i = 0; i < fieldLengths.length; i++) {
fieldLengths[i] = a.getInt(i);
}
int[] fieldLengths = ParsingUtilities.mapper.readValue(s, new TypeReference<int[]>() {});
AbstractOperation op = new MultiValuedCellSplitOperation(columnName,
keyColumnName,

View File

@ -33,16 +33,19 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.commands.column;
import java.util.Arrays;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.json.JSONArray;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.refine.browsing.EngineConfig;
import com.google.refine.commands.EngineDependentCommand;
import com.google.refine.model.AbstractOperation;
import com.google.refine.model.Project;
import com.google.refine.operations.cell.TextTransformOperation;
import com.google.refine.operations.column.ColumnAdditionByFetchingURLsOperation;
import com.google.refine.operations.column.ColumnAdditionByFetchingURLsOperation.HttpHeader;
public class AddColumnByFetchingURLsCommand extends EngineDependentCommand {
@Override
@ -56,7 +59,8 @@ public class AddColumnByFetchingURLsCommand extends EngineDependentCommand {
int delay = Integer.parseInt(request.getParameter("delay"));
String onError = request.getParameter("onError");
boolean cacheResponses = Boolean.parseBoolean(request.getParameter("cacheResponses"));
JSONArray httpHeadersJson = new JSONArray(request.getParameter("httpHeaders"));
ObjectMapper mapper = new ObjectMapper();
List<HttpHeader> headers = Arrays.asList(mapper.readValue(request.getParameter("httpHeaders"), HttpHeader[].class));
return new ColumnAdditionByFetchingURLsOperation(
engineConfig,
@ -67,7 +71,7 @@ public class AddColumnByFetchingURLsCommand extends EngineDependentCommand {
columnInsertIndex,
delay,
cacheResponses,
httpHeadersJson
headers
);
}

Some files were not shown because too many files have changed in this diff Show More