Migrate importers to use Jackson classes

parent 9cab735dfc
commit e4e73d0b92
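The change is mechanical and is repeated across the importer and command classes below: org.json's JSONObject and JSONArray are replaced by Jackson's ObjectNode and ArrayNode, created through the shared mapper in ParsingUtilities and populated through JSONUtilities.safePut, while array access moves from length()/getString(i) to size()/get(i).asText(). A minimal sketch of that pattern, assuming the OpenRefine utility classes shown in the diff; the class and method names in the sketch are illustrative only, not part of the commit:

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;

public class JacksonMigrationSketch {

    // new JSONObject()  ->  ParsingUtilities.mapper.createObjectNode()
    // new JSONArray()   ->  ParsingUtilities.mapper.createArrayNode()
    static ObjectNode describeSheet(String name, int rows) {
        ObjectNode sheetRecord = ParsingUtilities.mapper.createObjectNode();
        JSONUtilities.safePut(sheetRecord, "name", name);
        JSONUtilities.safePut(sheetRecord, "rows", rows);
        return sheetRecord;
    }

    // array.length()       ->  array.size()
    // array.getString(i)   ->  array.get(i).asText()
    static String firstFormat(ArrayNode rankedFormats) {
        return rankedFormats != null && rankedFormats.size() > 0
                ? rankedFormats.get(0).asText()
                : null;
    }
}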
@@ -56,7 +56,6 @@ import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.history.HistoryEntryManager;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.IMetadata;

@@ -37,7 +37,6 @@ import java.time.OffsetDateTime;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.util.StringUtils;

/**

@@ -43,7 +43,6 @@ import org.json.JSONObject;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.browsing.facets.Facet;
import com.google.refine.browsing.util.ConjunctiveFilteredRecords;
import com.google.refine.browsing.util.ConjunctiveFilteredRows;

@@ -8,7 +8,6 @@ import org.json.JSONObject;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.browsing.Engine.Mode;
import com.google.refine.browsing.facets.FacetConfig;
import com.google.refine.util.ParsingUtilities;

@@ -4,7 +4,6 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;

import com.google.refine.model.Project;

@@ -7,7 +7,6 @@ import com.fasterxml.jackson.databind.DatabindContext;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.jsontype.impl.TypeIdResolverBase;
import com.fasterxml.jackson.databind.type.TypeFactory;

import com.google.refine.model.recon.ReconConfig;

public class FacetConfigResolver extends TypeIdResolverBase {

@@ -42,7 +42,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.ProjectManager;
import com.google.refine.browsing.DecoratedValue;
import com.google.refine.browsing.FilteredRecords;
@@ -34,7 +34,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package com.google.refine.browsing.facets;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.browsing.DecoratedValue;

/**

@@ -38,7 +38,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.browsing.FilteredRecords;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RecordFilter;

@@ -51,7 +51,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.browsing.FilteredRecords;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RecordFilter;

@@ -38,7 +38,6 @@ import java.util.regex.Pattern;

import org.json.JSONException;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.browsing.FilteredRecords;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RecordFilter;

@@ -37,7 +37,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.browsing.FilteredRecords;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RecordFilter;

@@ -3,7 +3,6 @@ package com.google.refine.clustering;

import org.json.JSONObject;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.model.Project;

/**

@@ -53,7 +53,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;

import com.google.refine.browsing.Engine;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RowVisitor;
@@ -50,7 +50,6 @@ import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;

import com.google.refine.browsing.Engine;
import com.google.refine.browsing.FilteredRows;
import com.google.refine.browsing.RowVisitor;

@@ -51,7 +51,6 @@ import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonGenerator;

import com.google.refine.ProjectManager;
import com.google.refine.RefineServlet;
import com.google.refine.browsing.Engine;

@@ -40,7 +40,6 @@ import javax.servlet.http.HttpServletRequest;

import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.ProjectManager;
import com.google.refine.preference.PreferenceStore;
import com.google.refine.preference.TopList;

@@ -39,7 +39,6 @@ import java.util.Set;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.RefineServlet;

abstract public class HttpHeadersSupport {

@@ -17,7 +17,6 @@ import org.slf4j.Logger;

import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonGenerator;

import com.google.refine.RefineServlet;
import com.google.refine.util.ParsingUtilities;
@@ -39,9 +39,6 @@ import javax.servlet.ServletException;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.json.JSONException;
import org.json.JSONTokener;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.refine.ProjectManager;
import com.google.refine.model.Project;

@@ -43,7 +43,6 @@ import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;
import com.google.refine.history.Change;
import com.google.refine.history.HistoryEntry;

@@ -38,7 +38,6 @@ import java.util.List;

import javax.servlet.http.HttpServletRequest;

import com.fasterxml.jackson.core.type.TypeReference;

import com.google.refine.browsing.EngineConfig;
import com.google.refine.commands.EngineDependentCommand;
import com.google.refine.model.AbstractOperation;

@@ -39,7 +39,6 @@ import java.util.List;

import javax.servlet.http.HttpServletRequest;

import com.fasterxml.jackson.databind.ObjectMapper;

import com.google.refine.browsing.EngineConfig;
import com.google.refine.commands.EngineDependentCommand;
import com.google.refine.model.AbstractOperation;

@@ -40,7 +40,6 @@ import javax.servlet.http.HttpServletRequest;

import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.core.JsonGenerator;

import com.google.refine.browsing.util.ExpressionBasedRowEvaluable;
import com.google.refine.browsing.util.NumericBinIndex;
import com.google.refine.browsing.util.NumericBinRowIndex;
@@ -45,7 +45,6 @@ import javax.servlet.http.HttpServletRequest;

import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.ProjectManager;
import com.google.refine.commands.Command;
import com.google.refine.preference.TopList;

@@ -41,7 +41,6 @@ import javax.servlet.http.HttpServletRequest;

import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;
import com.google.refine.grel.Control;
import com.google.refine.grel.ControlFunctionRegistry;

@@ -9,7 +9,6 @@ import javax.servlet.http.HttpServletRequest;

import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.ProjectManager;
import com.google.refine.commands.Command;
import com.google.refine.preference.TopList;

@@ -52,7 +52,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;

import com.google.refine.commands.Command;
import com.google.refine.expr.EvalError;
import com.google.refine.expr.Evaluable;

@@ -44,7 +44,6 @@ import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;
import com.google.refine.history.HistoryEntry;
import com.google.refine.model.AbstractOperation;

@@ -40,7 +40,6 @@ import javax.servlet.http.HttpServletRequest;

import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;
import com.google.refine.importing.ImportingManager.ImportingConfiguration;
@@ -43,7 +43,6 @@ import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingManager;

@@ -48,7 +48,6 @@ import org.apache.commons.io.filefilter.WildcardFileFilter;

import org.json.JSONObject;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;

import edu.mit.simile.butterfly.ButterflyModule;

@@ -42,11 +42,11 @@ import javax.servlet.ServletException;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.ProjectManager;
import com.google.refine.commands.Command;
import com.google.refine.commands.HttpUtilities;
@@ -69,7 +69,7 @@ public class CreateProjectCommand extends Command {

try {
Properties parameters = ParsingUtilities.parseUrlParameters(request);
ImportingJob job = ImportingManager.createJob();
JSONObject config = job.getOrCreateDefaultConfig();
ObjectNode config = job.getOrCreateDefaultConfig();
ImportingUtilities.loadDataAndPrepareJob(
request, response, parameters, job, config);

@@ -93,9 +93,9 @@ public class CreateProjectCommand extends Command {

"\\t".equals(parameters.getProperty("separator"))) {
format = "text/line-based/*sv";
} else {
JSONArray rankedFormats = JSONUtilities.getArray(config, "rankedFormats");
if (rankedFormats != null && rankedFormats.length() > 0) {
format = rankedFormats.getString(0);
ArrayNode rankedFormats = JSONUtilities.getArray(config, "rankedFormats");
if (rankedFormats != null && rankedFormats.size() > 0) {
format = rankedFormats.get(0).asText();
}
}

@@ -105,10 +105,10 @@ public class CreateProjectCommand extends Command {

}
}

JSONObject optionObj = null;
String optionsString = parameters.getProperty("options");
ObjectNode optionObj = null;
String optionsString = parameters.getParameter("options");
if (optionsString != null && !optionsString.isEmpty()) {
optionObj = ParsingUtilities.evaluateJsonStringToObject(optionsString);
optionObj = ParsingUtilities.evaluateJsonStringToObjectNode(optionsString);
} else {
Format formatRecord = ImportingManager.formatToRecord.get(format);
optionObj = formatRecord.parser.createParserUIInitializationData(

@@ -133,7 +133,7 @@ public class CreateProjectCommand extends Command {

}
}

static private void adjustLegacyOptions(String format, Properties parameters, JSONObject optionObj) {
static private void adjustLegacyOptions(String format, Properties parameters, ObjectNode optionObj) {
if (",".equals(parameters.getProperty("separator"))) {
JSONUtilities.safePut(optionObj, "separator", ",");
} else if ("\\t".equals(parameters.getProperty("separator"))) {

@@ -150,7 +150,7 @@ public class CreateProjectCommand extends Command {

}

static private void adjustLegacyIntegerOption(
String format, Properties parameters, JSONObject optionObj, String legacyName, String newName) {
String format, Properties parameters, ObjectNode optionObj, String legacyName, String newName) {

String s = parameters.getProperty(legacyName);
if (s != null && !s.isEmpty()) {

@@ -165,7 +165,7 @@ public class CreateProjectCommand extends Command {

static private void adjustLegacyBooleanOption(
String format,
Properties parameters,
JSONObject optionObj,
ObjectNode optionObj,
String legacyName,
String newName,
boolean invert) {
@@ -44,7 +44,6 @@ import javax.servlet.http.HttpServletResponse;

import org.json.JSONException;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;
import com.google.refine.commands.HttpHeadersSupport;
import com.google.refine.commands.HttpHeadersSupport.HttpHeaderInfo;

@@ -36,7 +36,6 @@ package com.google.refine.commands.recon;

import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;

@@ -59,7 +58,6 @@ import org.json.JSONObject;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;
import com.google.refine.expr.ExpressionUtils;
import com.google.refine.model.Column;

@@ -48,7 +48,6 @@ import javax.servlet.http.HttpServletResponse;

import org.json.JSONArray;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;
import com.google.refine.model.Cell;
import com.google.refine.model.Column;

@@ -40,7 +40,6 @@ import javax.servlet.http.HttpServletRequest;

import javax.servlet.http.HttpServletResponse;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.commands.Command;
import com.google.refine.expr.ExpressionUtils;
import com.google.refine.history.Change;

@@ -2,15 +2,10 @@ package com.google.refine.commands.recon;

import javax.servlet.http.HttpServletRequest;

import org.json.JSONObject;

import com.google.refine.browsing.EngineConfig;
import com.google.refine.commands.EngineDependentCommand;
import com.google.refine.model.AbstractOperation;
import com.google.refine.model.Project;
import com.google.refine.model.ReconCandidate;
import com.google.refine.model.recon.StandardReconConfig;
import com.google.refine.operations.recon.ReconMatchSpecificTopicOperation;
import com.google.refine.operations.recon.ReconUseValuesAsIdentifiersOperation;

public class ReconUseValuesAsIdentifiersCommand extends EngineDependentCommand {
@@ -49,7 +49,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonUnwrapped;

import com.google.refine.browsing.Engine;
import com.google.refine.browsing.Engine.Mode;
import com.google.refine.browsing.FilteredRecords;

@@ -46,7 +46,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonRawValue;

import com.google.refine.ProjectManager;
import com.google.refine.commands.Command;
import com.google.refine.model.metadata.ProjectMetadata;

@@ -37,7 +37,6 @@ import javax.servlet.http.HttpServletResponse;

import org.json.JSONException;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.ProjectManager;
import com.google.refine.commands.Command;

@@ -43,7 +43,6 @@ import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;

import com.google.refine.browsing.Engine;
import com.google.refine.model.Project;
import com.google.refine.util.ParsingUtilities;

@@ -39,10 +39,8 @@ import java.util.List;

import java.util.Properties;

import org.apache.commons.lang3.StringEscapeUtils;
import org.json.JSONObject;

import com.fasterxml.jackson.databind.JsonNode;

import com.google.refine.ProjectManager;
import com.google.refine.browsing.Engine;
import com.google.refine.model.Project;

@@ -45,7 +45,6 @@ import org.odftoolkit.odfdom.doc.table.OdfTableCell;

import org.odftoolkit.odfdom.doc.table.OdfTableRow;

import com.fasterxml.jackson.databind.JsonNode;

import com.google.refine.ProjectManager;
import com.google.refine.browsing.Engine;
import com.google.refine.model.Project;

@@ -41,7 +41,6 @@ import org.json.JSONException;

import org.json.JSONObject;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.browsing.Engine;
import com.google.refine.browsing.Engine.Mode;
import com.google.refine.browsing.FilteredRecords;

@@ -50,7 +50,6 @@ import org.apache.poi.ss.util.WorkbookUtil;

import org.apache.poi.xssf.usermodel.XSSFWorkbook;

import com.fasterxml.jackson.databind.JsonNode;

import com.google.refine.ProjectManager;
import com.google.refine.browsing.Engine;
import com.google.refine.model.Project;
@@ -35,12 +35,10 @@ import java.util.ArrayList;

import java.util.List;
import java.util.Properties;

import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.JsonNode;

import com.google.refine.ProjectManager;
import com.google.refine.browsing.Engine;
import com.google.refine.exporters.CustomizableTabularExporterUtilities;

@@ -41,7 +41,6 @@ import java.util.Set;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.grel.Parser;

import clojure.lang.IFn;

@@ -36,7 +36,6 @@ package com.google.refine.expr.functions.strings;

import java.util.Properties;

import com.google.common.base.CharMatcher;

import com.google.refine.expr.EvalError;
import com.google.refine.grel.Function;

@@ -38,7 +38,6 @@ import java.util.Properties;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.expr.Evaluable;

/**

@@ -47,7 +47,6 @@ import java.util.List;

import java.util.Properties;

import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.ProjectManager;
import com.google.refine.RefineServlet;
import com.google.refine.model.Project;

@@ -45,7 +45,6 @@ import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonView;

import com.google.refine.ProjectManager;
import com.google.refine.model.AbstractOperation;
import com.google.refine.model.Project;

@@ -35,7 +35,6 @@ package com.google.refine.history;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

import com.google.refine.model.Project;
import com.google.refine.process.Process;
import com.google.refine.process.ProcessManager;
@@ -54,12 +54,12 @@ import org.apache.poi.poifs.filesystem.POIFSFileSystem;

import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingUtilities;
import com.google.refine.model.Cell;

@@ -69,6 +69,7 @@ import com.google.refine.model.Recon.Judgment;

import com.google.refine.model.ReconCandidate;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;

public class ExcelImporter extends TabularImportingParserBase {
static final Logger logger = LoggerFactory.getLogger(ExcelImporter.class);

@@ -78,15 +79,15 @@ public class ExcelImporter extends TabularImportingParserBase {

}

@Override
public JSONObject createParserUIInitializationData(
ImportingJob job, List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
public ObjectNode createParserUIInitializationData(
ImportingJob job, List<ObjectNode> fileRecords, String format) {
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);

JSONArray sheetRecords = new JSONArray();
ArrayNode sheetRecords = ParsingUtilities.mapper.createArrayNode();
JSONUtilities.safePut(options, "sheetRecords", sheetRecords);
try {
for (int index = 0;index < fileRecords.size();index++) {
JSONObject fileRecord = fileRecords.get(index);
ObjectNode fileRecord = fileRecords.get(index);
File file = ImportingUtilities.getFile(job, fileRecord);
InputStream is = new FileInputStream(file);

@@ -104,7 +105,7 @@ public class ExcelImporter extends TabularImportingParserBase {

Sheet sheet = wb.getSheetAt(i);
int rows = sheet.getLastRowNum() - sheet.getFirstRowNum() + 1;

JSONObject sheetRecord = new JSONObject();
ObjectNode sheetRecord = ParsingUtilities.mapper.createObjectNode();
JSONUtilities.safePut(sheetRecord, "name", file.getName() + "#" + sheet.getSheetName());
JSONUtilities.safePut(sheetRecord, "fileNameAndSheetIndex", file.getName() + "#" + i);
JSONUtilities.safePut(sheetRecord, "rows", rows);

@@ -138,7 +139,7 @@ public class ExcelImporter extends TabularImportingParserBase {

String fileSource,
InputStream inputStream,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
Workbook wb = null;

@@ -182,14 +183,14 @@ public class ExcelImporter extends TabularImportingParserBase {

return;
}

JSONArray sheets = JSONUtilities.getArray(options, "sheets");
ArrayNode sheets = (ArrayNode) options.get("sheets");

for(int i=0;i<sheets.length();i++) {
for(int i=0;i<sheets.size();i++) {
String[] fileNameAndSheetIndex = new String[2];
try {
JSONObject sheetObj = sheets.getJSONObject(i);
ObjectNode sheetObj = (ObjectNode) sheets.get(i);
// value is fileName#sheetIndex
fileNameAndSheetIndex = sheetObj.getString("fileNameAndSheetIndex").split("#");
fileNameAndSheetIndex = sheetObj.get("fileNameAndSheetIndex").asText().split("#");
} catch (JSONException e) {
logger.error(ExceptionUtils.getStackTrace(e));
}
@@ -11,14 +11,14 @@ import java.io.UnsupportedEncodingException;

import java.util.ArrayList;
import java.util.List;

import org.json.JSONArray;
import org.json.JSONObject;

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingUtilities;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;

public class FixedWidthImporter extends TabularImportingParserBase {
public FixedWidthImporter() {

@@ -26,12 +26,12 @@ public class FixedWidthImporter extends TabularImportingParserBase {

}

@Override
public JSONObject createParserUIInitializationData(
ImportingJob job, List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
JSONArray columnWidths = new JSONArray();
public ObjectNode createParserUIInitializationData(
ImportingJob job, List<ObjectNode> fileRecords, String format) {
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);
ArrayNode columnWidths = ParsingUtilities.mapper.createArrayNode();
if (fileRecords.size() > 0) {
JSONObject firstFileRecord = fileRecords.get(0);
ObjectNode firstFileRecord = fileRecords.get(0);
String encoding = ImportingUtilities.getEncoding(firstFileRecord);
String location = JSONUtilities.getString(firstFileRecord, "location", null);
if (location != null) {

@@ -59,7 +59,7 @@ public class FixedWidthImporter extends TabularImportingParserBase {

String fileSource,
Reader reader,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
final int[] columnWidths = JSONUtilities.getIntArray(options, "columnWidths");

@@ -43,8 +43,7 @@ import java.util.List;

import java.util.Map;
import java.util.Properties;

import org.json.JSONObject;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingUtilities;
import com.google.refine.model.Column;

@@ -197,9 +196,9 @@ public class ImporterUtilities {

}

static public MultiFileReadingProgress createMultiFileReadingProgress(
final ImportingJob job, List<JSONObject> fileRecords) {
final ImportingJob job, List<ObjectNode> fileRecords) {
long totalSize = 0;
for (JSONObject fileRecord : fileRecords) {
for (ObjectNode fileRecord : fileRecords) {
File file = ImportingUtilities.getFile(job, fileRecord);
totalSize += file.length();
}
@@ -40,10 +40,10 @@ import java.io.Reader;

import java.util.List;

import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.importers.ImporterUtilities.MultiFileReadingProgress;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingParser;

@@ -53,6 +53,7 @@ import com.google.refine.model.ModelException;

import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;

abstract public class ImportingParserBase implements ImportingParser {
final static Logger logger = LoggerFactory.getLogger("ImportingParserBase");

@@ -67,9 +68,9 @@ abstract public class ImportingParserBase implements ImportingParser {

}

@Override
public JSONObject createParserUIInitializationData(ImportingJob job,
List<JSONObject> fileRecords, String format) {
JSONObject options = new JSONObject();
public ObjectNode createParserUIInitializationData(ImportingJob job,
List<ObjectNode> fileRecords, String format) {
ObjectNode options = ParsingUtilities.mapper.createObjectNode();
JSONUtilities.safePut(options, "includeFileSources", fileRecords.size() > 1);

return options;

@@ -77,10 +78,10 @@ abstract public class ImportingParserBase implements ImportingParser {

@Override
public void parse(Project project, ProjectMetadata metadata,
final ImportingJob job, List<JSONObject> fileRecords, String format,
int limit, JSONObject options, List<Exception> exceptions) {
final ImportingJob job, List<ObjectNode> fileRecords, String format,
int limit, ObjectNode options, List<Exception> exceptions) {
MultiFileReadingProgress progress = ImporterUtilities.createMultiFileReadingProgress(job, fileRecords);
for (JSONObject fileRecord : fileRecords) {
for (ObjectNode fileRecord : fileRecords) {
if (job.canceled) {
break;
}

@@ -101,9 +102,9 @@ abstract public class ImportingParserBase implements ImportingParser {

Project project,
ProjectMetadata metadata,
ImportingJob job,
JSONObject fileRecord,
ObjectNode fileRecord,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions,
final MultiFileReadingProgress progress
) throws IOException {

@@ -142,13 +143,13 @@ abstract public class ImportingParserBase implements ImportingParser {

String fileSource,
Reader reader,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
pushImportingOptions(metadata, fileSource, options);
}

private void pushImportingOptions(ProjectMetadata metadata, String fileSource, JSONObject options) {
private void pushImportingOptions(ProjectMetadata metadata, String fileSource, ObjectNode options) {
try {
options.put("fileSource", fileSource);
} catch (JSONException e) {

@@ -165,7 +166,7 @@ abstract public class ImportingParserBase implements ImportingParser {

String fileSource,
InputStream inputStream,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
pushImportingOptions(metadata, fileSource, options);
@@ -39,8 +39,6 @@ import java.io.InputStream;

import java.io.Serializable;
import java.util.List;

import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -49,7 +47,13 @@ import com.fasterxml.jackson.core.JsonParseException;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonParser.NumberType;
import com.fasterxml.jackson.core.JsonToken;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.BooleanNode;
import com.fasterxml.jackson.databind.node.DoubleNode;
import com.fasterxml.jackson.databind.node.LongNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.google.refine.importers.tree.ImportColumnGroup;
import com.google.refine.importers.tree.TreeImportingParserBase;
import com.google.refine.importers.tree.TreeReader;

@@ -59,6 +63,7 @@ import com.google.refine.importing.ImportingUtilities;

import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;

public class JsonImporter extends TreeImportingParserBase {
static final Logger logger = LoggerFactory.getLogger(JsonImporter.class);

@@ -76,18 +81,18 @@ public class JsonImporter extends TreeImportingParserBase {

final static private int PREVIEW_PARSING_LIMIT = 1000;

@Override
public JSONObject createParserUIInitializationData(
ImportingJob job, List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
public ObjectNode createParserUIInitializationData(ImportingJob job,
List<ObjectNode> fileRecords, String format) {
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);
if (fileRecords.size() > 0) {
try {
JSONObject firstFileRecord = fileRecords.get(0);
ObjectNode firstFileRecord = fileRecords.get(0);
File file = ImportingUtilities.getFile(job, firstFileRecord);
JsonFactory factory = new JsonFactory();
JsonParser parser = factory.createJsonParser(file);

PreviewParsingState state = new PreviewParsingState();
Object rootValue = parseForPreview(parser, state);
JsonNode rootValue = parseForPreview(parser, state);
if (rootValue != null) {
JSONUtilities.safePut(options, "dom", rootValue);
}
@@ -99,7 +104,7 @@ public class JsonImporter extends TreeImportingParserBase {

return options;
}

final static private Object parseForPreview(JsonParser parser, PreviewParsingState state, JsonToken token)
final static private JsonNode parseForPreview(JsonParser parser, PreviewParsingState state, JsonToken token)
throws JsonParseException, IOException {
if (token != null) {
switch (token) {

@@ -108,15 +113,15 @@ public class JsonImporter extends TreeImportingParserBase {

case START_OBJECT:
return parseObjectForPreview(parser, state);
case VALUE_STRING:
return parser.getText();
return new TextNode(parser.getText());
case VALUE_NUMBER_INT:
return Long.valueOf(parser.getLongValue());
return new LongNode(parser.getLongValue());
case VALUE_NUMBER_FLOAT:
return Double.valueOf(parser.getDoubleValue());
return new DoubleNode(parser.getDoubleValue());
case VALUE_TRUE:
return Boolean.TRUE;
return BooleanNode.getTrue();
case VALUE_FALSE:
return Boolean.FALSE;
return BooleanNode.getFalse();
case VALUE_NULL:
return null;
case END_ARRAY:

@@ -131,7 +136,7 @@ public class JsonImporter extends TreeImportingParserBase {

return null;
}

final static private Object parseForPreview(JsonParser parser, PreviewParsingState state) {
final static private JsonNode parseForPreview(JsonParser parser, PreviewParsingState state) {
try {
JsonToken token = parser.nextToken();
state.tokenCount++;

@@ -141,8 +146,8 @@ public class JsonImporter extends TreeImportingParserBase {

}
}

final static private JSONObject parseObjectForPreview(JsonParser parser, PreviewParsingState state) {
JSONObject result = new JSONObject();
final static private ObjectNode parseObjectForPreview(JsonParser parser, PreviewParsingState state) {
ObjectNode result = ParsingUtilities.mapper.createObjectNode();
loop:while (state.tokenCount < PREVIEW_PARSING_LIMIT) {
try {
JsonToken token = parser.nextToken();

@@ -154,7 +159,7 @@ public class JsonImporter extends TreeImportingParserBase {

switch (token) {
case FIELD_NAME:
String fieldName = parser.getText();
Object fieldValue = parseForPreview(parser, state);
JsonNode fieldValue = parseForPreview(parser, state);
JSONUtilities.safePut(result, fieldName, fieldValue);
break;
case END_OBJECT:

@@ -169,8 +174,8 @@ public class JsonImporter extends TreeImportingParserBase {

return result;
}

final static private JSONArray parseArrayForPreview(JsonParser parser, PreviewParsingState state) {
JSONArray result = new JSONArray();
final static private ArrayNode parseArrayForPreview(JsonParser parser, PreviewParsingState state) {
ArrayNode result = ParsingUtilities.mapper.createArrayNode();
loop:while (state.tokenCount < PREVIEW_PARSING_LIMIT) {
try {
JsonToken token = parser.nextToken();

@@ -183,8 +188,8 @@ public class JsonImporter extends TreeImportingParserBase {

case END_ARRAY:
break loop;
default:
Object element = parseForPreview(parser, state, token);
JSONUtilities.append(result, element);
JsonNode element = parseForPreview(parser, state, token);
result.add(element);
}
} catch (IOException e) {
break;

@@ -196,7 +201,7 @@ public class JsonImporter extends TreeImportingParserBase {

@Override
public void parseOneFile(Project project, ProjectMetadata metadata,
ImportingJob job, String fileSource, InputStream is,
ImportColumnGroup rootColumnGroup, int limit, JSONObject options, List<Exception> exceptions) {
ImportColumnGroup rootColumnGroup, int limit, ObjectNode options, List<Exception> exceptions) {

parseOneFile(project, metadata, job, fileSource,
new JSONTreeReader(is), rootColumnGroup, limit, options, exceptions);
@@ -6,10 +6,10 @@ import java.io.Reader;

import java.util.ArrayList;
import java.util.List;

import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.importing.ImportingJob;
import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;

@@ -23,9 +23,9 @@ public class LineBasedImporter extends TabularImportingParserBase {

}

@Override
public JSONObject createParserUIInitializationData(
ImportingJob job, List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
public ObjectNode createParserUIInitializationData(
ImportingJob job, List<ObjectNode> fileRecords, String format) {
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);

JSONUtilities.safePut(options, "linesPerRow", 1);
JSONUtilities.safePut(options, "headerLines", 0);

@@ -42,7 +42,7 @@ public class LineBasedImporter extends TabularImportingParserBase {

String fileSource,
Reader reader,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
final int linesPerRow = JSONUtilities.getInt(options, "linesPerRow", 1);

@@ -40,12 +40,12 @@ import java.io.IOException;

import java.io.InputStream;
import java.io.OutputStream;

import org.json.JSONObject;
import org.marc4j.MarcPermissiveStreamReader;
import org.marc4j.MarcWriter;
import org.marc4j.MarcXmlWriter;
import org.marc4j.marc.Record;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingUtilities;
import com.google.refine.util.JSONUtilities;
@@ -57,9 +57,9 @@ public class MarcImporter extends XmlImporter {

}

@Override
public JSONObject createParserUIInitializationData(ImportingJob job, java.util.List<JSONObject> fileRecords, String format) {
public ObjectNode createParserUIInitializationData(ImportingJob job, java.util.List<ObjectNode> fileRecords, String format) {
if (fileRecords.size() > 0) {
JSONObject firstFileRecord = fileRecords.get(0);
ObjectNode firstFileRecord = fileRecords.get(0);
File file = ImportingUtilities.getFile(job, firstFileRecord);
File tempFile = new File(file.getAbsolutePath()+".xml");

@@ -97,7 +97,7 @@ public class MarcImporter extends XmlImporter {

logger.error("Failed to create temporary XML file from MARC file", e);
}
}
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);
return options;
};

@@ -45,9 +45,7 @@ import java.util.List;

import java.util.Map;

import org.apache.commons.lang3.exception.ExceptionUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.odftoolkit.odfdom.doc.OdfDocument;
import org.odftoolkit.odfdom.doc.table.OdfTable;
import org.odftoolkit.odfdom.doc.table.OdfTableCell;

@@ -55,6 +53,8 @@ import org.odftoolkit.odfdom.doc.table.OdfTableRow;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingUtilities;
import com.google.refine.model.Cell;

@@ -64,6 +64,7 @@ import com.google.refine.model.Recon.Judgment;

import com.google.refine.model.ReconCandidate;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;

public class OdsImporter extends TabularImportingParserBase {

@@ -75,16 +76,16 @@ public class OdsImporter extends TabularImportingParserBase {

@Override
public JSONObject createParserUIInitializationData(
ImportingJob job, List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
public ObjectNode createParserUIInitializationData(
ImportingJob job, List<ObjectNode> fileRecords, String format) {
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);

JSONArray sheetRecords = new JSONArray();
ArrayNode sheetRecords = ParsingUtilities.mapper.createArrayNode();
JSONUtilities.safePut(options, "sheetRecords", sheetRecords);
OdfDocument odfDoc = null;
try {
for (int index = 0;index < fileRecords.size();index++) {
JSONObject fileRecord = fileRecords.get(index);
ObjectNode fileRecord = fileRecords.get(index);
File file = ImportingUtilities.getFile(job, fileRecord);
InputStream is = new FileInputStream(file);
odfDoc = OdfDocument.loadDocument(is);
@@ -95,7 +96,7 @@ public class OdsImporter extends TabularImportingParserBase {

OdfTable sheet = tables.get(i);
int rows = sheet.getRowCount();

JSONObject sheetRecord = new JSONObject();
ObjectNode sheetRecord = ParsingUtilities.mapper.createObjectNode();
JSONUtilities.safePut(sheetRecord, "name", file.getName() + "#" + sheet.getTableName());
JSONUtilities.safePut(sheetRecord, "fileNameAndSheetIndex", file.getName() + "#" + i);
JSONUtilities.safePut(sheetRecord, "rows", rows);

@@ -129,7 +130,7 @@ public class OdsImporter extends TabularImportingParserBase {

String fileSource,
InputStream inputStream,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
OdfDocument odfDoc;

@@ -142,13 +143,13 @@ public class OdsImporter extends TabularImportingParserBase {

List<OdfTable> tables = odfDoc.getTableList();

JSONArray sheets = JSONUtilities.getArray(options, "sheets");
for(int i=0;i<sheets.length();i++) {
ArrayNode sheets = JSONUtilities.getArray(options, "sheets");
for(int i=0;i<sheets.size();i++) {
String[] fileNameAndSheetIndex = new String[2];
try {
JSONObject sheetObj = sheets.getJSONObject(i);
ObjectNode sheetObj = JSONUtilities.getObjectElement(sheets, i);
// value is fileName#sheetIndex
fileNameAndSheetIndex = sheetObj.getString("fileNameAndSheetIndex").split("#");
fileNameAndSheetIndex = sheetObj.get("fileNameAndSheetIndex").asText().split("#");
} catch (JSONException e) {
logger.error(ExceptionUtils.getStackTrace(e));
}

@@ -44,8 +44,8 @@ import org.apache.jena.rdf.model.Model;

import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.json.JSONObject;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.expr.ExpressionUtils;
import com.google.refine.importing.ImportingJob;
import com.google.refine.model.Cell;

@@ -76,7 +76,7 @@ public class RdfTripleImporter extends ImportingParserBase {

}

public void parseOneFile(Project project, ProjectMetadata metadata, ImportingJob job, String fileSource,
InputStream input, int limit, JSONObject options, List<Exception> exceptions) {
InputStream input, int limit, ObjectNode options, List<Exception> exceptions) {
// create an empty model
Model model = ModelFactory.createDefaultModel();
@@ -50,8 +50,8 @@ import java.util.List;

import java.util.Map;

import org.apache.commons.lang3.StringEscapeUtils;
import org.json.JSONObject;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingUtilities;
import com.google.refine.model.Project;

@@ -66,9 +66,9 @@ public class SeparatorBasedImporter extends TabularImportingParserBase {

}

@Override
public JSONObject createParserUIInitializationData(ImportingJob job,
List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
public ObjectNode createParserUIInitializationData(ImportingJob job,
List<ObjectNode> fileRecords, String format) {
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);

String separator = guessSeparator(job, fileRecords);
JSONUtilities.safePut(options, "separator", separator != null ? separator : "\\t");

@@ -88,7 +88,7 @@ public class SeparatorBasedImporter extends TabularImportingParserBase {

String fileSource,
Reader reader,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
String sep = JSONUtilities.getString(options, "separator", "\\t");

@@ -119,6 +119,7 @@ public class SeparatorBasedImporter extends TabularImportingParserBase {

}
}
}

final List<Object> columnNames = retrievedColumnNames;

Character quote = CSVParser.DEFAULT_QUOTE_CHARACTER;

@@ -144,14 +145,14 @@ public class SeparatorBasedImporter extends TabularImportingParserBase {

if (columnNames != null && !usedColumnNames) {
usedColumnNames = true;
return columnNames;
} else {
String line = lnReader.readLine();
if (line == null) {
return null;
} else {
return getCells(line, parser, lnReader);
String line = lnReader.readLine();
if (line == null) {
return null;
} else {
return getCells(line, parser, lnReader);
}
}
}
}
};

@@ -172,9 +173,9 @@ public class SeparatorBasedImporter extends TabularImportingParserBase {

return cells;
}

static public String guessSeparator(ImportingJob job, List<JSONObject> fileRecords) {
static public String guessSeparator(ImportingJob job, List<ObjectNode> fileRecords) {
for (int i = 0; i < 5 && i < fileRecords.size(); i++) {
JSONObject fileRecord = fileRecords.get(i);
ObjectNode fileRecord = fileRecords.get(i);
String encoding = ImportingUtilities.getEncoding(fileRecord);
String location = JSONUtilities.getString(fileRecord, "location", null);
@@ -39,8 +39,7 @@ import java.io.Serializable;

import java.util.ArrayList;
import java.util.List;

import org.json.JSONObject;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.expr.ExpressionUtils;
import com.google.refine.importing.ImportingJob;
import com.google.refine.model.Cell;

@@ -56,9 +55,9 @@ abstract public class TabularImportingParserBase extends ImportingParserBase {

}

@Override
public JSONObject createParserUIInitializationData(ImportingJob job,
List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
public ObjectNode createParserUIInitializationData(ImportingJob job,
List<ObjectNode> fileRecords, String format) {
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);

JSONUtilities.safePut(options, "ignoreLines", -1); // number of blank lines at the beginning to ignore
JSONUtilities.safePut(options, "headerLines", 1); // number of header lines

@@ -85,7 +84,7 @@ abstract public class TabularImportingParserBase extends ImportingParserBase {

TableDataReader reader,
String fileSource,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
int ignoreLines = JSONUtilities.getInt(options, "ignoreLines", -1);

@@ -203,7 +202,7 @@ abstract public class TabularImportingParserBase extends ImportingParserBase {

}

public void parseOneFile(Project project, ProjectMetadata metadata, ImportingJob job, String fileSource,
Reader dataReader, int limit, JSONObject options, List<Exception> exceptions) {
Reader dataReader, int limit, ObjectNode options, List<Exception> exceptions) {
super.parseOneFile(project, metadata, job, fileSource, dataReader, limit, options, exceptions);
}
}

@@ -9,7 +9,6 @@ import java.util.Map;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.json.JSONObject;
import org.sweble.wikitext.parser.ParserConfig;
import org.sweble.wikitext.parser.WikitextEncodingValidator;
import org.sweble.wikitext.parser.WikitextParser;
@@ -51,8 +50,8 @@ import org.sweble.wikitext.parser.parser.PreprocessorToParserTransformer;

import org.sweble.wikitext.parser.preprocessor.PreprocessedWikitext;
import org.sweble.wikitext.parser.utils.SimpleParserConfig;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.io.CharStreams;

import com.google.refine.importing.ImportingJob;
import com.google.refine.model.Cell;
import com.google.refine.model.Column;

@@ -77,9 +76,9 @@ public class WikitextImporter extends TabularImportingParserBase {

}

@Override
public JSONObject createParserUIInitializationData(
ImportingJob job, List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
public ObjectNode createParserUIInitializationData(
ImportingJob job, List<ObjectNode> fileRecords, String format) {
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);

JSONUtilities.safePut(options, "guessCellValueTypes", false);
JSONUtilities.safePut(options, "blankSpanningCells", true);

@@ -668,7 +667,7 @@ public class WikitextImporter extends TabularImportingParserBase {

String fileSource,
Reader reader,
int limit,
JSONObject options,
ObjectNode options,
List<Exception> exceptions
) {
// Set-up a simple wiki configuration
@@ -46,11 +46,11 @@ import javax.xml.stream.XMLStreamConstants;

import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;

import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.refine.importers.tree.ImportColumnGroup;
import com.google.refine.importers.tree.TreeImportingParserBase;
import com.google.refine.importers.tree.TreeReader;

@@ -60,6 +60,7 @@ import com.google.refine.importing.ImportingUtilities;

import com.google.refine.model.Project;
import com.google.refine.model.metadata.ProjectMetadata;
import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;

public class XmlImporter extends TreeImportingParserBase {
static final Logger logger = LoggerFactory.getLogger(XmlImporter.class);

@@ -75,12 +76,12 @@ public class XmlImporter extends TreeImportingParserBase {

final static private int PREVIEW_PARSING_LIMIT = 1000;

@Override
public JSONObject createParserUIInitializationData(
ImportingJob job, List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
public ObjectNode createParserUIInitializationData(
ImportingJob job, List<ObjectNode> fileRecords, String format) {
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);
try {
if (fileRecords.size() > 0) {
JSONObject firstFileRecord = fileRecords.get(0);
ObjectNode firstFileRecord = fileRecords.get(0);
File file = ImportingUtilities.getFile(job, firstFileRecord);
InputStream is = new FileInputStream(file);

@@ -92,7 +93,7 @@ public class XmlImporter extends TreeImportingParserBase {

int tokenType = parser.next();
state.tokenCount++;
if (tokenType == XMLStreamConstants.START_ELEMENT) {
JSONObject rootElement = descendElement(parser, state);
ObjectNode rootElement = descendElement(parser, state);
if (rootElement != null) {
JSONUtilities.safePut(options, "dom", rootElement);
break;

@@ -114,8 +115,8 @@ public class XmlImporter extends TreeImportingParserBase {

return options;
}

final static private JSONObject descendElement(XMLStreamReader parser, PreviewParsingState state) {
JSONObject result = new JSONObject();
final static private ObjectNode descendElement(XMLStreamReader parser, PreviewParsingState state) {
ObjectNode result = ParsingUtilities.mapper.createObjectNode();
{
String name = parser.getLocalName();
JSONUtilities.safePut(result, "n", name);

@@ -132,12 +133,11 @@ public class XmlImporter extends TreeImportingParserBase {

int namespaceCount = parser.getNamespaceCount();
if (namespaceCount > 0) {
JSONArray namespaces = new JSONArray();
JSONUtilities.safePut(result, "ns", namespaces);
ArrayNode namespaces = result.putArray("ns");

for (int i = 0; i < namespaceCount; i++) {
JSONObject namespace = new JSONObject();
JSONUtilities.append(namespaces, namespace);
ObjectNode namespace = ParsingUtilities.mapper.createObjectNode();
namespaces.add(namespace);
JSONUtilities.safePut(namespace, "p", parser.getNamespacePrefix(i));
JSONUtilities.safePut(namespace, "uri", parser.getNamespaceURI(i));
}

@@ -145,12 +145,11 @@

int attributeCount = parser.getAttributeCount();
if (attributeCount > 0) {
JSONArray attributes = new JSONArray();
JSONUtilities.safePut(result, "a", attributes);
ArrayNode attributes = result.putArray("a");

for (int i = 0; i < attributeCount; i++) {
JSONObject attribute = new JSONObject();
|
||||
JSONUtilities.append(attributes, attribute);
|
||||
ObjectNode attribute = ParsingUtilities.mapper.createObjectNode();
|
||||
attributes.add(attribute);
|
||||
JSONUtilities.safePut(attribute, "n", parser.getAttributeLocalName(i));
|
||||
JSONUtilities.safePut(attribute, "v", parser.getAttributeValue(i));
|
||||
String prefix = parser.getAttributePrefix(i);
|
||||
@ -160,7 +159,7 @@ public class XmlImporter extends TreeImportingParserBase {
|
||||
}
|
||||
}
|
||||
|
||||
JSONArray children = new JSONArray();
|
||||
ArrayNode children = ParsingUtilities.mapper.createArrayNode();
|
||||
try {
|
||||
while (parser.hasNext() && state.tokenCount < PREVIEW_PARSING_LIMIT) {
|
||||
int tokenType = parser.next();
|
||||
@ -168,16 +167,16 @@ public class XmlImporter extends TreeImportingParserBase {
|
||||
if (tokenType == XMLStreamConstants.END_ELEMENT) {
|
||||
break;
|
||||
} else if (tokenType == XMLStreamConstants.START_ELEMENT) {
|
||||
JSONObject childElement = descendElement(parser, state);
|
||||
ObjectNode childElement = descendElement(parser, state);
|
||||
if (childElement != null) {
|
||||
JSONUtilities.append(children, childElement);
|
||||
children.add(childElement);
|
||||
}
|
||||
} else if (tokenType == XMLStreamConstants.CHARACTERS ||
|
||||
tokenType == XMLStreamConstants.CDATA ||
|
||||
tokenType == XMLStreamConstants.SPACE) {
|
||||
JSONObject childElement = new JSONObject();
|
||||
ObjectNode childElement = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(childElement, "t", parser.getText());
|
||||
JSONUtilities.append(children, childElement);
|
||||
children.add(childElement);
|
||||
} else {
|
||||
// ignore everything else
|
||||
}
|
||||
@ -186,8 +185,8 @@ public class XmlImporter extends TreeImportingParserBase {
|
||||
logger.error("Error generating parser UI initialization data for XML file", e);
|
||||
}
|
||||
|
||||
if (children.length() > 0) {
|
||||
JSONUtilities.safePut(result, "c", children);
|
||||
if (children.size() > 0) {
|
||||
result.put("c", children);
|
||||
}
|
||||
return result;
|
||||
}
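As a worked example of the preview structure that descendElement now assembles, the following self-contained sketch builds, with plain Jackson, the node that would correspond to an element like <row id="1">hello</row>. The keys ("n", "a", "v", "c", "t") mirror the code above; everything else, including the class name, is illustrative.

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ArrayNode;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class XmlPreviewNodeSketch {
        public static void main(String[] args) {
            ObjectMapper mapper = new ObjectMapper();

            ObjectNode element = mapper.createObjectNode();
            element.put("n", "row");                       // element name

            ArrayNode attributes = element.putArray("a");  // replaces new JSONArray() + safePut
            ObjectNode attribute = attributes.addObject();
            attribute.put("n", "id");
            attribute.put("v", "1");

            ArrayNode children = mapper.createArrayNode(); // only attached if non-empty, as above
            ObjectNode text = children.addObject();
            text.put("t", "hello");
            if (children.size() > 0) {
                element.set("c", children);
            }

            // Prints {"n":"row","a":[{"n":"id","v":"1"}],"c":[{"t":"hello"}]}
            System.out.println(element);
        }
    }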
|
||||
@ -195,7 +194,7 @@ public class XmlImporter extends TreeImportingParserBase {
|
||||
@Override
|
||||
public void parseOneFile(Project project, ProjectMetadata metadata,
|
||||
ImportingJob job, String fileSource, InputStream inputStream,
|
||||
ImportColumnGroup rootColumnGroup, int limit, JSONObject options,
|
||||
ImportColumnGroup rootColumnGroup, int limit, ObjectNode options,
|
||||
List<Exception> exceptions) {
|
||||
|
||||
try {
|
||||
|
@ -40,8 +40,8 @@ import java.io.Reader;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.lang3.NotImplementedException;
|
||||
import org.json.JSONObject;
|
||||
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.google.refine.importers.ImporterUtilities;
|
||||
import com.google.refine.importers.ImporterUtilities.MultiFileReadingProgress;
|
||||
import com.google.refine.importers.ImportingParserBase;
|
||||
@ -62,9 +62,9 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
}
|
||||
|
||||
@Override
|
||||
public JSONObject createParserUIInitializationData(ImportingJob job,
|
||||
List<JSONObject> fileRecords, String format) {
|
||||
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
|
||||
public ObjectNode createParserUIInitializationData(ImportingJob job,
|
||||
List<ObjectNode> fileRecords, String format) {
|
||||
ObjectNode options = super.createParserUIInitializationData(job, fileRecords, format);
|
||||
|
||||
JSONUtilities.safePut(options, "trimStrings", false);
|
||||
JSONUtilities.safePut(options, "guessCellValueTypes", false);
|
||||
@ -75,13 +75,13 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
|
||||
@Override
|
||||
public void parse(Project project, ProjectMetadata metadata,
|
||||
ImportingJob job, List<JSONObject> fileRecords, String format,
|
||||
int limit, JSONObject options, List<Exception> exceptions) {
|
||||
ImportingJob job, List<ObjectNode> fileRecords, String format,
|
||||
int limit, ObjectNode options, List<Exception> exceptions) {
|
||||
|
||||
MultiFileReadingProgress progress = ImporterUtilities.createMultiFileReadingProgress(job, fileRecords);
|
||||
ImportColumnGroup rootColumnGroup = new ImportColumnGroup();
|
||||
|
||||
for (JSONObject fileRecord : fileRecords) {
|
||||
for (ObjectNode fileRecord : fileRecords) {
|
||||
try {
|
||||
parseOneFile(project, metadata, job, fileRecord, rootColumnGroup, limit, options, exceptions, progress);
|
||||
} catch (IOException e) {
|
||||
@ -102,10 +102,10 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
Project project,
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
JSONObject fileRecord,
|
||||
ObjectNode fileRecord,
|
||||
ImportColumnGroup rootColumnGroup,
|
||||
int limit,
|
||||
JSONObject options,
|
||||
ObjectNode options,
|
||||
List<Exception> exceptions,
|
||||
final MultiFileReadingProgress progress
|
||||
) throws IOException {
|
||||
@ -151,7 +151,7 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
Reader reader,
|
||||
ImportColumnGroup rootColumnGroup,
|
||||
int limit,
|
||||
JSONObject options,
|
||||
ObjectNode options,
|
||||
List<Exception> exceptions
|
||||
) {
|
||||
throw new NotImplementedException("project ID:" + project.id);
|
||||
@ -171,7 +171,7 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
InputStream inputStream,
|
||||
ImportColumnGroup rootColumnGroup,
|
||||
int limit,
|
||||
JSONObject options,
|
||||
ObjectNode options,
|
||||
List<Exception> exceptions
|
||||
) {
|
||||
// throw new NotImplementedException();
|
||||
@ -190,7 +190,7 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
TreeReader treeParser,
|
||||
ImportColumnGroup rootColumnGroup,
|
||||
int limit,
|
||||
JSONObject options,
|
||||
ObjectNode options,
|
||||
List<Exception> exceptions
|
||||
) {
|
||||
String[] recordPath = JSONUtilities.getStringArray(options, "recordPath");
|
||||
|
@ -45,13 +45,12 @@ import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.google.refine.RefineServlet;
|
||||
import com.google.refine.commands.HttpUtilities;
|
||||
import com.google.refine.importing.ImportingManager.Format;
|
||||
@ -112,8 +111,8 @@ public class DefaultImportingController implements ImportingController {
|
||||
|
||||
job.updating = true;
|
||||
try {
|
||||
JSONObject config = job.getOrCreateDefaultConfig();
|
||||
if (!("new".equals(config.getString("state")))) {
|
||||
ObjectNode config = job.getOrCreateDefaultConfig();
|
||||
if (!("new".equals(JSONUtilities.getString(config, "state", null)))) {
|
||||
HttpUtilities.respond(response, "error", "Job already started; cannot load more data");
|
||||
return;
|
||||
}
|
||||
@ -140,13 +139,13 @@ public class DefaultImportingController implements ImportingController {
|
||||
|
||||
job.updating = true;
|
||||
try {
|
||||
JSONObject config = job.getOrCreateDefaultConfig();
|
||||
if (!("ready".equals(config.getString("state")))) {
|
||||
ObjectNode config = job.getOrCreateDefaultConfig();
|
||||
if (!("ready".equals(JSONUtilities.getString(config, "state", null)))) {
|
||||
HttpUtilities.respond(response, "error", "Job not ready");
|
||||
return;
|
||||
}
|
||||
|
||||
JSONArray fileSelectionArray = ParsingUtilities.evaluateJsonStringToArray(
|
||||
ArrayNode fileSelectionArray = ParsingUtilities.evaluateJsonStringToArrayNode(
|
||||
request.getParameter("fileSelection"));
|
||||
|
||||
ImportingUtilities.updateJobWithNewFileSelection(job, fileSelectionArray);
|
||||
@ -172,14 +171,14 @@ public class DefaultImportingController implements ImportingController {
|
||||
|
||||
job.updating = true;
|
||||
try {
|
||||
JSONObject config = job.getOrCreateDefaultConfig();
|
||||
if (!("ready".equals(config.getString("state")))) {
|
||||
ObjectNode config = job.getOrCreateDefaultConfig();
|
||||
if (!("ready".equals(JSONUtilities.getString(config, "state", null)))) {
|
||||
HttpUtilities.respond(response, "error", "Job not ready");
|
||||
return;
|
||||
}
|
||||
|
||||
String format = request.getParameter("format");
|
||||
JSONObject optionObj = ParsingUtilities.evaluateJsonStringToObject(
|
||||
ObjectNode optionObj = ParsingUtilities.evaluateJsonStringToObjectNode(
|
||||
request.getParameter("options"));
|
||||
|
||||
List<Exception> exceptions = new LinkedList<Exception>();
|
||||
@ -230,9 +229,9 @@ public class DefaultImportingController implements ImportingController {
|
||||
String format = request.getParameter("format");
|
||||
Format formatRecord = ImportingManager.formatToRecord.get(format);
|
||||
if (formatRecord != null && formatRecord.parser != null) {
|
||||
JSONObject options = formatRecord.parser.createParserUIInitializationData(
|
||||
ObjectNode options = formatRecord.parser.createParserUIInitializationData(
|
||||
job, job.getSelectedFileRecords(), format);
|
||||
JSONObject result = new JSONObject();
|
||||
ObjectNode result = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(result, "status", "ok");
|
||||
JSONUtilities.safePut(result, "options", options);
|
||||
|
||||
@ -255,14 +254,14 @@ public class DefaultImportingController implements ImportingController {
|
||||
job.updating = true;
|
||||
job.touch();
|
||||
try {
|
||||
JSONObject config = job.getOrCreateDefaultConfig();
|
||||
if (!("ready".equals(config.getString("state")))) {
|
||||
ObjectNode config = job.getOrCreateDefaultConfig();
|
||||
if (!("ready".equals(JSONUtilities.getString(config, "state", null)))) {
|
||||
HttpUtilities.respond(response, "error", "Job not ready");
|
||||
return;
|
||||
}
|
||||
|
||||
String format = request.getParameter("format");
|
||||
JSONObject optionObj = ParsingUtilities.evaluateJsonStringToObject(
|
||||
ObjectNode optionObj = ParsingUtilities.evaluateJsonStringToObjectNode(
|
||||
request.getParameter("options"));
|
||||
|
||||
List<Exception> exceptions = new LinkedList<Exception>();
|
||||
@ -317,13 +316,13 @@ public class DefaultImportingController implements ImportingController {
|
||||
}
|
||||
}
|
||||
|
||||
static public JSONArray convertErrorsToJsonArray(List<Exception> exceptions) {
|
||||
JSONArray a = new JSONArray();
|
||||
static public ArrayNode convertErrorsToJsonArray(List<Exception> exceptions) {
|
||||
ArrayNode a = ParsingUtilities.mapper.createArrayNode();
|
||||
for (Exception e : exceptions) {
|
||||
StringWriter sw = new StringWriter();
|
||||
e.printStackTrace(new PrintWriter(sw));
|
||||
|
||||
JSONObject o = new JSONObject();
|
||||
ObjectNode o = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(o, "message", e.getLocalizedMessage());
|
||||
JSONUtilities.safePut(o, "stack", sw.toString());
|
||||
JSONUtilities.append(a, o);
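One behavioural nuance of the state checks rewritten above: org.json's config.getString("state") throws a JSONException when the key is absent, whereas the new JSONUtilities.getString(config, "state", null) read falls back to a default, so a missing key simply makes the equality test fail. Below is a small stand-alone sketch of the same idea using plain Jackson in place of the helper; names are illustrative.

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class StateCheckSketch {
        public static void main(String[] args) {
            ObjectNode config = new ObjectMapper().createObjectNode();
            config.put("state", "new");

            // was: "ready".equals(config.getString("state"))  -- getString throws if "state" is absent
            String state = config.path("state").asText(null);
            if (!"ready".equals(state)) {
                System.out.println("Job not ready (state=" + state + ")");
            }
        }
    }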
|
||||
|
@ -39,24 +39,24 @@ import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONObject;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonRawValue;
|
||||
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.google.refine.ProjectManager;
|
||||
import com.google.refine.model.Project;
|
||||
import com.google.refine.model.metadata.ProjectMetadata;
|
||||
import com.google.refine.util.JSONUtilities;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
||||
|
||||
public class ImportingJob {
|
||||
final public long id;
|
||||
final public File dir; // Temporary directory where the data about this job is stored
|
||||
|
||||
private JSONObject config;
|
||||
private ObjectNode config;
|
||||
|
||||
public Project project;
|
||||
public ProjectMetadata metadata;
|
||||
@ -71,7 +71,7 @@ public class ImportingJob {
|
||||
this.id = id;
|
||||
this.dir = dir;
|
||||
|
||||
JSONObject cfg = new JSONObject();
|
||||
ObjectNode cfg = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(cfg, "state", "new");
|
||||
JSONUtilities.safePut(cfg, "hasData", false);
|
||||
this.config = cfg;
|
||||
@ -88,7 +88,7 @@ public class ImportingJob {
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public JSONObject getOrCreateDefaultConfig() {
|
||||
public ObjectNode getOrCreateDefaultConfig() {
|
||||
return config;
|
||||
}
|
||||
|
||||
@ -114,9 +114,9 @@ public class ImportingJob {
|
||||
|
||||
public void setProgress(int percent, String message) {
|
||||
synchronized (config) {
|
||||
JSONObject progress = JSONUtilities.getObject(config, "progress");
|
||||
ObjectNode progress = JSONUtilities.getObject(config, "progress");
|
||||
if (progress == null) {
|
||||
progress = new JSONObject();
|
||||
progress = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(config, "progress", progress);
|
||||
}
|
||||
JSONUtilities.safePut(progress, "message", message);
|
||||
@ -126,13 +126,13 @@ public class ImportingJob {
|
||||
}
|
||||
}
|
||||
|
||||
public void setFileSelection(JSONArray fileSelectionArray) {
|
||||
public void setFileSelection(ArrayNode fileSelectionArray) {
|
||||
synchronized (config) {
|
||||
JSONUtilities.safePut(config, "fileSelection", fileSelectionArray);
|
||||
}
|
||||
}
|
||||
|
||||
public void setRankedFormats(JSONArray rankedFormats) {
|
||||
public void setRankedFormats(ArrayNode rankedFormats) {
|
||||
synchronized (config) {
|
||||
JSONUtilities.safePut(config, "rankedFormats", rankedFormats);
|
||||
}
|
||||
@ -140,7 +140,7 @@ public class ImportingJob {
|
||||
|
||||
|
||||
@JsonIgnore
|
||||
public JSONObject getRetrievalRecord() {
|
||||
public ObjectNode getRetrievalRecord() {
|
||||
synchronized(config) {
|
||||
return JSONUtilities.getObject(config,"retrievalRecord");
|
||||
}
|
||||
@ -149,25 +149,25 @@ public class ImportingJob {
|
||||
/**
|
||||
* To check if the file record is a metadata file entry
|
||||
* @param fileRecordObject
|
||||
* @return JSONObject
|
||||
* @return ObjectNode
|
||||
*/
|
||||
public boolean isMetadataFileRecord(JSONObject fileRecordObject) {
|
||||
public boolean isMetadataFileRecord(ObjectNode fileRecordObject) {
|
||||
return fileRecordObject.has("metaDataFormat");
|
||||
}
|
||||
|
||||
@JsonIgnore
|
||||
public List<JSONObject> getSelectedFileRecords() {
|
||||
List<JSONObject> results = new ArrayList<JSONObject>();
|
||||
public List<ObjectNode> getSelectedFileRecords() {
|
||||
List<ObjectNode> results = new ArrayList<ObjectNode>();
|
||||
|
||||
JSONObject retrievalRecord = JSONUtilities.getObject(config,"retrievalRecord");
|
||||
ObjectNode retrievalRecord = JSONUtilities.getObject(config,"retrievalRecord");
|
||||
if (retrievalRecord != null) {
|
||||
JSONArray fileRecordArray = JSONUtilities.getArray(retrievalRecord, "files");
|
||||
ArrayNode fileRecordArray = JSONUtilities.getArray(retrievalRecord, "files");
|
||||
if (fileRecordArray != null) {
|
||||
JSONArray fileSelectionArray = JSONUtilities.getArray(config,"fileSelection");
|
||||
ArrayNode fileSelectionArray = JSONUtilities.getArray(config,"fileSelection");
|
||||
if (fileSelectionArray != null) {
|
||||
for (int i = 0; i < fileSelectionArray.length(); i++) {
|
||||
for (int i = 0; i < fileSelectionArray.size(); i++) {
|
||||
int index = JSONUtilities.getIntElement(fileSelectionArray, i, -1);
|
||||
if (index >= 0 && index < fileRecordArray.length()) {
|
||||
if (index >= 0 && index < fileRecordArray.size()) {
|
||||
results.add(JSONUtilities.getObjectElement(fileRecordArray, index));
|
||||
}
|
||||
}
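To make the ObjectNode-backed job configuration easier to picture, here is a hypothetical, self-contained sketch of the layout ImportingJob now maintains (state, retrievalRecord.files, fileSelection) together with the size()-based index walk used by getSelectedFileRecords above. File names and formats are invented; plain Jackson stands in for the JSONUtilities helpers.

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ArrayNode;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class ImportingJobConfigSketch {
        public static void main(String[] args) {
            ObjectMapper mapper = new ObjectMapper();

            ObjectNode config = mapper.createObjectNode();
            config.put("state", "new");
            config.put("hasData", false);

            ObjectNode retrievalRecord = config.putObject("retrievalRecord");
            ArrayNode files = retrievalRecord.putArray("files");
            files.addObject().put("fileName", "data.csv").put("format", "text/line-based/*sv");
            files.addObject().put("fileName", "readme.txt").put("format", "text/line-based");

            ArrayNode fileSelection = config.putArray("fileSelection");
            fileSelection.add(0); // only the first file is selected

            // Same walk as getSelectedFileRecords(), with JSONArray.length() replaced by size()
            for (int i = 0; i < fileSelection.size(); i++) {
                int index = fileSelection.get(i).asInt(-1);
                if (index >= 0 && index < files.size()) {
                    System.out.println(files.get(index));
                }
            }
        }
    }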
|
||||
|
@ -54,7 +54,6 @@ import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.RefineServlet;
|
||||
|
||||
import edu.mit.simile.butterfly.ButterflyModule;
|
||||
|
@ -35,8 +35,7 @@ package com.google.refine.importing;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.json.JSONObject;
|
||||
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.google.refine.model.Project;
|
||||
import com.google.refine.model.metadata.ProjectMetadata;
|
||||
|
||||
@ -49,11 +48,11 @@ public interface ImportingParser {
|
||||
* @param job
|
||||
* @param fileRecords
|
||||
* @param format
|
||||
* @return JSONObject options
|
||||
* @return ObjectNode options
|
||||
*/
|
||||
public JSONObject createParserUIInitializationData(
|
||||
public ObjectNode createParserUIInitializationData(
|
||||
ImportingJob job,
|
||||
List<JSONObject> fileRecords,
|
||||
List<ObjectNode> fileRecords,
|
||||
String format
|
||||
);
|
||||
|
||||
@ -77,10 +76,10 @@ public interface ImportingParser {
|
||||
Project project,
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
List<JSONObject> fileRecords,
|
||||
List<ObjectNode> fileRecords,
|
||||
String format,
|
||||
int limit,
|
||||
JSONObject options,
|
||||
ObjectNode options,
|
||||
List<Exception> exceptions
|
||||
);
|
||||
}
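A minimal, do-nothing implementation of the migrated interface, to show the new ObjectNode-based signatures in one place. This is only a sketch: the class name is invented, the method shapes are assumed from the hunks above, and it compiles only inside the OpenRefine source tree.

    package com.google.refine.importing;

    import java.util.List;

    import com.fasterxml.jackson.databind.node.ObjectNode;
    import com.google.refine.model.Project;
    import com.google.refine.model.metadata.ProjectMetadata;
    import com.google.refine.util.ParsingUtilities;

    public class NoOpImportingParser implements ImportingParser {

        @Override
        public ObjectNode createParserUIInitializationData(
                ImportingJob job, List<ObjectNode> fileRecords, String format) {
            // Seed an empty options object, as the concrete importers above do
            return ParsingUtilities.mapper.createObjectNode();
        }

        @Override
        public void parse(Project project, ProjectMetadata metadata, ImportingJob job,
                List<ObjectNode> fileRecords, String format,
                int limit, ObjectNode options, List<Exception> exceptions) {
            // Intentionally empty: a real parser would populate the project here
        }
    }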
|
||||
|
@ -83,11 +83,12 @@ import org.apache.http.util.EntityUtils;
|
||||
import org.apache.tools.bzip2.CBZip2InputStream;
|
||||
import org.apache.tools.tar.TarEntry;
|
||||
import org.apache.tools.tar.TarInputStream;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONObject;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.google.refine.ProjectManager;
|
||||
import com.google.refine.RefineServlet;
|
||||
import com.google.refine.importing.ImportingManager.Format;
|
||||
@ -105,6 +106,7 @@ import com.google.refine.model.metadata.PackageExtension;
|
||||
import com.google.refine.model.metadata.ProjectMetadata;
|
||||
import com.google.refine.preference.PreferenceStore;
|
||||
import com.google.refine.util.JSONUtilities;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
||||
import io.frictionlessdata.datapackage.Package;
|
||||
import io.frictionlessdata.tableschema.Field;
|
||||
@ -129,13 +131,13 @@ public class ImportingUtilities {
|
||||
HttpServletResponse response,
|
||||
Properties parameters,
|
||||
final ImportingJob job,
|
||||
JSONObject config) throws IOException, ServletException {
|
||||
ObjectNode config) throws IOException, ServletException {
|
||||
|
||||
JSONObject retrievalRecord = new JSONObject();
|
||||
ObjectNode retrievalRecord = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(config, "retrievalRecord", retrievalRecord);
|
||||
JSONUtilities.safePut(config, "state", "loading-raw-data");
|
||||
|
||||
final JSONObject progress = new JSONObject();
|
||||
final ObjectNode progress = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(config, "progress", progress);
|
||||
try {
|
||||
ImportingUtilities.retrieveContentFromPostRequest(
|
||||
@ -164,13 +166,13 @@ public class ImportingUtilities {
|
||||
return;
|
||||
}
|
||||
|
||||
JSONArray fileSelectionIndexes = new JSONArray();
|
||||
ArrayNode fileSelectionIndexes = ParsingUtilities.mapper.createArrayNode();
|
||||
JSONUtilities.safePut(config, "fileSelection", fileSelectionIndexes);
|
||||
|
||||
String bestFormat = ImportingUtilities.autoSelectFiles(job, retrievalRecord, fileSelectionIndexes);
|
||||
bestFormat = ImportingUtilities.guessBetterFormat(job, bestFormat);
|
||||
|
||||
JSONArray rankedFormats = new JSONArray();
|
||||
ArrayNode rankedFormats = ParsingUtilities.mapper.createArrayNode();
|
||||
ImportingUtilities.rankFormats(job, bestFormat, rankedFormats);
|
||||
JSONUtilities.safePut(config, "rankedFormats", rankedFormats);
|
||||
|
||||
@ -179,13 +181,13 @@ public class ImportingUtilities {
|
||||
config.remove("progress");
|
||||
}
|
||||
|
||||
static public void updateJobWithNewFileSelection(ImportingJob job, JSONArray fileSelectionArray) {
|
||||
static public void updateJobWithNewFileSelection(ImportingJob job, ArrayNode fileSelectionArray) {
|
||||
job.setFileSelection(fileSelectionArray);
|
||||
|
||||
String bestFormat = ImportingUtilities.getCommonFormatForSelectedFiles(job, fileSelectionArray);
|
||||
bestFormat = ImportingUtilities.guessBetterFormat(job, bestFormat);
|
||||
|
||||
JSONArray rankedFormats = new JSONArray();
|
||||
ArrayNode rankedFormats = ParsingUtilities.mapper.createArrayNode();
|
||||
ImportingUtilities.rankFormats(job, bestFormat, rankedFormats);
|
||||
job.setRankedFormats(rankedFormats);
|
||||
}
|
||||
@ -194,10 +196,10 @@ public class ImportingUtilities {
|
||||
HttpServletRequest request,
|
||||
Properties parameters,
|
||||
File rawDataDir,
|
||||
JSONObject retrievalRecord,
|
||||
ObjectNode retrievalRecord,
|
||||
final Progress progress
|
||||
) throws Exception {
|
||||
JSONArray fileRecords = new JSONArray();
|
||||
ArrayNode fileRecords = ParsingUtilities.mapper.createArrayNode();
|
||||
JSONUtilities.safePut(retrievalRecord, "files", fileRecords);
|
||||
JSONUtilities.safePut(retrievalRecord, "downloadCount", 0);
|
||||
JSONUtilities.safePut(retrievalRecord, "archiveCount", 0);
|
||||
@ -264,7 +266,7 @@ public class ImportingUtilities {
|
||||
|
||||
File file = allocateFile(rawDataDir, "clipboard.txt");
|
||||
|
||||
JSONObject fileRecord = new JSONObject();
|
||||
ObjectNode fileRecord = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(fileRecord, "origin", "clipboard");
|
||||
JSONUtilities.safePut(fileRecord, "declaredEncoding", encoding);
|
||||
JSONUtilities.safePut(fileRecord, "declaredMimeType", (String) null);
|
||||
@ -310,7 +312,7 @@ public class ImportingUtilities {
|
||||
|
||||
File file = allocateFile(rawDataDir, fileName);
|
||||
|
||||
JSONObject fileRecord = new JSONObject();
|
||||
ObjectNode fileRecord = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(fileRecord, "origin", "upload");
|
||||
JSONUtilities.safePut(fileRecord, "declaredEncoding", request.getCharacterEncoding());
|
||||
JSONUtilities.safePut(fileRecord, "declaredMimeType", fileItem.getContentType());
|
||||
@ -344,19 +346,19 @@ public class ImportingUtilities {
|
||||
JSONUtilities.safePut(retrievalRecord, "clipboardCount", clipboardCount);
|
||||
}
|
||||
|
||||
private static void processDataPackage(JSONObject retrievalRecord, JSONArray fileRecords) {
|
||||
private static void processDataPackage(ObjectNode retrievalRecord, ArrayNode fileRecords) {
|
||||
int dataPackageJSONFileIndex = getDataPackageJSONFile(fileRecords);
|
||||
if (dataPackageJSONFileIndex >= 0) {
|
||||
JSONObject dataPackageJSONFile = (JSONObject) fileRecords.get(dataPackageJSONFileIndex);
|
||||
ObjectNode dataPackageJSONFile = (ObjectNode) fileRecords.get(dataPackageJSONFileIndex);
|
||||
JSONUtilities.safePut(dataPackageJSONFile, "metaDataFormat", MetadataFormat.DATAPACKAGE_METADATA.name());
|
||||
JSONUtilities.safePut(retrievalRecord, METADATA_FILE_KEY, dataPackageJSONFile);
|
||||
fileRecords.remove(dataPackageJSONFileIndex);
|
||||
}
|
||||
}
|
||||
|
||||
private static int getDataPackageJSONFile(JSONArray fileRecords) {
|
||||
for (int i = 0; i < fileRecords.length(); i++) {
|
||||
JSONObject file = fileRecords.getJSONObject(i);
|
||||
private static int getDataPackageJSONFile(ArrayNode fileRecords) {
|
||||
for (int i = 0; i < fileRecords.size(); i++) {
|
||||
ObjectNode file = JSONUtilities.getObjectElement(fileRecords, i);
|
||||
if (file.has("archiveFileName") &&
|
||||
file.has("fileName") &&
|
||||
file.get("fileName").equals(DataPackageMetadata.DEFAULT_FILE_NAME)) {
|
||||
@ -366,8 +368,8 @@ public class ImportingUtilities {
|
||||
return -1;
|
||||
}
|
||||
|
||||
private static void download(File rawDataDir, JSONObject retrievalRecord, final Progress progress,
|
||||
JSONArray fileRecords, final SavingUpdate update, String urlString)
|
||||
private static void download(File rawDataDir, ObjectNode retrievalRecord, final Progress progress,
|
||||
ArrayNode fileRecords, final SavingUpdate update, String urlString)
|
||||
throws URISyntaxException, IOException, ClientProtocolException, Exception {
|
||||
download(rawDataDir, retrievalRecord, progress, fileRecords, update, urlString, null);
|
||||
}
|
||||
@ -384,11 +386,11 @@ public class ImportingUtilities {
|
||||
* @throws ClientProtocolException
|
||||
* @throws Exception
|
||||
*/
|
||||
private static void download(File rawDataDir, JSONObject retrievalRecord, final Progress progress,
|
||||
JSONArray fileRecords, final SavingUpdate update, String urlString, String metaDataFormat)
|
||||
private static void download(File rawDataDir, ObjectNode retrievalRecord, final Progress progress,
|
||||
ArrayNode fileRecords, final SavingUpdate update, String urlString, String metaDataFormat)
|
||||
throws URISyntaxException, IOException, ClientProtocolException, Exception {
|
||||
URL url = new URL(urlString);
|
||||
JSONObject fileRecord = new JSONObject();
|
||||
ObjectNode fileRecord = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(fileRecord, "origin", "download");
|
||||
JSONUtilities.safePut(fileRecord, "url", urlString);
|
||||
|
||||
@ -482,7 +484,7 @@ public class ImportingUtilities {
|
||||
}
|
||||
|
||||
private static boolean saveStream(InputStream stream, URL url, File rawDataDir, final Progress progress,
|
||||
final SavingUpdate update, JSONObject fileRecord, JSONArray fileRecords, long length)
|
||||
final SavingUpdate update, ObjectNode fileRecord, ArrayNode fileRecords, long length)
|
||||
throws IOException, Exception {
|
||||
String localname = url.getPath();
|
||||
if (localname.isEmpty() || localname.endsWith("/")) {
|
||||
@ -538,17 +540,17 @@ public class ImportingUtilities {
|
||||
return file;
|
||||
}
|
||||
|
||||
static public Reader getFileReader(ImportingJob job, JSONObject fileRecord, String commonEncoding)
|
||||
static public Reader getFileReader(ImportingJob job, ObjectNode fileRecord, String commonEncoding)
|
||||
throws FileNotFoundException {
|
||||
|
||||
return getFileReader(getFile(job, JSONUtilities.getString(fileRecord, "location", "")), fileRecord, commonEncoding);
|
||||
}
|
||||
|
||||
static public Reader getFileReader(File file, JSONObject fileRecord, String commonEncoding) throws FileNotFoundException {
|
||||
static public Reader getFileReader(File file, ObjectNode fileRecord, String commonEncoding) throws FileNotFoundException {
|
||||
return getReaderFromStream(new FileInputStream(file), fileRecord, commonEncoding);
|
||||
}
|
||||
|
||||
static public Reader getReaderFromStream(InputStream inputStream, JSONObject fileRecord, String commonEncoding) {
|
||||
static public Reader getReaderFromStream(InputStream inputStream, ObjectNode fileRecord, String commonEncoding) {
|
||||
String encoding = getEncoding(fileRecord);
|
||||
if (encoding == null) {
|
||||
encoding = commonEncoding;
|
||||
@ -563,7 +565,7 @@ public class ImportingUtilities {
|
||||
return new InputStreamReader(inputStream);
|
||||
}
|
||||
|
||||
static public File getFile(ImportingJob job, JSONObject fileRecord) {
|
||||
static public File getFile(ImportingJob job, ObjectNode fileRecord) {
|
||||
return getFile(job, JSONUtilities.getString(fileRecord, "location", ""));
|
||||
}
|
||||
|
||||
@ -571,7 +573,7 @@ public class ImportingUtilities {
|
||||
return new File(job.getRawDataDir(), location);
|
||||
}
|
||||
|
||||
static public String getFileSource(JSONObject fileRecord) {
|
||||
static public String getFileSource(ObjectNode fileRecord) {
|
||||
return JSONUtilities.getString(
|
||||
fileRecord,
|
||||
"url",
|
||||
@ -608,7 +610,7 @@ public class ImportingUtilities {
|
||||
}
|
||||
|
||||
static public boolean postProcessRetrievedFile(
|
||||
File rawDataDir, File file, JSONObject fileRecord, JSONArray fileRecords, final Progress progress) {
|
||||
File rawDataDir, File file, ObjectNode fileRecord, ArrayNode fileRecords, final Progress progress) {
|
||||
|
||||
String mimeType = JSONUtilities.getString(fileRecord, "declaredMimeType", null);
|
||||
String contentEncoding = JSONUtilities.getString(fileRecord, "declaredEncoding", null);
|
||||
@ -654,7 +656,7 @@ public class ImportingUtilities {
|
||||
return false;
|
||||
}
|
||||
|
||||
static public void postProcessSingleRetrievedFile(File file, JSONObject fileRecord) {
|
||||
static public void postProcessSingleRetrievedFile(File file, ObjectNode fileRecord) {
|
||||
if (!fileRecord.has("format")) {
|
||||
JSONUtilities.safePut(fileRecord, "format",
|
||||
ImportingManager.getFormat(
|
||||
@ -693,8 +695,8 @@ public class ImportingUtilities {
|
||||
static public boolean explodeArchive(
|
||||
File rawDataDir,
|
||||
InputStream archiveIS,
|
||||
JSONObject archiveFileRecord,
|
||||
JSONArray fileRecords,
|
||||
ObjectNode archiveFileRecord,
|
||||
ArrayNode fileRecords,
|
||||
final Progress progress
|
||||
) {
|
||||
if (archiveIS instanceof TarInputStream) {
|
||||
@ -708,7 +710,7 @@ public class ImportingUtilities {
|
||||
|
||||
progress.setProgress("Extracting " + fileName2, -1);
|
||||
|
||||
JSONObject fileRecord2 = new JSONObject();
|
||||
ObjectNode fileRecord2 = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(fileRecord2, "origin", JSONUtilities.getString(archiveFileRecord, "origin", null));
|
||||
JSONUtilities.safePut(fileRecord2, "declaredEncoding", (String) null);
|
||||
JSONUtilities.safePut(fileRecord2, "declaredMimeType", (String) null);
|
||||
@ -738,7 +740,7 @@ public class ImportingUtilities {
|
||||
|
||||
progress.setProgress("Extracting " + fileName2, -1);
|
||||
|
||||
JSONObject fileRecord2 = new JSONObject();
|
||||
ObjectNode fileRecord2 = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(fileRecord2, "origin", JSONUtilities.getString(archiveFileRecord, "origin", null));
|
||||
JSONUtilities.safePut(fileRecord2, "declaredEncoding", (String) null);
|
||||
JSONUtilities.safePut(fileRecord2, "declaredMimeType", (String) null);
|
||||
@ -792,7 +794,7 @@ public class ImportingUtilities {
|
||||
static public File uncompressFile(
|
||||
File rawDataDir,
|
||||
InputStream uncompressedIS,
|
||||
JSONObject fileRecord,
|
||||
ObjectNode fileRecord,
|
||||
final Progress progress
|
||||
) throws IOException {
|
||||
String fileName = JSONUtilities.getString(fileRecord, "location", "unknown");
|
||||
@ -823,10 +825,10 @@ public class ImportingUtilities {
|
||||
return NumberFormat.getIntegerInstance().format(bytes);
|
||||
}
|
||||
|
||||
static public String getEncoding(JSONObject fileRecord) {
|
||||
String encoding = JSONUtilities.getString(fileRecord, "encoding", null);
|
||||
static public String getEncoding(ObjectNode firstFileRecord) {
|
||||
String encoding = JSONUtilities.getString(firstFileRecord, "encoding", null);
|
||||
if (encoding == null || encoding.isEmpty()) {
|
||||
encoding = JSONUtilities.getString(fileRecord, "declaredEncoding", null);
|
||||
encoding = JSONUtilities.getString(firstFileRecord, "declaredEncoding", null);
|
||||
}
|
||||
return encoding;
|
||||
}
|
||||
@ -840,14 +842,14 @@ public class ImportingUtilities {
|
||||
* @param fileSelectionIndexes JSON array of selected file indices matching best format
|
||||
* @return best (highest frequency) format
|
||||
*/
|
||||
static public String autoSelectFiles(ImportingJob job, JSONObject retrievalRecord, JSONArray fileSelectionIndexes) {
|
||||
static public String autoSelectFiles(ImportingJob job, ObjectNode retrievalRecord, ArrayNode fileSelectionIndexes) {
|
||||
final Map<String, Integer> formatToCount = new HashMap<String, Integer>();
|
||||
List<String> formats = new ArrayList<String>();
|
||||
|
||||
JSONArray fileRecords = JSONUtilities.getArray(retrievalRecord, "files");
|
||||
int count = fileRecords.length();
|
||||
ArrayNode fileRecords = JSONUtilities.getArray(retrievalRecord, "files");
|
||||
int count = fileRecords.size();
|
||||
for (int i = 0; i < count; i++) {
|
||||
JSONObject fileRecord = JSONUtilities.getObjectElement(fileRecords, i);
|
||||
ObjectNode fileRecord = JSONUtilities.getObjectElement(fileRecords, i);
|
||||
String format = JSONUtilities.getString(fileRecord, "format", null);
|
||||
if (format != null) {
|
||||
if (formatToCount.containsKey(format)) {
|
||||
@ -875,7 +877,7 @@ public class ImportingUtilities {
|
||||
} else {
|
||||
// Otherwise, select files matching the best format
|
||||
for (int i = 0; i < count; i++) {
|
||||
JSONObject fileRecord = JSONUtilities.getObjectElement(fileRecords, i);
|
||||
ObjectNode fileRecord = JSONUtilities.getObjectElement(fileRecords, i);
|
||||
String format = JSONUtilities.getString(fileRecord, "format", null);
|
||||
if (format != null && format.equals(bestFormat)) {
|
||||
JSONUtilities.append(fileSelectionIndexes, i);
|
||||
@ -884,7 +886,7 @@ public class ImportingUtilities {
|
||||
|
||||
// If nothing matches the best format but we have some files,
|
||||
// then select them all
|
||||
if (fileSelectionIndexes.length() == 0 && count > 0) {
|
||||
if (fileSelectionIndexes.size() == 0 && count > 0) {
|
||||
for (int i = 0; i < count; i++) {
|
||||
JSONUtilities.append(fileSelectionIndexes, i);
|
||||
}
|
||||
@ -893,18 +895,18 @@ public class ImportingUtilities {
|
||||
return bestFormat;
|
||||
}
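For intuition, here is a simplified, self-contained sketch of the selection logic above, run on hypothetical file records: the most frequent declared format wins, and the indexes of the matching files are appended to fileSelectionIndexes. The frequency count is condensed (the real method also tracks the order in which formats appear); names and format strings are illustrative.

    import java.util.HashMap;
    import java.util.Map;

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ArrayNode;

    public class AutoSelectSketch {
        public static void main(String[] args) {
            ObjectMapper mapper = new ObjectMapper();
            ArrayNode fileRecords = mapper.createArrayNode();
            fileRecords.addObject().put("format", "text/line-based/*sv");
            fileRecords.addObject().put("format", "text/line-based/*sv");
            fileRecords.addObject().put("format", "text/json");

            // Count how often each format occurs
            Map<String, Integer> formatToCount = new HashMap<>();
            for (int i = 0; i < fileRecords.size(); i++) {
                String format = fileRecords.get(i).path("format").asText(null);
                if (format != null) {
                    formatToCount.merge(format, 1, Integer::sum);
                }
            }
            String bestFormat = formatToCount.entrySet().stream()
                    .max(Map.Entry.comparingByValue())
                    .map(Map.Entry::getKey).orElse(null);

            // Select the indexes of all files matching the best format
            ArrayNode fileSelectionIndexes = mapper.createArrayNode();
            for (int i = 0; i < fileRecords.size(); i++) {
                if (bestFormat != null
                        && bestFormat.equals(fileRecords.get(i).path("format").asText(null))) {
                    fileSelectionIndexes.add(i);
                }
            }

            // Prints: text/line-based/*sv [0,1]
            System.out.println(bestFormat + " " + fileSelectionIndexes);
        }
    }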
|
||||
|
||||
static public String getCommonFormatForSelectedFiles(ImportingJob job, JSONArray fileSelectionIndexes) {
|
||||
JSONObject retrievalRecord = job.getRetrievalRecord();
|
||||
static public String getCommonFormatForSelectedFiles(ImportingJob job, ArrayNode fileSelectionIndexes) {
|
||||
ObjectNode retrievalRecord = job.getRetrievalRecord();
|
||||
|
||||
final Map<String, Integer> formatToCount = new HashMap<String, Integer>();
|
||||
List<String> formats = new ArrayList<String>();
|
||||
|
||||
JSONArray fileRecords = JSONUtilities.getArray(retrievalRecord, "files");
|
||||
int count = fileSelectionIndexes.length();
|
||||
ArrayNode fileRecords = JSONUtilities.getArray(retrievalRecord, "files");
|
||||
int count = fileSelectionIndexes.size();
|
||||
for (int i = 0; i < count; i++) {
|
||||
int index = JSONUtilities.getIntElement(fileSelectionIndexes, i, -1);
|
||||
if (index >= 0 && index < fileRecords.length()) {
|
||||
JSONObject fileRecord = JSONUtilities.getObjectElement(fileRecords, index);
|
||||
if (index >= 0 && index < fileRecords.size()) {
|
||||
ObjectNode fileRecord = JSONUtilities.getObjectElement(fileRecords, index);
|
||||
String format = JSONUtilities.getString(fileRecord, "format", null);
|
||||
if (format != null) {
|
||||
if (formatToCount.containsKey(format)) {
|
||||
@ -927,18 +929,18 @@ public class ImportingUtilities {
|
||||
}
|
||||
|
||||
static String guessBetterFormat(ImportingJob job, String bestFormat) {
|
||||
JSONObject retrievalRecord = job.getRetrievalRecord();
|
||||
ObjectNode retrievalRecord = job.getRetrievalRecord();
|
||||
return retrievalRecord != null ? guessBetterFormat(job, retrievalRecord, bestFormat) : bestFormat;
|
||||
}
|
||||
|
||||
static String guessBetterFormat(ImportingJob job, JSONObject retrievalRecord, String bestFormat) {
|
||||
JSONArray fileRecords = JSONUtilities.getArray(retrievalRecord, "files");
|
||||
static String guessBetterFormat(ImportingJob job, ObjectNode retrievalRecord, String bestFormat) {
|
||||
ArrayNode fileRecords = JSONUtilities.getArray(retrievalRecord, "files");
|
||||
return fileRecords != null ? guessBetterFormat(job, fileRecords, bestFormat) : bestFormat;
|
||||
}
|
||||
|
||||
static String guessBetterFormat(ImportingJob job, JSONArray fileRecords, String bestFormat) {
|
||||
if (bestFormat != null && fileRecords != null && fileRecords.length() > 0) {
|
||||
JSONObject firstFileRecord = JSONUtilities.getObjectElement(fileRecords, 0);
|
||||
static String guessBetterFormat(ImportingJob job, ArrayNode fileRecords, String bestFormat) {
|
||||
if (bestFormat != null && fileRecords != null && fileRecords.size() > 0) {
|
||||
ObjectNode firstFileRecord = JSONUtilities.getObjectElement(fileRecords, 0);
|
||||
String encoding = getEncoding(firstFileRecord);
|
||||
String location = JSONUtilities.getString(firstFileRecord, "location", null);
|
||||
|
||||
@ -969,7 +971,7 @@ public class ImportingUtilities {
|
||||
return bestFormat;
|
||||
}
|
||||
|
||||
static void rankFormats(ImportingJob job, final String bestFormat, JSONArray rankedFormats) {
|
||||
static void rankFormats(ImportingJob job, final String bestFormat, ArrayNode rankedFormats) {
|
||||
final Map<String, String[]> formatToSegments = new HashMap<String, String[]>();
|
||||
|
||||
boolean download = bestFormat == null ? true :
|
||||
@ -1023,12 +1025,12 @@ public class ImportingUtilities {
|
||||
}
|
||||
|
||||
for (String format : formats) {
|
||||
JSONUtilities.append(rankedFormats, format);
|
||||
rankedFormats.add(format);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static public void previewParse(ImportingJob job, String format, JSONObject optionObj, List<Exception> exceptions) {
|
||||
static public void previewParse(ImportingJob job, String format, ObjectNode optionObj, List<Exception> exceptions) {
|
||||
Format record = ImportingManager.formatToRecord.get(format);
|
||||
if (record == null || record.parser == null) {
|
||||
// TODO: what to do?
|
||||
@ -1054,7 +1056,7 @@ public class ImportingUtilities {
|
||||
static public long createProject(
|
||||
final ImportingJob job,
|
||||
final String format,
|
||||
final JSONObject optionObj,
|
||||
final ObjectNode optionObj,
|
||||
final List<Exception> exceptions,
|
||||
boolean synchronous) {
|
||||
final Format record = ImportingManager.formatToRecord.get(format);
|
||||
@ -1084,7 +1086,7 @@ public class ImportingUtilities {
|
||||
static private void createProjectSynchronously(
|
||||
final ImportingJob job,
|
||||
final String format,
|
||||
final JSONObject optionObj,
|
||||
final ObjectNode optionObj,
|
||||
final List<Exception> exceptions,
|
||||
final Format record,
|
||||
final Project project
|
||||
@ -1105,15 +1107,15 @@ public class ImportingUtilities {
|
||||
if (exceptions.size() == 0) {
|
||||
project.update(); // update all internal models, indexes, caches, etc.
|
||||
|
||||
boolean hasMetadataFileRecord = ((JSONObject)job.getRetrievalRecord()).has(METADATA_FILE_KEY);
|
||||
boolean hasMetadataFileRecord = ((ObjectNode)job.getRetrievalRecord()).has(METADATA_FILE_KEY);
|
||||
|
||||
if (hasMetadataFileRecord) {
|
||||
JSONObject metadataFileRecord = (JSONObject) job.getRetrievalRecord().get(METADATA_FILE_KEY);
|
||||
ObjectNode metadataFileRecord = (ObjectNode) job.getRetrievalRecord().get(METADATA_FILE_KEY);
|
||||
|
||||
String metadataFormat = (String)metadataFileRecord.get("metaDataFormat");
|
||||
String metadataFormat = JSONUtilities.getString(metadataFileRecord, "metaDataFormat", null);
|
||||
IMetadata metadata = MetadataFactory.buildMetadata(MetadataFormat.valueOf(metadataFormat));
|
||||
|
||||
String relativePath = metadataFileRecord.getString("location");
|
||||
String relativePath = JSONUtilities.getString(metadataFileRecord, "location", null);
|
||||
File metadataFile = new File(job.getRawDataDir(), relativePath);
|
||||
metadata.loadFromFile(metadataFile);
|
||||
|
||||
@ -1159,10 +1161,10 @@ public class ImportingUtilities {
|
||||
}
|
||||
|
||||
try {
|
||||
JSONObject fieldsJSON = TypeInferrer.getInstance().infer(listCells,
|
||||
ObjectNode fieldsJSON = JSONUtilities.jsonObjectToObjectNode(TypeInferrer.getInstance().infer(listCells,
|
||||
project.columnModel.getColumnNames().toArray(new String[0]),
|
||||
100);
|
||||
populateColumnTypes(project.columnModel, fieldsJSON.getJSONArray(Schema.JSON_KEY_FIELDS));
|
||||
100));
|
||||
populateColumnTypes(project.columnModel, JSONUtilities.getArray(fieldsJSON, Schema.JSON_KEY_FIELDS));
|
||||
} catch (TypeInferringException e) {
|
||||
logger.error("infer column type exception.", ExceptionUtils.getStackTrace(e));
|
||||
}
|
||||
@ -1171,7 +1173,7 @@ public class ImportingUtilities {
|
||||
|
||||
private static void populateDataPackageMetadata(Project project, ProjectMetadata pmd, DataPackageMetadata metadata) {
|
||||
// project metadata
|
||||
JSONObject pkg = metadata.getPackage().getJson();
|
||||
ObjectNode pkg = JSONUtilities.jsonObjectToObjectNode(metadata.getPackage().getJson());
|
||||
|
||||
pmd.setName(getDataPackageProperty(pkg, Package.JSON_KEY_NAME));
|
||||
pmd.setDescription(getDataPackageProperty(pkg, PackageExtension.JSON_KEY_DESCRIPTION));
|
||||
@ -1182,18 +1184,18 @@ public class ImportingUtilities {
|
||||
pmd.setVersion(getDataPackageProperty(pkg, PackageExtension.JSON_KEY_VERSION));
|
||||
|
||||
if (pkg.has(PackageExtension.JSON_KEY_KEYWORKS)) {
|
||||
String[] tags = pkg.getJSONArray(PackageExtension.JSON_KEY_KEYWORKS).toList().toArray(new String[0]);
|
||||
String[] tags = JSONUtilities.getStringArray(pkg, PackageExtension.JSON_KEY_KEYWORKS);
|
||||
pmd.appendTags(tags);
|
||||
}
|
||||
|
||||
// column model
|
||||
JSONObject schema = metadata.getPackage().getResources().get(0).getSchema();
|
||||
ObjectNode schema = JSONUtilities.jsonObjectToObjectNode(metadata.getPackage().getResources().get(0).getSchema());
|
||||
if (schema != null) {
|
||||
populateColumnTypes(project.columnModel, schema.getJSONArray(Schema.JSON_KEY_FIELDS));
|
||||
populateColumnTypes(project.columnModel, JSONUtilities.getArray(schema, Schema.JSON_KEY_FIELDS));
|
||||
}
|
||||
}
|
||||
|
||||
private static String getDataPackageProperty(JSONObject pkg, String key) {
|
||||
private static String getDataPackageProperty(ObjectNode pkg, String key) {
|
||||
return JSONUtilities.getString(pkg, key, StringUtils.EMPTY);
|
||||
}
|
||||
/**
|
||||
@ -1201,12 +1203,12 @@ public class ImportingUtilities {
|
||||
* @param columnModel
|
||||
* @param fieldsJSON
|
||||
*/
|
||||
private static void populateColumnTypes(ColumnModel columnModel, JSONArray fieldsJSON) {
|
||||
private static void populateColumnTypes(ColumnModel columnModel, ArrayNode fieldsJSON) {
|
||||
int cellIndex = 0;
|
||||
Iterator<Object> iter = fieldsJSON.iterator();
|
||||
Iterator<JsonNode> iter = fieldsJSON.iterator();
|
||||
while(iter.hasNext()){
|
||||
JSONObject fieldJsonObj = (JSONObject)iter.next();
|
||||
Field field = new Field(fieldJsonObj);
|
||||
ObjectNode fieldJsonObj = (ObjectNode)iter.next();
|
||||
Field field = new Field(JSONUtilities.objectNodeToJsonNode(fieldJsonObj));
|
||||
|
||||
Column column = columnModel.getColumnByCellIndex(cellIndex);
|
||||
column.setType(field.getType());
|
||||
@ -1224,7 +1226,7 @@ public class ImportingUtilities {
|
||||
* @param optionObj
|
||||
* @return
|
||||
*/
|
||||
static public ProjectMetadata createProjectMetadata(JSONObject optionObj) {
|
||||
static public ProjectMetadata createProjectMetadata(ObjectNode optionObj) {
|
||||
ProjectMetadata pm = new ProjectMetadata();
|
||||
PreferenceStore ps = ProjectManager.singleton.getPreferenceStore();
|
||||
|
||||
|
@ -59,7 +59,6 @@ import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.ProjectManager;
|
||||
import com.google.refine.history.HistoryEntryManager;
|
||||
import com.google.refine.model.Project;
|
||||
|
@ -38,7 +38,6 @@ import java.util.Properties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonTypeIdResolver;
|
||||
|
||||
import com.google.refine.history.HistoryEntry;
|
||||
import com.google.refine.operations.OperationRegistry;
|
||||
import com.google.refine.operations.OperationResolver;
|
||||
|
@ -49,7 +49,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.databind.InjectableValues;
|
||||
|
||||
import com.google.refine.expr.EvalError;
|
||||
import com.google.refine.expr.ExpressionUtils;
|
||||
import com.google.refine.expr.HasFields;
|
||||
|
@ -46,7 +46,6 @@ import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
|
||||
import com.google.refine.InterProjectModel;
|
||||
import com.google.refine.model.recon.ReconConfig;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
@ -43,7 +43,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonView;
|
||||
|
||||
import com.google.refine.util.JsonViews;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
||||
|
@ -46,7 +46,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonView;
|
||||
|
||||
import com.google.refine.expr.HasFields;
|
||||
import com.google.refine.util.JsonViews;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
@ -41,7 +41,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.core.JsonToken;
|
||||
|
||||
import com.google.refine.expr.HasFields;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
||||
|
@ -38,7 +38,6 @@ import java.io.Writer;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.expr.ExpressionUtils;
|
||||
import com.google.refine.model.Recon.Judgment;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
@ -37,7 +37,6 @@ import java.io.IOException;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
||||
|
||||
|
@ -44,7 +44,6 @@ import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.expr.ExpressionUtils;
|
||||
|
||||
public class RecordModel {
|
||||
|
@ -43,7 +43,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.databind.InjectableValues;
|
||||
|
||||
import com.google.refine.expr.CellTuple;
|
||||
import com.google.refine.expr.HasFields;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
@ -41,7 +41,6 @@ import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import com.google.refine.history.Change;
|
||||
import com.google.refine.history.History;
|
||||
import com.google.refine.model.Project;
|
||||
|
@ -63,7 +63,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonRawValue;
|
||||
import com.fasterxml.jackson.annotation.JsonUnwrapped;
|
||||
import com.fasterxml.jackson.annotation.JsonView;
|
||||
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.google.refine.ProjectManager;
|
||||
import com.google.refine.preference.PreferenceStore;
|
||||
import com.google.refine.preference.TopList;
|
||||
@ -362,8 +362,8 @@ public class ProjectMetadata extends AbstractMetadata {
|
||||
updateModified();
|
||||
}
|
||||
|
||||
public void appendImportOptionMetadata(JSONObject obj) {
|
||||
_importOptionMetadata.put(obj);
|
||||
public void appendImportOptionMetadata(ObjectNode options) {
|
||||
_importOptionMetadata.put(options);
|
||||
updateModified();
|
||||
}
|
||||
|
||||
|
@ -46,7 +46,6 @@ import org.slf4j.LoggerFactory;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonTypeIdResolver;
|
||||
|
||||
import com.google.refine.model.Cell;
|
||||
import com.google.refine.model.Project;
|
||||
import com.google.refine.model.Recon;
|
||||
|
@ -60,7 +60,6 @@ import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonView;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import com.google.refine.expr.functions.ToDate;
|
||||
import com.google.refine.model.ReconCandidate;
|
||||
import com.google.refine.model.ReconType;
|
||||
|
@ -60,7 +60,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
|
||||
import com.google.refine.expr.ExpressionUtils;
|
||||
import com.google.refine.model.Cell;
|
||||
import com.google.refine.model.Project;
|
||||
|
@ -38,7 +38,6 @@ import java.util.List;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.browsing.Engine;
|
||||
import com.google.refine.browsing.EngineConfig;
|
||||
import com.google.refine.browsing.FilteredRows;
|
||||
|
@ -34,7 +34,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
package com.google.refine.operations;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.browsing.Engine;
|
||||
import com.google.refine.browsing.EngineConfig;
|
||||
import com.google.refine.model.AbstractOperation;
|
||||
|
@ -33,21 +33,12 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package com.google.refine.operations;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.json.JSONObject;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
import com.fasterxml.jackson.databind.JsonMappingException;
|
||||
|
||||
import com.google.refine.model.AbstractOperation;
|
||||
import com.google.refine.model.Project;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
|
||||
import edu.mit.simile.butterfly.ButterflyModule;
|
||||
|
||||
|
@ -7,7 +7,6 @@ import com.fasterxml.jackson.databind.DatabindContext;
|
||||
import com.fasterxml.jackson.databind.JavaType;
|
||||
import com.fasterxml.jackson.databind.jsontype.impl.TypeIdResolverBase;
|
||||
import com.fasterxml.jackson.databind.type.TypeFactory;
|
||||
|
||||
import com.google.refine.model.AbstractOperation;
|
||||
|
||||
public class OperationResolver extends TypeIdResolverBase {
|
||||
|
@ -37,7 +37,6 @@ import java.util.List;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.browsing.Engine.Mode;
|
||||
import com.google.refine.browsing.EngineConfig;
|
||||
import com.google.refine.browsing.RowVisitor;
|
||||
|
@ -37,7 +37,6 @@ import java.util.List;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.browsing.Engine;
|
||||
import com.google.refine.browsing.Engine.Mode;
|
||||
import com.google.refine.browsing.EngineConfig;
|
||||
|
@ -40,7 +40,6 @@ import java.util.Map;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.expr.ExpressionUtils;
|
||||
import com.google.refine.history.HistoryEntry;
|
||||
import com.google.refine.model.AbstractOperation;
|
||||
|
@ -42,7 +42,6 @@ import java.util.Properties;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.browsing.EngineConfig;
|
||||
import com.google.refine.browsing.RowVisitor;
|
||||
import com.google.refine.expr.Evaluable;
|
||||
|
@ -38,7 +38,6 @@ import java.util.List;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.expr.ExpressionUtils;
|
||||
import com.google.refine.history.HistoryEntry;
|
||||
import com.google.refine.model.AbstractOperation;
|
||||
|
@ -43,7 +43,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.history.HistoryEntry;
|
||||
import com.google.refine.model.AbstractOperation;
|
||||
import com.google.refine.model.Cell;
|
||||
|
@ -39,7 +39,6 @@ import java.util.Properties;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.browsing.EngineConfig;
|
||||
import com.google.refine.browsing.RowVisitor;
|
||||
import com.google.refine.expr.Evaluable;
|
||||
|
@ -41,7 +41,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import com.google.refine.history.HistoryEntry;
|
||||
import com.google.refine.model.AbstractOperation;
|
||||
import com.google.refine.model.Cell;
|
||||
|