add archive column (#2573)
Co-authored-by: Antonin Delpeuch <antonin@delpeuch.eu>
This commit is contained in:
parent
d57d76f7df
commit
04055153a1
@ -285,6 +285,7 @@ public class DatabaseImportController implements ImportingController {
|
||||
job,
|
||||
new DBQueryResultPreviewReader(job, databaseService, querySource, columns, dbQueryInfo, 100),
|
||||
querySource,
|
||||
"", // archivefilename
|
||||
limit,
|
||||
options,
|
||||
exceptions
|
||||
@ -414,6 +415,7 @@ public class DatabaseImportController implements ImportingController {
|
||||
job,
|
||||
new DBQueryResultImportReader(job, databaseService, querySource, columns, dbQueryInfo, getCreateBatchSize()),
|
||||
querySource,
|
||||
"", //archivefilename,
|
||||
limit,
|
||||
options,
|
||||
exceptions
|
||||
|
@ -142,6 +142,7 @@ public class GDataImporter {
|
||||
job,
|
||||
new WorksheetBatchRowReader(job, fileSource, service, spreadsheetId, worksheetEntry),
|
||||
fileSource,
|
||||
"", //archivefilename
|
||||
limit,
|
||||
options,
|
||||
exceptions
|
||||
|
@ -24,7 +24,9 @@
|
||||
<td><input bind="limitInput" type="text" class="lightweight" size="2" value="0" />
|
||||
<label for="$limit">row(s) of data</label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeFileSourcesCheckbox" id="$include-file-sources" /></td>
|
||||
<td><label for="$include-file-sources">Store file source<br/>(file names, URLs)<br/>in each row</label></td></tr>
|
||||
<td><label for="$include-file-sources">Store file source</label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeArchiveFileCheckbox" id="$include-archive-file" /></td>
|
||||
<td><label for="$include-archive-file">Store archive file</label></td></tr>
|
||||
</table></div></td>
|
||||
</tr>
|
||||
</table></div>
|
@ -89,6 +89,7 @@ Refine.PCAxisParserUI.prototype.getOptions = function() {
|
||||
options.skipDataLines = -1;
|
||||
}
|
||||
options.includeFileSources = this._optionContainerElmts.includeFileSourcesCheckbox[0].checked;
|
||||
options.includeArchiveFileName = this._optionContainerElmts.includeArchiveFileCheckbox[0].checked;
|
||||
|
||||
return options;
|
||||
};
|
||||
@ -120,6 +121,9 @@ Refine.PCAxisParserUI.prototype._initialize = function() {
|
||||
if (this._config.includeFileSources) {
|
||||
this._optionContainerElmts.includeFileSourcesCheckbox.prop("checked", true);
|
||||
}
|
||||
if (this._config.includeArchiveFileName) {
|
||||
this._optionContainerElmts.includeArchiveFileCheckbox.prop("checked", true);
|
||||
}
|
||||
|
||||
var onChange = function() {
|
||||
self._scheduleUpdatePreview();
|
||||
|
@ -71,6 +71,7 @@ public class PCAxisImporter extends TabularImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
Reader reader,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
@ -88,8 +89,8 @@ public class PCAxisImporter extends TabularImportingParserBase {
|
||||
|
||||
TabularImportingParserBase.readTable(
|
||||
project, metadata, job, dataReader,
|
||||
fileSource, limit, options, exceptions);
|
||||
fileSource, archiveFileName, limit, options, exceptions);
|
||||
|
||||
super.parseOneFile(project, metadata, job, fileSource, reader, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, reader, limit, options, exceptions);
|
||||
}
|
||||
}
|
||||
|
@ -46,7 +46,7 @@ public class WikidataRefineTest extends PowerMockTestCase {
|
||||
SeparatorBasedImporter importer = new SeparatorBasedImporter();
|
||||
|
||||
List<Exception> exceptions = new ArrayList<Exception>();
|
||||
importer.parseOneFile(project, metadata, job, "filesource", new StringReader(input), -1, options, exceptions);
|
||||
importer.parseOneFile(project, metadata, job, "filesource", "archivefile", new StringReader(input), -1, options, exceptions);
|
||||
project.update();
|
||||
ProjectManager.singleton.registerProject(project, metadata);
|
||||
|
||||
|
@ -129,6 +129,7 @@ public class ExcelImporter extends TabularImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
InputStream inputStream,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
@ -221,14 +222,14 @@ public class ExcelImporter extends TabularImportingParserBase {
|
||||
metadata,
|
||||
job,
|
||||
dataReader,
|
||||
fileSource + "#" + sheet.getSheetName(),
|
||||
fileSource + "#" + sheet.getSheetName(), archiveFileName,
|
||||
limit,
|
||||
options,
|
||||
exceptions
|
||||
);
|
||||
}
|
||||
|
||||
super.parseOneFile(project, metadata, job, fileSource, inputStream, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, inputStream, limit, options, exceptions);
|
||||
}
|
||||
|
||||
static protected Cell extractCell(org.apache.poi.ss.usermodel.Cell cell) {
|
||||
|
@ -83,6 +83,7 @@ public class FixedWidthImporter extends TabularImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
Reader reader,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
@ -132,9 +133,9 @@ public class FixedWidthImporter extends TabularImportingParserBase {
|
||||
}
|
||||
};
|
||||
|
||||
TabularImportingParserBase.readTable(project, metadata, job, dataReader, fileSource, limit, options, exceptions);
|
||||
TabularImportingParserBase.readTable(project, metadata, job, dataReader, fileSource, archiveFileName, limit, options, exceptions);
|
||||
|
||||
super.parseOneFile(project, metadata, job, fileSource, reader, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, reader, limit, options, exceptions);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -71,6 +71,7 @@ abstract public class ImportingParserBase implements ImportingParser {
|
||||
List<ObjectNode> fileRecords, String format) {
|
||||
ObjectNode options = ParsingUtilities.mapper.createObjectNode();
|
||||
JSONUtilities.safePut(options, "includeFileSources", fileRecords.size() > 1);
|
||||
JSONUtilities.safePut(options, "includeArchiveFileName", ImportingUtilities.hasArchiveFileField(fileRecords));
|
||||
|
||||
return options;
|
||||
}
|
||||
@ -109,13 +110,14 @@ abstract public class ImportingParserBase implements ImportingParser {
|
||||
) throws IOException {
|
||||
final File file = ImportingUtilities.getFile(job, fileRecord);
|
||||
final String fileSource = ImportingUtilities.getFileSource(fileRecord);
|
||||
final String archiveFileName = ImportingUtilities.getArchiveFileName(fileRecord);
|
||||
|
||||
progress.startFile(fileSource);
|
||||
try {
|
||||
InputStream inputStream = ImporterUtilities.openAndTrackFile(fileSource, file, progress);
|
||||
try {
|
||||
if (useInputStream) {
|
||||
parseOneFile(project, metadata, job, fileSource, inputStream, limit, options, exceptions);
|
||||
parseOneFile(project, metadata, job, fileSource, archiveFileName, inputStream, limit, options, exceptions);
|
||||
} else {
|
||||
String commonEncoding = JSONUtilities.getString(options, "encoding", null);
|
||||
if (commonEncoding != null && commonEncoding.isEmpty()) {
|
||||
@ -125,7 +127,7 @@ abstract public class ImportingParserBase implements ImportingParser {
|
||||
Reader reader = ImportingUtilities.getReaderFromStream(
|
||||
inputStream, fileRecord, commonEncoding);
|
||||
|
||||
parseOneFile(project, metadata, job, fileSource, reader, limit, options, exceptions);
|
||||
parseOneFile(project, metadata, job, fileSource, archiveFileName, reader, limit, options, exceptions);
|
||||
}
|
||||
} finally {
|
||||
inputStream.close();
|
||||
@ -140,16 +142,18 @@ abstract public class ImportingParserBase implements ImportingParser {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
Reader reader,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
List<Exception> exceptions
|
||||
) {
|
||||
pushImportingOptions(metadata, fileSource, options);
|
||||
pushImportingOptions(metadata, "fileSource", fileSource, options);
|
||||
pushImportingOptions(metadata, "archiveFileName", archiveFileName, options);
|
||||
}
|
||||
|
||||
private void pushImportingOptions(ProjectMetadata metadata, String fileSource, ObjectNode options) {
|
||||
options.put("fileSource", fileSource);
|
||||
private void pushImportingOptions(ProjectMetadata metadata, String key, String value, ObjectNode options) {
|
||||
options.put(key, value);
|
||||
// set the import options to metadata:
|
||||
metadata.appendImportOptionMetadata(options);
|
||||
}
|
||||
@ -159,21 +163,42 @@ abstract public class ImportingParserBase implements ImportingParser {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
InputStream inputStream,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
List<Exception> exceptions
|
||||
) {
|
||||
pushImportingOptions(metadata, fileSource, options);
|
||||
pushImportingOptions(metadata, "fileSource", fileSource, options);
|
||||
pushImportingOptions(metadata, "archiveFileName", archiveFileName, options);
|
||||
}
|
||||
|
||||
|
||||
protected static int addFilenameColumn(Project project) {
|
||||
protected static int addFilenameColumn(Project project, boolean archiveColumnAdded) {
|
||||
String fileNameColumnName = "File";
|
||||
int columnId = archiveColumnAdded? 1 : 0;
|
||||
if (project.columnModel.getColumnByName(fileNameColumnName) == null) {
|
||||
try {
|
||||
project.columnModel.addColumn(
|
||||
0, new Column(project.columnModel.allocateNewCellIndex(), fileNameColumnName), false);
|
||||
columnId, new Column(project.columnModel.allocateNewCellIndex(), fileNameColumnName), false);
|
||||
|
||||
return columnId;
|
||||
} catch (ModelException e) {
|
||||
// Shouldn't happen: We already checked for duplicate name.
|
||||
logger.error("ModelException adding Filename column",e);
|
||||
}
|
||||
return -1;
|
||||
} else {
|
||||
return columnId;
|
||||
}
|
||||
}
|
||||
|
||||
protected static int addArchiveColumn(Project project) {
|
||||
String ArchiveColumnName = "Archive";
|
||||
if (project.columnModel.getColumnByName(ArchiveColumnName) == null) {
|
||||
try {
|
||||
project.columnModel.addColumn(
|
||||
0, new Column(project.columnModel.allocateNewCellIndex(), ArchiveColumnName), false);
|
||||
|
||||
return 0;
|
||||
} catch (ModelException e) {
|
||||
|
@ -201,13 +201,13 @@ public class JsonImporter extends TreeImportingParserBase {
|
||||
|
||||
@Override
|
||||
public void parseOneFile(Project project, ProjectMetadata metadata,
|
||||
ImportingJob job, String fileSource, InputStream is,
|
||||
ImportingJob job, String fileSource, String archiveFileName, InputStream is,
|
||||
ImportColumnGroup rootColumnGroup, int limit, ObjectNode options, List<Exception> exceptions) {
|
||||
|
||||
parseOneFile(project, metadata, job, fileSource,
|
||||
parseOneFile(project, metadata, job, fileSource, archiveFileName,
|
||||
new JSONTreeReader(is), rootColumnGroup, limit, options, exceptions);
|
||||
|
||||
super.parseOneFile(project, metadata, job, fileSource, is, rootColumnGroup, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, is, rootColumnGroup, limit, options, exceptions);
|
||||
}
|
||||
|
||||
static public class JSONTreeReader implements TreeReader {
|
||||
|
@ -66,6 +66,7 @@ public class LineBasedImporter extends TabularImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
Reader reader,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
@ -129,8 +130,8 @@ public class LineBasedImporter extends TabularImportingParserBase {
|
||||
}
|
||||
};
|
||||
|
||||
TabularImportingParserBase.readTable(project, metadata, job, dataReader, fileSource, limit, options, exceptions);
|
||||
TabularImportingParserBase.readTable(project, metadata, job, dataReader, fileSource, archiveFileName, limit, options, exceptions);
|
||||
|
||||
super.parseOneFile(project, metadata, job, fileSource, reader, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, reader, limit, options, exceptions);
|
||||
}
|
||||
}
|
||||
|
@ -126,6 +126,7 @@ public class OdsImporter extends TabularImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
InputStream inputStream,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
@ -188,13 +189,14 @@ public class OdsImporter extends TabularImportingParserBase {
|
||||
job,
|
||||
dataReader,
|
||||
fileSource + "#" + table.getTableName(),
|
||||
archiveFileName,
|
||||
limit,
|
||||
options,
|
||||
exceptions
|
||||
);
|
||||
}
|
||||
|
||||
super.parseOneFile(project, metadata, job, fileSource, inputStream, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, inputStream, limit, options, exceptions);
|
||||
}
|
||||
|
||||
static protected Serializable extractCell(OdfTableCell cell) {
|
||||
|
@ -76,7 +76,7 @@ public class RdfTripleImporter extends ImportingParserBase {
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
public void parseOneFile(Project project, ProjectMetadata metadata, ImportingJob job, String fileSource,
|
||||
public void parseOneFile(Project project, ProjectMetadata metadata, ImportingJob job, String fileSource, String archiveFileName,
|
||||
InputStream input, int limit, ObjectNode options, List<Exception> exceptions) {
|
||||
// create an empty model
|
||||
Model model = ModelFactory.createDefaultModel();
|
||||
@ -162,6 +162,6 @@ public class RdfTripleImporter extends ImportingParserBase {
|
||||
exceptions.add(e);
|
||||
}
|
||||
|
||||
super.parseOneFile(project, metadata, job, fileSource, input, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, input, limit, options, exceptions);
|
||||
}
|
||||
}
|
||||
|
@ -87,6 +87,7 @@ public class SeparatorBasedImporter extends TabularImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
Reader reader,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
@ -157,8 +158,8 @@ public class SeparatorBasedImporter extends TabularImportingParserBase {
|
||||
}
|
||||
};
|
||||
|
||||
TabularImportingParserBase.readTable(project, metadata, job, dataReader, fileSource, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, lnReader, limit, options, exceptions);
|
||||
TabularImportingParserBase.readTable(project, metadata, job, dataReader, fileSource, archiveFileName, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, lnReader, limit, options, exceptions);
|
||||
}
|
||||
|
||||
static protected ArrayList<Object> getCells(String line, CSVParser parser, LineNumberReader lnReader)
|
||||
|
@ -83,6 +83,7 @@ abstract public class TabularImportingParserBase extends ImportingParserBase {
|
||||
ImportingJob job,
|
||||
TableDataReader reader,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
List<Exception> exceptions
|
||||
@ -105,10 +106,14 @@ abstract public class TabularImportingParserBase extends ImportingParserBase {
|
||||
boolean storeBlankCellsAsNulls = JSONUtilities.getBoolean(options, "storeBlankCellsAsNulls", true);
|
||||
boolean includeFileSources = JSONUtilities.getBoolean(options, "includeFileSources", false);
|
||||
boolean trimStrings = JSONUtilities.getBoolean(options, "trimStrings", false);
|
||||
boolean includeArchiveFileName = JSONUtilities.getBoolean(options, "includeArchiveFileName", false);
|
||||
|
||||
int filenameColumnIndex = -1;
|
||||
int filenameColumnIndex = -1, archiveColumnIndex = -1;
|
||||
if (includeArchiveFileName && archiveFileName != null) {
|
||||
archiveColumnIndex = addArchiveColumn(project);
|
||||
}
|
||||
if (includeFileSources) {
|
||||
filenameColumnIndex = addFilenameColumn(project);
|
||||
filenameColumnIndex = addFilenameColumn(project, archiveColumnIndex >=0);
|
||||
}
|
||||
|
||||
List<String> columnNames = new ArrayList<String>();
|
||||
@ -187,10 +192,11 @@ abstract public class TabularImportingParserBase extends ImportingParserBase {
|
||||
}
|
||||
|
||||
if (rowHasData || storeBlankRows) {
|
||||
if (includeFileSources && filenameColumnIndex >= 0) {
|
||||
row.setCell(
|
||||
filenameColumnIndex,
|
||||
new Cell(fileSource, null));
|
||||
if (archiveColumnIndex >= 0) {
|
||||
row.setCell(archiveColumnIndex, new Cell(archiveFileName, null));
|
||||
}
|
||||
if (filenameColumnIndex >= 0) {
|
||||
row.setCell(filenameColumnIndex, new Cell(fileSource, null));
|
||||
}
|
||||
project.rows.add(row);
|
||||
}
|
||||
@ -206,8 +212,8 @@ abstract public class TabularImportingParserBase extends ImportingParserBase {
|
||||
}
|
||||
}
|
||||
|
||||
public void parseOneFile(Project project, ProjectMetadata metadata, ImportingJob job, String fileSource,
|
||||
public void parseOneFile(Project project, ProjectMetadata metadata, ImportingJob job, String fileSource, String archiveFileName,
|
||||
Reader dataReader, int limit, ObjectNode options, List<Exception> exceptions) {
|
||||
super.parseOneFile(project, metadata, job, fileSource, dataReader, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, dataReader, limit, options, exceptions);
|
||||
}
|
||||
}
|
||||
|
@ -691,6 +691,7 @@ public class WikitextImporter extends TabularImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
Reader reader,
|
||||
int limit,
|
||||
ObjectNode options,
|
||||
@ -749,7 +750,7 @@ public class WikitextImporter extends TabularImportingParserBase {
|
||||
// TODO this does not seem to do anything - maybe we need to pass it to OpenRefine in some other way?
|
||||
}
|
||||
|
||||
TabularImportingParserBase.readTable(project, metadata, job, dataReader, fileSource, limit, options, exceptions);
|
||||
TabularImportingParserBase.readTable(project, metadata, job, dataReader, fileSource, archiveFileName, limit, options, exceptions);
|
||||
|
||||
// Add reconciliation statistics
|
||||
if (dataReader.columnReconciled != null) {
|
||||
@ -768,7 +769,7 @@ public class WikitextImporter extends TabularImportingParserBase {
|
||||
e1.printStackTrace();
|
||||
}
|
||||
|
||||
super.parseOneFile(project, metadata, job, fileSource, reader, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, reader, limit, options, exceptions);
|
||||
}
|
||||
|
||||
private StandardReconConfig getReconConfig(String url) {
|
||||
|
@ -193,15 +193,15 @@ public class XmlImporter extends TreeImportingParserBase {
|
||||
|
||||
@Override
|
||||
public void parseOneFile(Project project, ProjectMetadata metadata,
|
||||
ImportingJob job, String fileSource, InputStream inputStream,
|
||||
ImportingJob job, String fileSource, String archiveFileName, InputStream inputStream,
|
||||
ImportColumnGroup rootColumnGroup, int limit, ObjectNode options,
|
||||
List<Exception> exceptions) {
|
||||
|
||||
try {
|
||||
parseOneFile(project, metadata, job, fileSource,
|
||||
parseOneFile(project, metadata, job, fileSource, archiveFileName,
|
||||
new XmlParser(inputStream), rootColumnGroup, limit, options, exceptions);
|
||||
|
||||
super.parseOneFile(project, metadata, job, fileSource, inputStream, rootColumnGroup, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, inputStream, rootColumnGroup, limit, options, exceptions);
|
||||
} catch (XMLStreamException e) {
|
||||
exceptions.add(e);
|
||||
} catch (IOException e) {
|
||||
|
@ -33,18 +33,22 @@ public class ImportParameters {
|
||||
protected boolean guessDataType;
|
||||
protected boolean includeFileSources;
|
||||
protected String fileSource;
|
||||
protected boolean includeArchiveFileName;
|
||||
protected String archiveFileName;
|
||||
|
||||
public ImportParameters(boolean trimStrings, boolean storeEmptyStrings, boolean guessCellValueTypes,
|
||||
boolean includeFileSources, String fileSource) {
|
||||
boolean includeFileSources, String fileSource, boolean includeArchiveFileName, String archiveFileName) {
|
||||
this.trimStrings = trimStrings;
|
||||
this.storeEmptyStrings = storeEmptyStrings;
|
||||
this.guessDataType = guessCellValueTypes;
|
||||
this.includeFileSources = includeFileSources;
|
||||
this.fileSource = fileSource;
|
||||
this.includeArchiveFileName = includeArchiveFileName;
|
||||
this.archiveFileName = archiveFileName;
|
||||
}
|
||||
|
||||
public ImportParameters(boolean trimStrings, boolean storeEmptyStrings, boolean guessCellValueTypes) {
|
||||
this(trimStrings, storeEmptyStrings, guessCellValueTypes, false, "");
|
||||
this(trimStrings, storeEmptyStrings, guessCellValueTypes, false, "", false, "");
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -112,13 +112,14 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
) throws IOException {
|
||||
final File file = ImportingUtilities.getFile(job, fileRecord);
|
||||
final String fileSource = ImportingUtilities.getFileSource(fileRecord);
|
||||
final String archiveFileName = ImportingUtilities.getArchiveFileName(fileRecord);
|
||||
|
||||
progress.startFile(fileSource);
|
||||
try {
|
||||
InputStream inputStream = ImporterUtilities.openAndTrackFile(fileSource, file, progress);
|
||||
try {
|
||||
if (useInputStream) {
|
||||
parseOneFile(project, metadata, job, fileSource, inputStream,
|
||||
parseOneFile(project, metadata, job, fileSource, archiveFileName, inputStream,
|
||||
rootColumnGroup, limit, options, exceptions);
|
||||
} else {
|
||||
String commonEncoding = JSONUtilities.getString(options, "encoding", null);
|
||||
@ -127,7 +128,7 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
}
|
||||
|
||||
Reader reader = ImportingUtilities.getFileReader(file, fileRecord, commonEncoding);
|
||||
parseOneFile(project, metadata, job, fileSource, reader,
|
||||
parseOneFile(project, metadata, job, fileSource, archiveFileName, reader,
|
||||
rootColumnGroup, limit, options, exceptions);
|
||||
}
|
||||
} finally {
|
||||
@ -149,6 +150,7 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
Reader reader,
|
||||
ImportColumnGroup rootColumnGroup,
|
||||
int limit,
|
||||
@ -169,6 +171,7 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
InputStream inputStream,
|
||||
ImportColumnGroup rootColumnGroup,
|
||||
int limit,
|
||||
@ -176,7 +179,7 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
List<Exception> exceptions
|
||||
) {
|
||||
// throw new NotImplementedException();
|
||||
super.parseOneFile(project, metadata, job, fileSource, inputStream, limit, options, exceptions);
|
||||
super.parseOneFile(project, metadata, job, fileSource, archiveFileName, inputStream, limit, options, exceptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -188,6 +191,7 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
ProjectMetadata metadata,
|
||||
ImportingJob job,
|
||||
String fileSource,
|
||||
String archiveFileName,
|
||||
TreeReader treeParser,
|
||||
ImportColumnGroup rootColumnGroup,
|
||||
int limit,
|
||||
@ -214,17 +218,26 @@ abstract public class TreeImportingParserBase extends ImportingParserBase {
|
||||
boolean guessCellValueTypes = JSONUtilities.getBoolean(options, "guessCellValueTypes", true);
|
||||
|
||||
boolean includeFileSources = JSONUtilities.getBoolean(options, "includeFileSources", false);
|
||||
int filenameColumnIndex = -1;
|
||||
boolean includeArchiveFileName = JSONUtilities.getBoolean(options, "includeArchiveFileName", false);
|
||||
int filenameColumnIndex = -1, archiveColumnIndex = -1;
|
||||
if (includeArchiveFileName && archiveFileName != null) {
|
||||
archiveColumnIndex = addArchiveColumn(project);
|
||||
assert archiveColumnIndex == 0;
|
||||
}
|
||||
if (includeFileSources) {
|
||||
filenameColumnIndex = addFilenameColumn(project);
|
||||
filenameColumnIndex = addFilenameColumn(project, includeArchiveFileName);
|
||||
// If the column add fails for any reason, we'll end up overwriting data in the first column
|
||||
if (includeArchiveFileName) {
|
||||
assert filenameColumnIndex == 1;
|
||||
} else {
|
||||
assert filenameColumnIndex == 0;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
XmlImportUtilities.importTreeData(treeParser, project, recordPath, rootColumnGroup, limit2,
|
||||
new ImportParameters(trimStrings, storeEmptyStrings, guessCellValueTypes, includeFileSources,
|
||||
fileSource));
|
||||
fileSource, includeArchiveFileName, archiveFileName));
|
||||
} catch (Exception e){
|
||||
exceptions.add(e);
|
||||
}
|
||||
|
@ -359,7 +359,7 @@ public class XmlImportUtilities extends TreeImportUtilities {
|
||||
ImportRecord record = new ImportRecord();
|
||||
|
||||
processSubRecord(project, parser, rootColumnGroup, record, 0, parameter);
|
||||
addImportRecordToProject(record, project, parameter.includeFileSources, parameter.fileSource);
|
||||
addImportRecordToProject(record, project, parameter.includeFileSources, parameter.fileSource, parameter.includeArchiveFileName, parameter.archiveFileName);
|
||||
}
|
||||
|
||||
|
||||
@ -411,20 +411,29 @@ public class XmlImportUtilities extends TreeImportUtilities {
|
||||
}
|
||||
if (record != null) {
|
||||
addImportRecordToProject(record, project,
|
||||
parameter.includeFileSources, parameter.fileSource);
|
||||
parameter.includeFileSources, parameter.fileSource, parameter.includeArchiveFileName, parameter.archiveFileName);
|
||||
}
|
||||
}
|
||||
|
||||
static protected void addImportRecordToProject(ImportRecord record, Project project,
|
||||
boolean includeFileSources, String fileSource) {
|
||||
boolean includeFileSources, String fileSource, boolean includeArchiveFileName, String archiveFileName) {
|
||||
int archiveColumnIndex = -1, fileSourceColumnIndex = -1;
|
||||
if (includeArchiveFileName && archiveFileName != null) {
|
||||
archiveColumnIndex = 0;
|
||||
}
|
||||
if (includeFileSources) {
|
||||
fileSourceColumnIndex = archiveColumnIndex == 0? 1 : 0;
|
||||
}
|
||||
for (List<Cell> row : record.rows) {
|
||||
if (row.size() > 0) {
|
||||
Row realRow = new Row(row.size());
|
||||
for (int c = 0; c < row.size(); c++) {
|
||||
if (c == 0 && includeFileSources) { // to add the file source:
|
||||
realRow.setCell(
|
||||
0,
|
||||
new Cell(fileSource, null));
|
||||
if (c == archiveColumnIndex) {
|
||||
realRow.setCell(archiveColumnIndex, new Cell(archiveFileName, null));
|
||||
continue;
|
||||
}
|
||||
if (c == fileSourceColumnIndex) {
|
||||
realRow.setCell(fileSourceColumnIndex, new Cell(fileSource, null));
|
||||
continue;
|
||||
}
|
||||
Cell cell = row.get(c);
|
||||
|
@ -89,6 +89,7 @@ import com.google.refine.importing.UrlRewriter.Result;
|
||||
import com.google.refine.model.Project;
|
||||
import com.google.refine.util.JSONUtilities;
|
||||
import com.google.refine.util.ParsingUtilities;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class ImportingUtilities {
|
||||
final static protected Logger logger = LoggerFactory.getLogger("importing-utilities");
|
||||
@ -500,6 +501,19 @@ public class ImportingUtilities {
|
||||
);
|
||||
}
|
||||
|
||||
static public String getArchiveFileName(ObjectNode fileRecord) {
|
||||
return JSONUtilities.getString(
|
||||
fileRecord,
|
||||
"archiveFileName",
|
||||
null
|
||||
);
|
||||
}
|
||||
|
||||
static public boolean hasArchiveFileField(List<ObjectNode> fileRecords) {
|
||||
List<ObjectNode> filterResults = fileRecords.stream().filter(fileRecord -> getArchiveFileName(fileRecord) != null).collect(Collectors.toList());
|
||||
return filterResults.size() > 0;
|
||||
}
|
||||
|
||||
static private abstract class SavingUpdate {
|
||||
public long totalExpectedSize = 0;
|
||||
public long totalRetrievedSize = 0;
|
||||
|
@ -175,7 +175,7 @@ public class RefineTest extends PowerMockTestCase {
|
||||
SeparatorBasedImporter importer = new SeparatorBasedImporter();
|
||||
|
||||
List<Exception> exceptions = new ArrayList<Exception>();
|
||||
importer.parseOneFile(project, metadata, job, "filesource", new StringReader(input), -1, options, exceptions);
|
||||
importer.parseOneFile(project, metadata, job, "filesource", "archivefile", new StringReader(input), -1, options, exceptions);
|
||||
project.update();
|
||||
ProjectManager.singleton.registerProject(project, metadata);
|
||||
|
||||
|
@ -141,6 +141,35 @@ public class ExcelImporterTests extends ImporterTest {
|
||||
verify(options, times(1)).get("storeBlankCellsAsNulls");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void readXlsFromArchiveFile() throws FileNotFoundException, IOException{
|
||||
|
||||
ArrayNode sheets = ParsingUtilities.mapper.createArrayNode();
|
||||
sheets.add(ParsingUtilities.mapper.readTree("{name: \"file-source#Test Sheet 0\", fileNameAndSheetIndex: \"file-source#0\", rows: 31, selected: true}"));
|
||||
whenGetArrayOption("sheets", options, sheets);
|
||||
|
||||
whenGetIntegerOption("ignoreLines", options, 0);
|
||||
whenGetIntegerOption("headerLines", options, 0);
|
||||
whenGetIntegerOption("skipDataLines", options, 0);
|
||||
whenGetIntegerOption("limit", options, -1);
|
||||
whenGetBooleanOption("storeBlankCellsAsNulls",options,true);
|
||||
whenGetBooleanOption("includeArchiveFileName", options, true);
|
||||
|
||||
InputStream stream = new FileInputStream(xlsFile);
|
||||
|
||||
try {
|
||||
parseOneFile(SUT, stream);
|
||||
} catch (Exception e) {
|
||||
Assert.fail(e.getMessage());
|
||||
}
|
||||
|
||||
Assert.assertEquals(project.rows.get(0).cells.size(), COLUMNS + 1);
|
||||
Assert.assertEquals(project.columnModel.columns.get(0).getName(), "Archive");
|
||||
Assert.assertEquals(project.rows.get(0).cells.get(0).value, "archive-file");
|
||||
|
||||
verify(options, times(1)).get("includeArchiveFileName");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void readXlsx() throws FileNotFoundException, IOException{
|
||||
|
||||
|
@ -107,4 +107,38 @@ public class FixedWidthImporterTests extends ImporterTest {
|
||||
Assert.assertEquals((String)project.rows.get(2).getCellValue(1), "rt");
|
||||
Assert.assertNull(project.rows.get(2).getCellValue(2));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void readFixedWidthFromArchiveFile(){
|
||||
StringReader reader = new StringReader(SAMPLE_ROW + "\nTooShort");
|
||||
|
||||
ArrayNode columnWidths = ParsingUtilities.mapper.createArrayNode();
|
||||
JSONUtilities.append(columnWidths, 6);
|
||||
JSONUtilities.append(columnWidths, 9);
|
||||
JSONUtilities.append(columnWidths, 5);
|
||||
whenGetArrayOption("columnWidths", options, columnWidths);
|
||||
|
||||
ArrayNode columnNames = ParsingUtilities.mapper.createArrayNode();
|
||||
columnNames.add("Col 1");
|
||||
columnNames.add("Col 2");
|
||||
columnNames.add("Col 3");
|
||||
whenGetArrayOption("columnNames", options, columnNames);
|
||||
|
||||
whenGetIntegerOption("ignoreLines", options, 0);
|
||||
whenGetIntegerOption("headerLines", options, 0);
|
||||
whenGetIntegerOption("skipDataLines", options, 0);
|
||||
whenGetIntegerOption("limit", options, -1);
|
||||
whenGetBooleanOption("storeBlankCellsAsNulls",options,true);
|
||||
whenGetBooleanOption("includeArchiveFileName", options, true);
|
||||
|
||||
try {
|
||||
parseOneFile(SUT, reader);
|
||||
} catch (Exception e) {
|
||||
Assert.fail(e.getMessage());
|
||||
}
|
||||
|
||||
Assert.assertEquals(project.rows.get(0).cells.size(), 4);
|
||||
Assert.assertEquals(project.columnModel.columns.get(0).getName(), "Archive");
|
||||
Assert.assertEquals(project.rows.get(0).cells.get(0).value, "archive-file");
|
||||
}
|
||||
}
|
||||
|
@ -90,6 +90,7 @@ public abstract class ImporterTest extends RefineTest {
|
||||
metadata,
|
||||
job,
|
||||
"file-source",
|
||||
"archive-file",
|
||||
reader,
|
||||
-1,
|
||||
options,
|
||||
@ -104,6 +105,7 @@ public abstract class ImporterTest extends RefineTest {
|
||||
metadata,
|
||||
job,
|
||||
"file-source",
|
||||
"archive-file",
|
||||
inputStream,
|
||||
-1,
|
||||
options,
|
||||
@ -119,6 +121,7 @@ public abstract class ImporterTest extends RefineTest {
|
||||
metadata,
|
||||
job,
|
||||
"file-source",
|
||||
"archive-file",
|
||||
reader,
|
||||
rootColumnGroup,
|
||||
-1,
|
||||
@ -143,6 +146,7 @@ public abstract class ImporterTest extends RefineTest {
|
||||
metadata,
|
||||
job,
|
||||
"file-source",
|
||||
"archive-file",
|
||||
inputStream,
|
||||
rootColumnGroup,
|
||||
-1,
|
||||
@ -163,6 +167,7 @@ public abstract class ImporterTest extends RefineTest {
|
||||
metadata,
|
||||
job,
|
||||
"file-source",
|
||||
"archive-file",
|
||||
reader,
|
||||
rootColumnGroup,
|
||||
-1,
|
||||
|
@ -135,6 +135,7 @@ public class JsonImporterTests extends ImporterTest {
|
||||
metadata,
|
||||
job,
|
||||
"file-source",
|
||||
"archive-file",
|
||||
inputStream,
|
||||
rootColumnGroup,
|
||||
-1,
|
||||
|
@ -118,6 +118,27 @@ public class TsvCsvImporterTests extends ImporterTest {
|
||||
Assert.assertEquals(project.rows.get(0).cells.get(2).value, "data3");
|
||||
}
|
||||
|
||||
@Test(dataProvider = "CSV-TSV-AutoDetermine")
|
||||
public void readSimpleData_CSV_fromArchiveFileName(String sep){
|
||||
//create input to test with
|
||||
String inputSeparator = sep == null ? "\t" : sep;
|
||||
String input = "col1" + inputSeparator + "col2" + inputSeparator + "col3\n" +
|
||||
"data1" + inputSeparator + "data2" + inputSeparator + "data3";
|
||||
|
||||
|
||||
try {
|
||||
prepareOptions(sep, -1, 0, 0, 1, false, false,"\"","[]", true);
|
||||
parseOneFile(SUT, new StringReader(input));
|
||||
} catch (Exception e) {
|
||||
Assert.fail("Exception during file parse",e);
|
||||
}
|
||||
|
||||
Assert.assertEquals(project.columnModel.columns.size(), 4);
|
||||
Assert.assertEquals(project.columnModel.columns.get(0).getName(), "Archive");
|
||||
Assert.assertEquals(project.rows.get(0).cells.size(), 4);
|
||||
Assert.assertEquals(project.rows.get(0).cells.get(0).value, "archive-file");
|
||||
}
|
||||
|
||||
@Test(dataProvider = "CSV-TSV-AutoDetermine")
|
||||
public void readSimpleData_CSV_1Header_1Row_GuessValues(String sep){
|
||||
//create input to test with
|
||||
@ -579,7 +600,7 @@ public class TsvCsvImporterTests extends ImporterTest {
|
||||
String input = "data1" + inputSeparator + "data2" + inputSeparator + "data3\n";
|
||||
|
||||
try {
|
||||
prepareOptions(sep, -1, 0, 0, 1, false, false,"\"","[\"col1\",\"col2\",\"col3\"]");
|
||||
prepareOptions(sep, -1, 0, 0, 1, false, false,"\"","[\"col1\",\"col2\",\"col3\"]", false);
|
||||
parseOneFile(SUT, new StringReader(input));
|
||||
} catch (Exception e) {
|
||||
Assert.fail("Exception during file parse",e);
|
||||
@ -655,7 +676,7 @@ public class TsvCsvImporterTests extends ImporterTest {
|
||||
String sep, int limit, int skip, int ignoreLines,
|
||||
int headerLines, boolean guessValueType, boolean ignoreQuotes, String quoteCharacter) {
|
||||
|
||||
prepareOptions(sep, limit, skip, ignoreLines, headerLines, guessValueType, ignoreQuotes, quoteCharacter,"[]");
|
||||
prepareOptions(sep, limit, skip, ignoreLines, headerLines, guessValueType, ignoreQuotes, quoteCharacter,"[]", false);
|
||||
}
|
||||
|
||||
protected void prepareOptions(
|
||||
@ -673,7 +694,7 @@ public class TsvCsvImporterTests extends ImporterTest {
|
||||
|
||||
protected void prepareOptions(
|
||||
String sep, int limit, int skip, int ignoreLines,
|
||||
int headerLines, boolean guessValueType, boolean ignoreQuotes, String quoteCharacter, String columnNames) {
|
||||
int headerLines, boolean guessValueType, boolean ignoreQuotes, String quoteCharacter, String columnNames, boolean includeArchiveFileName) {
|
||||
|
||||
whenGetStringOption("separator", options, sep);
|
||||
whenGetStringOption("quoteCharacter", options, quoteCharacter);
|
||||
@ -685,5 +706,6 @@ public class TsvCsvImporterTests extends ImporterTest {
|
||||
whenGetBooleanOption("processQuotes", options, !ignoreQuotes);
|
||||
whenGetBooleanOption("storeBlankCellsAsNulls", options, true);
|
||||
whenGetArrayOption("columnNames", options, ParsingUtilities.evaluateJsonStringToArrayNode(columnNames));
|
||||
whenGetBooleanOption("includeArchiveFileName", options, includeArchiveFileName);
|
||||
}
|
||||
}
|
||||
|
@ -202,7 +202,7 @@ public class KeyValueColumnizeTests extends RefineTest {
|
||||
+ "price,3.1\n";
|
||||
prepareOptions(",", 20, 0, 0, 1, false, false);
|
||||
List<Exception> exceptions = new ArrayList<Exception>();
|
||||
importer.parseOneFile(project, pm, job, "filesource", new StringReader(csv), -1, options, exceptions);
|
||||
importer.parseOneFile(project, pm, job, "filesource", "archivefile", new StringReader(csv), -1, options, exceptions);
|
||||
project.update();
|
||||
ProjectManager.singleton.registerProject(project, pm);
|
||||
|
||||
|
@ -123,7 +123,8 @@
|
||||
"core-index-parser/tabs": "mga tab (TSV)",
|
||||
"core-index-parser/escape": "Tangtanga ang espesyal nga karakter uban ni \\",
|
||||
"core-index-parser/lines-header": "Mga linya nga nagsugod sa kolum",
|
||||
"core-index-parser/store-source": "Pagbutang ug gigikan sa papeles<br/> 1(file names, URLs)<br/> 2sa kada laray",
|
||||
"core-index-parser/store-source": "Pagbutang ug gigikan sa papeles",
|
||||
"core-index-parser/store-archive": "ibutang ang file archive",
|
||||
"core-index-parser/use-quote": "Gamit ug karakter",
|
||||
"core-index-parser/ignore-first": "Ibaliwala ug una",
|
||||
"core-index-parser/quote-delimits-cells": "para isarado ang cells nga adunay kolum nga ibulag",
|
||||
|
@ -123,7 +123,8 @@
|
||||
"core-index-parser/tabs": "Tabs (TSV)",
|
||||
"core-index-parser/escape": "Markiere Sonderzeichen mit \\",
|
||||
"core-index-parser/lines-header": "Zeile(n) als Spaltenüberschriften",
|
||||
"core-index-parser/store-source": "Dateiquelle in jeder<br/>Zeile speichern<br/>(Dateinamen, URLs)",
|
||||
"core-index-parser/store-source": "Dateiquelle speichern",
|
||||
"core-index-parser/store-archive": "Archivdatei speichern",
|
||||
"core-index-parser/use-quote": "Zeichen verwenden",
|
||||
"core-index-parser/ignore-first": "Erste ignorieren",
|
||||
"core-index-parser/quote-delimits-cells": "zum Einschließen von Zellen mit Spaltentrennern",
|
||||
|
@ -139,7 +139,8 @@
|
||||
"core-index-parser/parse-references": "Extract references in additional columns",
|
||||
"core-index-parser/wiki-base-url": "Reconcile to wiki with base URL:",
|
||||
"core-index-parser/invalid-wikitext": "No table could be parsed. Are you sure this is a valid wiki table?",
|
||||
"core-index-parser/store-source": "Store file source <br/>(file names, URLs)<br/>in each row",
|
||||
"core-index-parser/store-source": "Store file source",
|
||||
"core-index-parser/store-archive": "Store archive file",
|
||||
"core-index-parser/preserve-empty": "Preserve empty strings",
|
||||
"core-index-parser/trim": "Trim leading & trailing whitespace from strings",
|
||||
"core-index-parser/json-parser": "Click on the first JSON { } node corresponding to the first record to load.",
|
||||
|
@ -121,7 +121,8 @@
|
||||
"core-index-parser/parse-next": "Seleccionar primera(s)",
|
||||
"core-index-parser/commas": "comas (CSV)",
|
||||
"core-index-parser/tabs": "tabulaciones (TSV)",
|
||||
"core-index-parser/store-source": "Cargar el origen del archivo<br/>(nombres, URLs)<br/>en cada fila",
|
||||
"core-index-parser/store-source": "Cargar el origen del archivo",
|
||||
"core-index-parser/store-archive": "almacenar archivo de almacenamiento",
|
||||
"core-index-parser/lines-header": "linea(s) para los nombres de las columnas",
|
||||
"core-index-parser/escape": "Ignorar caracteres especiales con \\",
|
||||
"core-index-parser/ignore-first": "Ignorar primera(s)",
|
||||
|
@ -123,7 +123,8 @@
|
||||
"core-index-parser/tabs": "mga tab (TSV)",
|
||||
"core-index-parser/escape": "Eskapo ang mga espesyal na character na may \\",
|
||||
"core-index-parser/lines-header": "(mga) linya bilang mga header ng hanay",
|
||||
"core-index-parser/store-source": "I-imbak ang pinagmulang file <br/> 1 (mga pangalan ng file, mga URL) <br/> 2in sa bawat hilera",
|
||||
"core-index-parser/store-source": "pinagmulan ng file file",
|
||||
"core-index-parser/store-archive": "mag-imbak ng file archive",
|
||||
"core-index-parser/use-quote": "Gamitin ang character",
|
||||
"core-index-parser/ignore-first": "Huwag pansinin muna",
|
||||
"core-index-parser/quote-delimits-cells": "upang masakop ang mga cell na naglalaman ng mga separator ng haligi",
|
||||
|
@ -124,7 +124,8 @@
|
||||
"core-index-parser/parse-next": "Analyser la ou les",
|
||||
"core-index-parser/commas": "une virgule (CSV)",
|
||||
"core-index-parser/tabs": "une tabulation (TSV)",
|
||||
"core-index-parser/store-source": "Indiquer la source du fichier<br/>(noms des fichiers, URLs)<br/>dans chaque ligne",
|
||||
"core-index-parser/store-source": "Indiquer la source du fichier",
|
||||
"core-index-parser/store-archive": "stocker le fichier d'archive",
|
||||
"core-index-parser/lines-header": "ligne(s) suivante(s) comme des entêtes de colonnes",
|
||||
"core-index-parser/escape": "Protéger les caractères spéciaux avec \\",
|
||||
"core-index-parser/use-quote": "Utiliser le caractère",
|
||||
|
@ -102,7 +102,8 @@
|
||||
"core-index-parser/parse-cell": "נתח תוכן התא כ <br/>מספרים, תאריכים וכו' ...",
|
||||
"core-index-parser/store-blank": "אכסון שורות ריקות",
|
||||
"core-index-parser/store-nulls": "אכסון תאים ריקים כ-nulls",
|
||||
"core-index-parser/store-source": "אכסון קובץ המקור <br/>(file names, URLs)<br/>בכל שורה",
|
||||
"core-index-parser/store-source": "אחסן את מקור הקבצים",
|
||||
"core-index-parser/store-archive": "אחסן קובץ ארכיב",
|
||||
"core-index-parser/preserve-empty": "שימור מחרוזות ריקות",
|
||||
"core-index-parser/trim": "חיתוך רווחים מקדימים ממחרוזות & ",
|
||||
"core-index-parser/json-parser": "הקליקו על צומת הג'ייסון {} התואם לרשומה הראשונה לטעינה.",
|
||||
|
@ -123,7 +123,8 @@
|
||||
"core-index-parser/tabs": "tabulátor (TSV)",
|
||||
"core-index-parser/escape": "Különleges karakterek feloldása ezzel \\",
|
||||
"core-index-parser/lines-header": "sort mint oszlopfejlécet",
|
||||
"core-index-parser/store-source": "Fájlforrás <br/>(fájlnév, URL) <br/>tárolása minden sorban",
|
||||
"core-index-parser/store-source": "Tárolja a fájlforrást",
|
||||
"core-index-parser/store-archive": "Tárolja az archívum fájlt",
|
||||
"core-index-parser/use-quote": "Használja a",
|
||||
"core-index-parser/ignore-first": "Hagyja ki az első",
|
||||
"core-index-parser/quote-delimits-cells": "karaktert az oszlopelválasztót tartalmazó cellák befoglalására",
|
||||
|
@ -121,7 +121,8 @@
|
||||
"core-index-parser/parse-next": "Analizza la/e prossima/e",
|
||||
"core-index-parser/commas": "virgole (CSV)",
|
||||
"core-index-parser/tabs": "tabs (TSV)",
|
||||
"core-index-parser/store-source": "Salva la sorgente <br/>(nome file, URLs)<br/>in ogni riga",
|
||||
"core-index-parser/store-source": "Salva la sorgente",
|
||||
"core-index-parser/store-archive": "Salva il file di archivio",
|
||||
"core-index-parser/lines-header": "linea/e come nomi delle colonne",
|
||||
"core-index-parser/escape": "Effettua l'escape di caratteri speciali con \\",
|
||||
"core-index-parser/use-quote": "Usa character",
|
||||
|
@ -123,6 +123,7 @@
|
||||
"core-index-parser/tabs": "タブ(TSV)",
|
||||
"core-index-parser/escape": "特殊文字はバックスラッシュでエスケープしてください",
|
||||
"core-index-parser/lines-header": "行分",
|
||||
"core-index-parser/store-archive": "アーカイブファイルの保存",
|
||||
"core-index-parser/store-source": "ソースファイル<br/>(ファイル名やURL)を<br/>各行に保存する",
|
||||
"core-index-parser/use-quote": "文字",
|
||||
"core-index-parser/ignore-first": "先頭を無視",
|
||||
|
@ -120,7 +120,8 @@
|
||||
"core-index-parser/parse-references": "Trekk ut referanser i ekstra kolonner",
|
||||
"core-index-parser/wiki-base-url": "Avstem mot wiki med rotnettadresse:",
|
||||
"core-index-parser/invalid-wikitext": "Ingen tabell kunne fortolkes. Er du sikker på at dette er en gyldig wikitabell?",
|
||||
"core-index-parser/store-source": "Lagre filkilde<br/>(filnavn, URL-er)<br/>i hver rad",
|
||||
"core-index-parser/store-source": "Lagre filkilde",
|
||||
"core-index-parser/store-archive": "Gem arkivfil",
|
||||
"core-index-parser/preserve-empty": "Bevar tomme strenger",
|
||||
"core-index-parser/trim": "Fjern innledende og avsluttende tomrom fra strenger",
|
||||
"core-index-parser/col-separated-by": "Kolonner adskilles av",
|
||||
|
@ -128,7 +128,8 @@
|
||||
"core-index-parser/include-raw-templates": "Sjablonen en afbeeldingen als ruwe wikitekst invoegen",
|
||||
"core-index-parser/parse-references": "Referenties in extra kolommen bijvoegen",
|
||||
"core-index-parser/invalid-wikitext": "Geen tabel kon worden verwerkt. Weet u zeker dat dit een geldige wikitabel is?",
|
||||
"core-index-parser/store-source": "Bestandsbron<br/>(bestandsnamen, URL's)<br/>in elke rij opslaan",
|
||||
"core-index-parser/store-source": "Bestandsbron opslaan",
|
||||
"core-index-parser/store-archive": "Bewaar archiefbestand",
|
||||
"core-index-parser/preserve-empty": "Lege tekenreeksen behouden",
|
||||
"core-index-parser/trim": "Voorloop- en volgspaties van tekenreeksen verwijderen",
|
||||
"core-index-parser/parse-every": "Verwerk elke",
|
||||
|
@ -123,7 +123,8 @@
|
||||
"core-index-parser/tabs": "tabs (TSV)",
|
||||
"core-index-parser/escape": "Ignorar caracteres especiais com \\",
|
||||
"core-index-parser/lines-header": "linha(s) como nomes das colunas",
|
||||
"core-index-parser/store-source": "Armazenar a origem do arquivo <br/>(nomes dos arquivos, URLs)<br/> em cada linha",
|
||||
"core-index-parser/store-source": "Armazenar a origem do arquivo",
|
||||
"core-index-parser/store-archive": "Armazene o arquivo morto",
|
||||
"core-index-parser/use-quote": "Usar caracter",
|
||||
"core-index-parser/ignore-first": "Ignorar primeira(s)",
|
||||
"core-index-parser/quote-delimits-cells": "encerrar células contendo separadores de coluna",
|
||||
|
@ -162,7 +162,8 @@
|
||||
"core-index-parser/parse-next": "Парсить следующие",
|
||||
"core-index-parser/commas": "запятыми (CSV)",
|
||||
"core-index-parser/tabs": "табами (TSV)",
|
||||
"core-index-parser/store-source": "Сохранять источники файла <br/>(имена файлов, URLs)<br/>в отдельной строке",
|
||||
"core-index-parser/store-source": "Сохранять источники файла",
|
||||
"core-index-parser/store-archive": "сохранить архивный файл",
|
||||
"core-index-parser/lines-header": "строк как заголовки колонок",
|
||||
"core-index-parser/escape": "Спецсимволы обрамлять с помощью \\",
|
||||
"core-index-parser/ignore-first": "Не учитывать первые",
|
||||
|
@ -130,7 +130,8 @@
|
||||
"core-index-parser/parse-references": "Extrahera referenser i extra kolumner",
|
||||
"core-index-parser/wiki-base-url": "Stäm av mot wiki med basadress:",
|
||||
"core-index-parser/invalid-wikitext": "Tabellen kunde ej tolkas. Är du säker på att detta är en giltig wiki-tabell?",
|
||||
"core-index-parser/store-source": "Spara filkälla<br/>(filnamn, webbadresser)<br/>i varje rad",
|
||||
"core-index-parser/store-source": "Spara filkälla",
|
||||
"core-index-parser/store-achive": "Lagra arkivfil",
|
||||
"core-index-parser/preserve-empty": "Behåll tomma strängar",
|
||||
"core-index-parser/trim": "Ta bort inledande och avslutande tomrum från strängar",
|
||||
"core-index-parser/json-parser": "Klicka på den första JSON {}-noden som motsvarar första posten som ska laddas.",
|
||||
|
@ -123,7 +123,8 @@
|
||||
"core-index-parser/tabs": "mga tab (TSV)",
|
||||
"core-index-parser/escape": "Iwasan ang espesyal na mga karakter na may \\",
|
||||
"core-index-parser/lines-header": "Ang mga linya bilang header ng mga kolum",
|
||||
"core-index-parser/store-source": "I-imbak ang pinagmulan ng file <br/>(mga pangalan ng file, URLs)<br/>bawat hilera",
|
||||
"core-index-parser/store-source": "I-imbak ang pinagmulan ng file",
|
||||
"core-index-parser/store-archive": "I-store ang archive file",
|
||||
"core-index-parser/use-quote": "Gumamit ng karakter",
|
||||
"core-index-parser/ignore-first": "Huwag munang intindihin",
|
||||
"core-index-parser/quote-delimits-cells": "ang pagpaloob sa mga cells na mayroong tagapaghiwalay ng kolum",
|
||||
|
@ -102,7 +102,8 @@
|
||||
"core-index-parser/parse-cell": "将单元格中的<br/>文本解析<br/>为<br/>数字,日期,…",
|
||||
"core-index-parser/store-blank": "保留空白行",
|
||||
"core-index-parser/store-nulls": "将空白单元格作为nulls保留",
|
||||
"core-index-parser/store-source": "在每一行<br/>(文件名称, URLs)<br/>保留文件信息",
|
||||
"core-index-parser/store-source": "保留文件信息",
|
||||
"core-index-parser/store-archive": "保留压缩文件",
|
||||
"core-index-parser/preserve-empty": "保留空字符串",
|
||||
"core-index-parser/trim": "移除字符串首尾的空白",
|
||||
"core-index-parser/json-parser": "点击的第一个JSON { } 节点 对应于第一个要加载的数据记录.",
|
||||
|
@ -42,6 +42,8 @@
|
||||
<td colspan="2"><label for="$store-blank-cells" id="or-import-null"></label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeFileSourcesCheckbox" id="$include-file-sources" /></td>
|
||||
<td><label for="$include-file-sources" id="or-import-source"></label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeArchiveFileCheckbox" id="$include-archive-file" /></td>
|
||||
<td><label for="$include-archive-file" id="or-import-archive"></label></td></tr>
|
||||
</table></div></td>
|
||||
</tr>
|
||||
</table></div>
|
@ -108,6 +108,7 @@ Refine.ExcelParserUI.prototype.getOptions = function() {
|
||||
options.storeBlankRows = this._optionContainerElmts.storeBlankRowsCheckbox[0].checked;
|
||||
options.storeBlankCellsAsNulls = this._optionContainerElmts.storeBlankCellsAsNullsCheckbox[0].checked;
|
||||
options.includeFileSources = this._optionContainerElmts.includeFileSourcesCheckbox[0].checked;
|
||||
options.includeArchiveFileName = this._optionContainerElmts.includeArchiveFileCheckbox[0].checked;
|
||||
|
||||
return options;
|
||||
};
|
||||
@ -136,6 +137,7 @@ Refine.ExcelParserUI.prototype._initialize = function() {
|
||||
$('#or-import-blank').text($.i18n('core-index-parser/store-blank'));
|
||||
$('#or-import-null').text($.i18n('core-index-parser/store-nulls'));
|
||||
$('#or-import-source').html($.i18n('core-index-parser/store-source'));
|
||||
$('#or-import-archive').html($.i18n('core-index-parser/store-archive'));
|
||||
|
||||
var sheetTable = this._optionContainerElmts.sheetRecordContainer[0];
|
||||
$.each(this._config.sheetRecords, function(i, v) {
|
||||
@ -188,6 +190,9 @@ Refine.ExcelParserUI.prototype._initialize = function() {
|
||||
if (this._config.includeFileSources) {
|
||||
this._optionContainerElmts.includeFileSourcesCheckbox.prop("checked", true);
|
||||
}
|
||||
if (this._config.includeArchiveFileName) {
|
||||
this._optionContainerElmts.includeArchiveFileCheckbox.prop("checked", true);
|
||||
}
|
||||
|
||||
var onChange = function() {
|
||||
self._scheduleUpdatePreview();
|
||||
|
@ -48,6 +48,8 @@
|
||||
<td colspan="2"><label for="$store-blank-cells" id="or-import-null"></label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeFileSourcesCheckbox" id="$include-file-sources" /></td>
|
||||
<td><label for="$include-file-sources" id="or-import-source"></label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeArchiveFileCheckbox" id="$include-archive-file" /></td>
|
||||
<td><label for="$include-archive-file" id="or-import-archive"></label></td></tr>
|
||||
</table></div></td>
|
||||
</tr>
|
||||
</table></div>
|
@ -109,6 +109,8 @@ Refine.FixedWidthParserUI.prototype.getOptions = function() {
|
||||
options.storeBlankRows = this._optionContainerElmts.storeBlankRowsCheckbox[0].checked;
|
||||
options.storeBlankCellsAsNulls = this._optionContainerElmts.storeBlankCellsAsNullsCheckbox[0].checked;
|
||||
options.includeFileSources = this._optionContainerElmts.includeFileSourcesCheckbox[0].checked;
|
||||
options.includeArchiveFile = this._optionContainerElmts.includeFileSourcesCheckbox[0].checked;
|
||||
options.includeArchiveFileName = this._optionContainerElmts.includeArchiveFileCheckbox[0].checked;
|
||||
|
||||
return options;
|
||||
};
|
||||
@ -140,6 +142,8 @@ Refine.FixedWidthParserUI.prototype._initialize = function() {
|
||||
$('#or-import-blank').text($.i18n('core-index-parser/store-blank'));
|
||||
$('#or-import-null').text($.i18n('core-index-parser/store-nulls'));
|
||||
$('#or-import-source').html($.i18n('core-index-parser/store-source'));
|
||||
$('#or-import-archive').html($.i18n('core-index-parser/store-archive'));
|
||||
|
||||
|
||||
this._optionContainerElmts.encodingInput
|
||||
.attr('value', this._config.encoding || '')
|
||||
@ -184,6 +188,9 @@ Refine.FixedWidthParserUI.prototype._initialize = function() {
|
||||
if (this._config.includeFileSources) {
|
||||
this._optionContainerElmts.includeFileSourcesCheckbox.attr("checked", "checked");
|
||||
}
|
||||
if (this._config.includeArchiveFileName) {
|
||||
this._optionContainerElmts.includeArchiveFileCheckbox.attr("checked", "checked");
|
||||
}
|
||||
|
||||
var onChange = function() {
|
||||
self._scheduleUpdatePreview();
|
||||
|
@ -22,6 +22,8 @@
|
||||
<td><label for="$guess" id="or-import-parseCell"></label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeFileSourcesCheckbox" id="$include-file-sources" /></td>
|
||||
<td colspan="2"><label for="$include-file-sources" id="or-import-source"></label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeArchiveFileCheckbox" id="$include-archive-file" /></td>
|
||||
<td colspan="2"><label for="$include-archive-file" id="or-import-archive"></label></td></tr>
|
||||
</table></div></td>
|
||||
</tr>
|
||||
</table></div>
|
@ -95,6 +95,8 @@ Refine.JsonParserUI.prototype.getOptions = function() {
|
||||
options.storeEmptyStrings = this._optionContainerElmts.storeEmptyStringsCheckbox[0].checked;
|
||||
|
||||
options.includeFileSources = this._optionContainerElmts.includeFileSourcesCheckbox[0].checked;
|
||||
options.includeArchiveFileName = this._optionContainerElmts.includeArchiveFileCheckbox[0].checked;
|
||||
|
||||
|
||||
return options;
|
||||
};
|
||||
@ -115,6 +117,7 @@ Refine.JsonParserUI.prototype._initialize = function() {
|
||||
$('#or-import-trim').html($.i18n('core-index-parser/trim'));
|
||||
$('#or-import-parseCell').html($.i18n('core-index-parser/parse-cell'));
|
||||
$('#or-import-source').html($.i18n('core-index-parser/store-source'));
|
||||
$('#or-import-archive').html($.i18n('core-index-parser/store-archive'));
|
||||
$('#or-import-jsonParser').text($.i18n('core-index-parser/json-parser'));
|
||||
|
||||
if (this._config.limit > 0) {
|
||||
@ -133,6 +136,9 @@ Refine.JsonParserUI.prototype._initialize = function() {
|
||||
if (this._config.includeFileSources) {
|
||||
this._optionContainerElmts.includeFileSourcesCheckbox.prop("checked", true);
|
||||
}
|
||||
if (this._config.includeArchiveFileName) {
|
||||
this._optionContainerElmts.includeArchiveFileCheckbox.prop("checked", true);
|
||||
}
|
||||
this._optionContainerElmts.pickRecordElementsButton.click(function() {
|
||||
self._showPickRecordNodesUI();
|
||||
});
|
||||
|
@ -25,6 +25,8 @@
|
||||
<td colspan="2"><label for="$store-blank-cells" id="or-import-null"></label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeFileSourcesCheckbox" id="$include-file-sources" /></td>
|
||||
<td><label for="$include-file-sources" id="or-import-source"></label></td></tr>
|
||||
<tr><td width="1%"><input type="checkbox" bind="includeArchiveFileCheckbox" id="$include-archive-file" /></td>
|
||||
<td><label for="$include-archive-file" id="or-import-archive"></label></td></tr>
|
||||
</table></div></td>
|
||||
|
||||
<td><div class="grid-layout layout-tightest"><table>
|
||||
|
@ -98,6 +98,7 @@ Refine.LineBasedParserUI.prototype.getOptions = function() {
  options.storeBlankRows = this._optionContainerElmts.storeBlankRowsCheckbox[0].checked;
  options.storeBlankCellsAsNulls = this._optionContainerElmts.storeBlankCellsAsNullsCheckbox[0].checked;
  options.includeFileSources = this._optionContainerElmts.includeFileSourcesCheckbox[0].checked;
  options.includeArchiveFileName = this._optionContainerElmts.includeArchiveFileCheckbox[0].checked;

  return options;
};
@ -117,6 +118,7 @@ Refine.LineBasedParserUI.prototype._initialize = function() {
  $('#or-import-blank').text($.i18n('core-index-parser/store-blank'));
  $('#or-import-null').text($.i18n('core-index-parser/store-nulls'));
  $('#or-import-source').html($.i18n('core-index-parser/store-source'));
  $('#or-import-archive').html($.i18n('core-index-parser/store-archive'));
  $('#or-import-ignore').text($.i18n('core-index-parser/ignore-first'));
  $('#or-import-lines').text($.i18n('core-index-parser/lines-beg'));
  $('#or-import-parse').text($.i18n('core-index-parser/parse-next'));
@ -158,6 +160,9 @@ Refine.LineBasedParserUI.prototype._initialize = function() {
  if (this._config.includeFileSources) {
    this._optionContainerElmts.includeFileSourcesCheckbox.prop("checked", true);
  }
  if (this._config.includeArchiveFileName) {
    this._optionContainerElmts.includeArchiveFileCheckbox.prop("checked", true);
  }

  var onChange = function() {
    self._scheduleUpdatePreview();
@ -78,6 +78,8 @@

<tr><td width="1%"><input type="checkbox" bind="includeFileSourcesCheckbox" id="$include-file-sources" /></td>
  <td><label for="$include-file-sources" id="or-import-source"></label></td></tr>
<tr><td width="1%"><input type="checkbox" bind="includeArchiveFileCheckbox" id="$include-archive-file" /></td>
  <td><label for="$include-archive-file" id="or-import-archive"></label></td></tr>
</table></div></td>
</tr>
</table></div>
@ -117,6 +117,7 @@ Refine.SeparatorBasedParserUI.prototype.getOptions = function() {

  options.storeBlankCellsAsNulls = this._optionContainerElmts.storeBlankCellsAsNullsCheckbox[0].checked;
  options.includeFileSources = this._optionContainerElmts.includeFileSourcesCheckbox[0].checked;
  options.includeArchiveFileName = this._optionContainerElmts.includeArchiveFileCheckbox[0].checked;
  options.trimStrings = this._optionContainerElmts.trimStringsCheckbox[0].checked;

  if (this._optionContainerElmts.columnNamesCheckbox[0].checked) {
@ -164,6 +165,7 @@ Refine.SeparatorBasedParserUI.prototype._initialize = function() {
  $('#or-import-blank').text($.i18n('core-index-parser/store-blank'));
  $('#or-import-null').text($.i18n('core-index-parser/store-nulls'));
  $('#or-import-source').html($.i18n('core-index-parser/store-source'));
  $('#or-import-archive').html($.i18n('core-index-parser/store-archive'));

  this._optionContainerElmts.encodingInput
    .attr('value', this._config.encoding || '')
@ -238,6 +240,9 @@ Refine.SeparatorBasedParserUI.prototype._initialize = function() {
  if (this._config.includeFileSources) {
    this._optionContainerElmts.includeFileSourcesCheckbox.prop("checked", true);
  }
  if (this._config.includeArchiveFileName) {
    this._optionContainerElmts.includeArchiveFileCheckbox.prop("checked", true);
  }
  if (this._config.trimStrings) {
    this._optionContainerElmts.trimStringsCheckbox.attr("checked", "unchecked");
  }
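
One detail worth noting across these hunks: two jQuery idioms are used for pre-ticking the checkboxes. The fixed-width UI keeps the older .attr("checked", "checked") form, while the other UIs use .prop("checked", true), the form recommended since jQuery 1.6; both leave a freshly built dialog checkbox ticked. The snippet below is illustrative only — the checkbox is created on the fly rather than bound from any dialog.

// Illustrative comparison; requires jQuery, element created here rather than taken from a dialog.
var box = $('<input type="checkbox" />');
box.prop("checked", true);       // sets the live DOM property directly (preferred)
console.log(box[0].checked);     // true

box = $('<input type="checkbox" />');
box.attr("checked", "checked");  // legacy form still used in the fixed-width hunk above
console.log(box[0].checked);     // true as well — on a fresh element the checked attribute makes it render checked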
@ -29,6 +29,9 @@
<tr><td width="1%"><input type="checkbox" bind="includeFileSourcesCheckbox" id="$include-file-sources" /></td>
  <td><label for="$include-file-sources" id="or-import-source"></label></td></tr>

<tr><td width="1%"><input type="checkbox" bind="includeArchiveFileCheckbox" id="$include-archive-file" /></td>
  <td><label for="$include-archive-file" id="or-import-archive"></label></td></tr>

<tr>
  <td width="1%"></td>
  <td><button class="button" bind="previewButton"></button></td>
@ -100,6 +100,7 @@ Refine.WikitextParserUI.prototype.getOptions = function() {

  options.storeBlankCellsAsNulls = this._optionContainerElmts.storeBlankCellsAsNullsCheckbox[0].checked;
  options.includeFileSources = this._optionContainerElmts.includeFileSourcesCheckbox[0].checked;
  options.includeArchiveFileName = this._optionContainerElmts.includeArchiveFileCheckbox[0].checked;

  options.reconService = ReconciliationManager.ensureDefaultServicePresent();
@ -128,6 +129,8 @@ Refine.WikitextParserUI.prototype._initialize = function() {
  $('#or-import-blank').text($.i18n('core-index-parser/store-blank'));
  $('#or-import-null').text($.i18n('core-index-parser/store-nulls'));
  $('#or-import-source').html($.i18n('core-index-parser/store-source'));
  $('#or-import-archive').html($.i18n('core-index-parser/store-archive'));

  /*
  this._optionContainerElmts.encodingInput
@ -181,6 +184,9 @@ Refine.WikitextParserUI.prototype._initialize = function() {
  if (this._config.includeFileSources) {
    this._optionContainerElmts.includeFileSourcesCheckbox.prop("checked", true);
  }
  if (this._config.includeArchiveFileName) {
    this._optionContainerElmts.includeArchiveFileCheckbox.prop("checked", true);
  }

  var onChange = function() {
    self._scheduleUpdatePreview();
@ -23,6 +23,8 @@
  <td><label for="$guess" id="or-import-parseCell"></label></td></tr>
<tr><td width="1%"><input type="checkbox" bind="includeFileSourcesCheckbox" id="$include-file-sources" /></td>
  <td colspan="2"><label for="$include-file-sources" id="or-import-store"></label></td></tr>
<tr><td width="1%"><input type="checkbox" bind="includeArchiveFileCheckbox" id="$include-archive-file" /></td>
  <td colspan="2"><label for="$include-archive-file" id="or-import-archive"></label></td></tr>
</table></div></td>
</tr>
</table></div>
@ -93,6 +93,7 @@ Refine.XmlParserUI.prototype.getOptions = function() {
  options.storeEmptyStrings = this._optionContainerElmts.storeEmptyStringsCheckbox[0].checked;

  options.includeFileSources = this._optionContainerElmts.includeFileSourcesCheckbox[0].checked;
  options.includeArchiveFileName = this._optionContainerElmts.includeArchiveFileCheckbox[0].checked;

  return options;
};
@ -113,6 +114,7 @@ Refine.XmlParserUI.prototype._initialize = function() {
  $('#or-import-trim').html($.i18n('core-index-parser/trim'));
  $('#or-import-parseCell').html($.i18n('core-index-parser/parse-cell'));
  $('#or-import-store').html($.i18n('core-index-parser/store-source'));
  $('#or-import-archive').html($.i18n('core-index-parser/store-archive'));

  if (this._config.limit > 0) {
    this._optionContainerElmts.limitCheckbox.prop("checked", true);
@ -130,6 +132,9 @@ Refine.XmlParserUI.prototype._initialize = function() {
  if (this._config.includeFileSources) {
    this._optionContainerElmts.includeFileSourcesCheckbox.prop("checked", true);
  }
  if (this._config.includeArchiveFileName) {
    this._optionContainerElmts.includeArchiveFileCheckbox.prop("checked", true);
  }
  this._optionContainerElmts.pickRecordElementsButton.click(function() {
    self._config.recordPath = undefined;
    self._showPickRecordElementsUI();
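
The $.i18n('core-index-parser/store-archive') calls added throughout these hunks rely on a matching message in the core English translation bundle, which is not visible in this portion of the diff. The lines below only illustrate the lookup; both the bundle path and the English wording are assumptions, not content of the commit.

// Illustration only — the bundle path and wording are assumed, not shown in this diff.
// A message entry along the lines of
//   "core-index-parser/store-archive": "Store archive file"
// (in main/webapp/modules/core/langs/translation-en.json, path assumed) is what lets
// each dialog label its new checkbox:
$('#or-import-archive').html($.i18n('core-index-parser/store-archive'));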