Merge branch master

This commit is contained in:
Tom Morris 2013-04-13 19:02:20 -04:00
commit 22a9acd04b
161 changed files with 12012 additions and 11600 deletions

View File

@ -20,7 +20,7 @@
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/commons-lang-2.5.jar" sourcepath="main/webapp/WEB-INF/lib-src/commons-lang-2.5-sources.jar"/>
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/dom4j-1.6.1.jar"/>
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/icu4j-4.2.1.jar"/>
<classpathentry kind="lib" path="main/webapp/WEB-INF/lib/jackson-core-asl-1.9.9.jar" sourcepath="main/webapp/WEB-INF/lib-src/jackson-src-1.9.9.zip"/>
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/jackson-core-asl-1.9.12.jar" sourcepath="main/webapp/WEB-INF/lib-src/jackson-src-1.9.9.zip"/>
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/jcl-over-slf4j-1.5.6.jar"/>
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/jrdf-0.5.6.jar" sourcepath="main/webapp/WEB-INF/lib-src/jrdf-0.5.6-sources.jar"/>
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/json-20100208.jar" sourcepath="main/webapp/WEB-INF/lib-src/json-20100208-sources.jar"/>
@ -46,7 +46,7 @@
<classpathentry exported="true" kind="lib" path="server/lib/slf4j-api-1.5.6.jar"/>
<classpathentry exported="true" kind="lib" path="server/lib/slf4j-log4j12-1.5.6.jar"/>
<classpathentry exported="true" kind="lib" path="broker/appengine/WEB-INF/lib/slf4j-jdk14-1.5.6.jar"/>
<classpathentry exported="true" kind="lib" path="extensions/jython/module/MOD-INF/lib/jython-2.5.3.jar"/>
<classpathentry exported="true" kind="lib" path="extensions/jython/module/MOD-INF/lib/jython-standalone-2.5.3.jar"/>
<classpathentry exported="true" kind="lib" path="broker/core/module/MOD-INF/lib/bdb-je-4.0.103.jar"/>
<classpathentry exported="true" kind="lib" path="extensions/gdata/module/MOD-INF/lib/gdata-core-1.0.jar"/>
<classpathentry exported="true" kind="lib" path="extensions/gdata/module/MOD-INF/lib/gdata-spreadsheet-3.0.jar"/>
@ -71,6 +71,11 @@
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/signpost-commonshttp4-1.2.1.2.jar" sourcepath="main/webapp/WEB-INF/lib-src/signpost-commonshttp4-1.2.1.2-sources.jar"/>
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/signpost-core-1.2.1.2.jar" sourcepath="main/webapp/WEB-INF/lib-src/signpost-core-1.2.1.2-sources.jar"/>
<classpathentry exported="true" kind="lib" path="main/webapp/WEB-INF/lib/guava-13.0.jar"/>
<classpathentry exported="true" kind="lib" path="extensions/freebase/module/MOD-INF/lib/google-api-client-1.13.2-beta.jar" sourcepath="extensions/freebase/module/MOD-INF/libsrc/google-api-client-1.13.2-beta-sources.jar"/>
<classpathentry exported="true" kind="lib" path="extensions/freebase/module/MOD-INF/lib/google-http-client-1.13.1-beta.jar"/>
<classpathentry exported="true" kind="lib" path="extensions/freebase/module/MOD-INF/lib/google-api-services-freebase-v1-rev25-1.13.2-beta.jar" sourcepath="extensions/freebase/module/MOD-INF/libsrc/google-api-services-freebase-v1-rev25-1.13.2-beta-sources.jar"/>
<classpathentry exported="true" kind="lib" path="extensions/freebase/module/MOD-INF/lib/google-http-client-jackson-1.13.1-beta.jar"/>
<classpathentry exported="true" kind="lib" path="extensions/freebase/module/MOD-INF/lib/mail.jar"/>
<classpathentry kind="lib" path="extensions/gdata/module/MOD-INF/lib/google-api-client-1.13.2-beta.jar" sourcepath="extensions/gdata/module/MOD-INF/lib-src/google-api-client-1.13.2-beta-sources.jar"/>
<classpathentry kind="lib" path="extensions/gdata/module/MOD-INF/lib/google-api-services-fusiontables-v1-rev17-1.13.2-beta.jar" sourcepath="extensions/gdata/module/MOD-INF/lib-src/google-api-services-fusiontables-v1-rev17-1.13.2-beta-sources.jar"/>
<classpathentry kind="lib" path="extensions/gdata/module/MOD-INF/lib/google-http-client-1.13.1-beta.jar" sourcepath="extensions/gdata/module/MOD-INF/lib-src/google-http-client-1.13.1-beta-sources.jar"/>

.gitattributes vendored Normal file
View File

@ -0,0 +1,22 @@
# Auto detect text files and perform LF normalization
* text=auto
# Custom for Visual Studio
*.cs diff=csharp
*.sln merge=union
*.csproj merge=union
*.vbproj merge=union
*.fsproj merge=union
*.dbproj merge=union
# Standard to msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain

View File

@ -10,8 +10,14 @@
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.jsdt.core.javascriptValidator</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.wst.jsdt.core.jsNature</nature>
</natures>
</projectDescription>

.settings/.jsdtscope Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry excluding="main/webapp/modules/core/MOD-INF/controller.js|main/webapp/modules/core/externals/|test-output/" kind="src" path=""/>
<classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.JRE_CONTAINER"/>
<classpathentry kind="output" path=""/>
</classpath>

View File

@ -0,0 +1,63 @@
eclipse.preferences.version=1
org.eclipse.wst.jsdt.core.compiler.codegen.inlineJsrBytecode=disabled
org.eclipse.wst.jsdt.core.compiler.codegen.targetPlatform=1.2
org.eclipse.wst.jsdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.wst.jsdt.core.compiler.compliance=1.4
org.eclipse.wst.jsdt.core.compiler.debug.lineNumber=generate
org.eclipse.wst.jsdt.core.compiler.debug.localVariable=generate
org.eclipse.wst.jsdt.core.compiler.debug.sourceFile=generate
org.eclipse.wst.jsdt.core.compiler.doc.comment.support=enabled
org.eclipse.wst.jsdt.core.compiler.problem.assertIdentifier=warning
org.eclipse.wst.jsdt.core.compiler.problem.deprecation=warning
org.eclipse.wst.jsdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
org.eclipse.wst.jsdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
org.eclipse.wst.jsdt.core.compiler.problem.discouragedReference=warning
org.eclipse.wst.jsdt.core.compiler.problem.duplicateLocalVariables=warning
org.eclipse.wst.jsdt.core.compiler.problem.emptyStatement=ignore
org.eclipse.wst.jsdt.core.compiler.problem.fallthroughCase=ignore
org.eclipse.wst.jsdt.core.compiler.problem.fieldHiding=ignore
org.eclipse.wst.jsdt.core.compiler.problem.finallyBlockNotCompletingNormally=warning
org.eclipse.wst.jsdt.core.compiler.problem.forbiddenReference=error
org.eclipse.wst.jsdt.core.compiler.problem.hiddenCatchBlock=warning
org.eclipse.wst.jsdt.core.compiler.problem.indirectStaticAccess=ignore
org.eclipse.wst.jsdt.core.compiler.problem.invalidJavadoc=ignore
org.eclipse.wst.jsdt.core.compiler.problem.invalidJavadocTags=disabled
org.eclipse.wst.jsdt.core.compiler.problem.invalidJavadocTagsDeprecatedRef=disabled
org.eclipse.wst.jsdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef=disabled
org.eclipse.wst.jsdt.core.compiler.problem.invalidJavadocTagsVisibility=default
org.eclipse.wst.jsdt.core.compiler.problem.localVariableHiding=ignore
org.eclipse.wst.jsdt.core.compiler.problem.looseVarDecleration=warning
org.eclipse.wst.jsdt.core.compiler.problem.missingJavadocComments=ignore
org.eclipse.wst.jsdt.core.compiler.problem.missingJavadocCommentsOverriding=disabled
org.eclipse.wst.jsdt.core.compiler.problem.missingJavadocCommentsVisibility=default
org.eclipse.wst.jsdt.core.compiler.problem.missingJavadocTags=ignore
org.eclipse.wst.jsdt.core.compiler.problem.missingJavadocTagsOverriding=disabled
org.eclipse.wst.jsdt.core.compiler.problem.missingJavadocTagsVisibility=default
org.eclipse.wst.jsdt.core.compiler.problem.noEffectAssignment=warning
org.eclipse.wst.jsdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
org.eclipse.wst.jsdt.core.compiler.problem.nullReference=ignore
org.eclipse.wst.jsdt.core.compiler.problem.optionalSemicolon=warning
org.eclipse.wst.jsdt.core.compiler.problem.parameterAssignment=ignore
org.eclipse.wst.jsdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
org.eclipse.wst.jsdt.core.compiler.problem.potentialNullReference=ignore
org.eclipse.wst.jsdt.core.compiler.problem.redundantNullCheck=ignore
org.eclipse.wst.jsdt.core.compiler.problem.undefinedField=warning
org.eclipse.wst.jsdt.core.compiler.problem.undocumentedEmptyBlock=ignore
org.eclipse.wst.jsdt.core.compiler.problem.uninitializedGlobalVariable=ignore
org.eclipse.wst.jsdt.core.compiler.problem.uninitializedLocalVariable=warning
org.eclipse.wst.jsdt.core.compiler.problem.unnecessaryElse=ignore
org.eclipse.wst.jsdt.core.compiler.problem.unresolvedFieldReference=error
org.eclipse.wst.jsdt.core.compiler.problem.unresolvedMethodReference=error
org.eclipse.wst.jsdt.core.compiler.problem.unresolvedTypeReference=error
org.eclipse.wst.jsdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
org.eclipse.wst.jsdt.core.compiler.problem.unusedLabel=warning
org.eclipse.wst.jsdt.core.compiler.problem.unusedLocal=warning
org.eclipse.wst.jsdt.core.compiler.problem.unusedParameter=ignore
org.eclipse.wst.jsdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
org.eclipse.wst.jsdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
org.eclipse.wst.jsdt.core.compiler.problem.unusedPrivateMember=warning
org.eclipse.wst.jsdt.core.compiler.source=1.3
org.eclipse.wst.jsdt.core.compiler.taskCaseSensitive=enabled
org.eclipse.wst.jsdt.core.compiler.taskPriorities=NORMAL,HIGH,NORMAL
org.eclipse.wst.jsdt.core.compiler.taskTags=TODO,FIXME,XXX
semanticValidation=enabled

View File

@ -0,0 +1,2 @@
eclipse.preferences.version=1
org.eclipse.wst.jsdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?><templates/>

View File

@ -0,0 +1 @@
org.eclipse.wst.jsdt.launching.JRE_CONTAINER

View File

@ -0,0 +1 @@
Global

View File

@ -92,7 +92,8 @@ public abstract class RefineBroker extends ButterflyModuleImpl {
static final protected Logger logger = LoggerFactory.getLogger("refine.broker");
static final protected String USER_INFO_URL = "http://www.freebase.com/api/service/user_info";
// TODO: This API is deprecated.
static final protected String USER_INFO_URL = "http://api.freebase.com/api/service/user_info";
static final protected String DELEGATED_OAUTH_HEADER = "X-Freebase-Credentials";
static final protected String OAUTH_HEADER = "Authorization";

View File

@ -19,6 +19,8 @@
<property name="build.dir" value="build"/>
<property name="dist.dir" value="dist"/>
<property name="java_version" value="1.6"/>
<property name="appengine.app_id" value="openrefine-broker"/>
<property name="appengine.version" value="1"/>
<property name="appengine.sdk.dir" value="/opt/appengine"/>
@ -119,7 +121,7 @@
<target name="build_server">
<mkdir dir="${server.classes.dir}" />
<javac encoding="utf-8" srcdir="${server.src.dir}" destdir="${server.classes.dir}" debug="true" includeAntRuntime="no">
<javac source="${java_version}" target="${java_version}" encoding="utf-8" srcdir="${server.src.dir}" destdir="${server.classes.dir}" debug="true" includeAntRuntime="no">
<classpath refid="server.class.path" />
</javac>
<copy file="${server.src.dir}/log4j.properties" tofile="${server.classes.dir}/log4j.properties"/>
@ -127,7 +129,7 @@
<target name="build_webapp">
<mkdir dir="${webapp.classes.dir}" />
<javac encoding="utf-8" destdir="${webapp.classes.dir}" debug="true" includeAntRuntime="no">
<javac source="${java_version}" target="${java_version}" encoding="utf-8" destdir="${webapp.classes.dir}" debug="true" includeAntRuntime="no">
<src path="${webapp.src.dir}"/>
<classpath refid="webapp.class.path" />
</javac>
@ -136,7 +138,7 @@
<target name="build_tests" depends="build_webapp, build_server">
<mkdir dir="${server.tests.classes.dir}" />
<javac encoding="utf-8" srcdir="${server.tests.src.dir}" destdir="${server.tests.classes.dir}" debug="true" includeAntRuntime="no">
<javac source="${java_version}" target="${java_version}" encoding="utf-8" srcdir="${server.tests.src.dir}" destdir="${server.tests.classes.dir}" debug="true" includeAntRuntime="no">
<classpath refid="tests.class.path" />
</javac>
<copy file="${server.tests.src.dir}/tests.log4j.properties" tofile="${server.tests.classes.dir}/tests.log4j.properties"/>
@ -150,7 +152,7 @@
<target name="build_broker_core">
<mkdir dir="${broker.core.dir}/module/MOD-INF/classes" />
<javac encoding="utf-8" destdir="${broker.core.dir}/module/MOD-INF/classes" debug="true" includeAntRuntime="no">
<javac source="${java_version}" target="${java_version}" encoding="utf-8" destdir="${broker.core.dir}/module/MOD-INF/classes" debug="true" includeAntRuntime="no">
<src path="${broker.core.dir}/src/"/>
<classpath refid="broker.class.path" />
</javac>
@ -158,7 +160,7 @@
<target name="build_broker_appengine" depends="build_broker_core">
<mkdir dir="${broker.appengine.dir}/module/MOD-INF/classes" />
<javac encoding="utf-8" destdir="${broker.appengine.dir}/module/MOD-INF/classes" debug="true" includeAntRuntime="no">
<javac source="${java_version}" target="${java_version}" encoding="utf-8" destdir="${broker.appengine.dir}/module/MOD-INF/classes" debug="true" includeAntRuntime="no">
<src path="${broker.appengine.dir}/src/"/>
<classpath refid="broker.appengine.class.path" />
</javac>
@ -217,40 +219,50 @@
</target>
<target name="mac" depends="jar, prepare_webapp">
<taskdef
name="jarbundler"
classname="net.sourceforge.jarbundler.JarBundler"
classpath="${jarbundler.dir}/jarbundler-2.2.0.jar"
/>
<mkdir dir="${mac.dir}"/>
<jarbundler
dir="${mac.dir}"
name="OpenRefine"
mainclass="com.google.refine.Refine"
icon="${graphics.dir}/icon/openrefine.icns"
version="${version}"
infostring="OpenRefine ${version}"
<taskdef name="bundleapp"
classname="com.oracle.appbundler.AppBundlerTask"
classpath="${appbundler.dir}/appbundler-1.0.jar" />
<bundleapp outputdirectory="${mac.dir}"
name="OpenRefine"
displayname="OpenRefine ${version}"
icon="${graphics.dir}/icon/openrefine.icns"
identifier="org.openrefine.refine.Refine"
shortversion="${version}"
mainclassname="com/google/refine/Refine">
<runtime dir="${env.JAVA_HOME}"/>
<classpath dir="${build.dir}">
<include name="${fullname}-server.jar" />
</classpath>
<classpath dir="${server.lib.dir}">
<include name="**/*.jar" />
</classpath>
<option value="-Xms256M"/>
<option value="-Xmx1024M"/>
<option value="-Djava.awt.headless=true"/>
<option value="-Drefine.headless=true"/>
<option value="-Drefine.version=${revision}"/>
<option value="-Drefine.webapp=$APP_ROOT/Contents/Resource/${built.webapp.name}"/>
</bundleapp>
<copy todir="${mac.dir}/OpenRefine.app/Contents/Resource">
<fileset dir="${build.dir}" id="librarypathset" >
<include name="${built.webapp.name}/**/**" />
<exclude name="**/*.class" />
</fileset>
</copy>
<!--- Options from old jarbundler code not supported by appbundler
aboutmenuname="OpenRefine"
workingdirectory="$APP_PACKAGE/Contents/Resources"
jvmversion="1.6+"
bundleid="com.google.refine.Refine"
vmoptions="-Xms256M -Xmx1024M -Drefine.version=${revision}"
antialiasedgraphics="true"
antialiasedtext="true"
liveresize="true"
growboxintrudes="true"
screenmenu="true">
<jarfilelist dir="${build.dir}" files="${fullname}-server.jar" />
<jarfileset dir="${server.lib.dir}">
<exclude name="**/.svn" />
<include name="**/*.jar" />
</jarfileset>
<resourcefileset dir="${built.webapp.dir}/..">
<include name="${built.webapp.name}/**" />
</resourcefileset>
</jarbundler>
-->
</target>
<target name="windows" depends="jar, prepare_webapp">
@ -277,7 +289,7 @@
fileVersion="${full_version}"
txtFileVersion="${version}"
fileDescription="openrefine"
copyright="Copyright (c) 2012 OpenRefine contributors, 2010, Google, Inc."
copyright="Copyright (c) 2013 OpenRefine contributors, 2010, Google, Inc."
productVersion="${full_version}"
txtProductVersion="${full_version}"
productName="OpenRefine"

Binary file not shown.

View File

@ -139,12 +139,12 @@ ExtendDataPreviewDialog.prototype._show = function(properties) {
}
var suggestConfig = {
filter: '(all type:/type/property)',
type: '/type/property', // NOTE: requires patched Suggest to pass this through
// Default returns id, lang, mid, name, notable {id,name}, score
mql_output : JSON.stringify({'name':null,'id':null,'mid':null, '/type/property/expected_type':{'name':null,'id':null}}),
};
if ((this._column.reconConfig) && (this._column.reconConfig.type)) {
suggestConfig.filter = '(all type:/type/property (any namespace:/type/object namespace:' + this._column.reconConfig.type.id + '))';
suggestConfig.filter = '(should (any namespace:/type/object namespace:/common/topic namespace:' + this._column.reconConfig.type.id + '))';
}
this._elmts.addPropertyInput.suggestP(suggestConfig).bind("fb-select", function(evt, data) {

View File

@ -46,6 +46,7 @@ FreebaseLoadingDialog.prototype._createDialog = function() {
var authorization = this._elmts.authorization;
var loadButton = this._elmts.loadButton;
// TODO: Does this need to be redone for the new API / freebase.com site?
var check_authorization = function(cont) {
var check_authorization_url = "command/freebase/check-authorization/" + provider;
$.get(check_authorization_url, function(data) {

View File

@ -99,6 +99,8 @@ SchemaAlignment._batchSearch = function(queries, onDone) {
batch[key] = queries[key];
}
// TODO: New API doesn't accept multiple queries
// args.push("https://www.googleapis.com/freebase/v1/search?key=" + Freebase.API_KEY + "&" +
args.push("http://api.freebase.com/api/service/search?" +
$.param({ "queries" : JSON.stringify(batch) }) + "&callback=?");

View File

@ -308,7 +308,7 @@ SchemaAlignmentDialog.UILink.prototype._showPropertySuggestPopup = function(elmt
};
var sourceTypeID = this._parentUINode.getExpectedType();
if (sourceTypeID !== null) {
suggestOptions.filter = '(all type:/type/property (any namespace:/type/object namespace:' + sourceTypeID + '))'
suggestOptions.filter = '(all type:/type/property (should namespace:' + sourceTypeID + '))'
}
input.suggestP(suggestOptions).bind("fb-select", function(e, data) { commitProperty(data); });
@ -340,15 +340,13 @@ SchemaAlignmentDialog.UILink.prototype._configureTarget = function() {
var dismissBusy = DialogSystem.showBusy();
$.getJSON(
"http://api.freebase.com/api/service/mqlread?query=" + JSON.stringify({
query: {
"https://www.googleapis.com/freebase/v1/mqlread?key="+ Freebase.API_KEY + "&query=" + JSON.stringify({
"id" : this._link.property.id,
"type" : "/type/property",
"expected_type" : {
"id" : null,
"name" : null,
"/freebase/type_hints/mediator" : null
}
}
}) + "&callback=?",
null,

View File

@ -33,12 +33,12 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
var Freebase = {};
Freebase.API_KEY = "AIzaSyBAZ_EjMPKlOzyyZXv6JKXPPwJFISVji3M";
Freebase.mqlread = function(query, options, onDone) {
var params = {};
var queryEnv = {
"query": query
};
// TODO: Options need to be handled differently for new API - but this doesn't appear to be used
if (options) {
for (var n in options) {
if (options.hasOwnProperty(n)) {
@ -52,10 +52,10 @@ Freebase.mqlread = function(query, options, onDone) {
}
}
params.query = JSON.stringify(queryEnv);
params.query = JSON.stringify(query);
$.getJSON(
"http://api.freebase.com/api/service/mqlread?" + $.param(params) + "&callback=?",
"https://www.googleapis.com/freebase/v1/mqlread?key=" + Freebase.API_KEY + $.param(params) + "&callback=?",
null,
onDone,
"jsonp"

View File

@ -44,6 +44,11 @@ import com.google.refine.freebase.util.FreebaseUtils;
import com.google.refine.oauth.OAuthUtilities;
import com.google.refine.oauth.Provider;
/**
* Perform an MQLread on the server, using the client's credentials.
*
* Currently unused. All client code calls the Freebase API directly.
*/
public class MQLReadCommand extends Command {
@Override
@ -55,6 +60,7 @@ public class MQLReadCommand extends Command {
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Type", "application/json");
String query = request.getParameter("query");
@SuppressWarnings("deprecation")
String result = FreebaseUtils.mqlread(provider,query);
response.getWriter().write(result);
} catch (Exception e) {

View File

@ -1,6 +1,6 @@
/*
Copyright 2010, Google Inc.
Copyright 2010,2013 Google Inc. and other contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
@ -33,10 +33,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.freebase.model.recon;
import java.io.InputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -48,6 +46,7 @@ import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import com.google.refine.freebase.util.FreebaseUtils;
import com.google.refine.model.Cell;
import com.google.refine.model.Project;
import com.google.refine.model.Recon;
@ -95,7 +94,7 @@ public class GuidBasedReconConfig extends StrictReconConfig {
@Override
public int getBatchSize() {
return 10;
return 50;
}
@Override
@ -119,87 +118,48 @@ public class GuidBasedReconConfig extends StrictReconConfig {
Map<String, Recon> guidToRecon = new HashMap<String, Recon>();
try {
String query = null;
{
StringWriter stringWriter = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(stringWriter);
String query = buildQuery(jobs);
jsonWriter.object();
jsonWriter.key("query");
jsonWriter.array();
jsonWriter.object();
String s = FreebaseUtils.mqlread(query);
JSONObject o = ParsingUtilities.evaluateJsonStringToObject(s);
jsonWriter.key("id"); jsonWriter.value(null);
jsonWriter.key("name"); jsonWriter.value(null);
jsonWriter.key("guid"); jsonWriter.value(null);
jsonWriter.key("type"); jsonWriter.array(); jsonWriter.endArray();
if (o.has("result")) {
JSONArray results = o.getJSONArray("result");
int count = results.length();
jsonWriter.key("guid|=");
jsonWriter.array();
for (ReconJob job : jobs) {
jsonWriter.value(((GuidBasedReconJob) job).guid);
}
jsonWriter.endArray();
for (int i = 0; i < count; i++) {
JSONObject result = results.getJSONObject(i);
jsonWriter.endObject();
jsonWriter.endArray();
jsonWriter.endObject();
String guid = result.getString("guid");
query = stringWriter.toString();
}
StringBuffer sb = new StringBuffer(1024);
sb.append(s_mqlreadService);
sb.append("?query=");
sb.append(ParsingUtilities.encode(query));
URL url = new URL(sb.toString());
URLConnection connection = url.openConnection();
connection.setConnectTimeout(5000);
connection.connect();
InputStream is = connection.getInputStream();
try {
String s = ParsingUtilities.inputStreamToString(is);
JSONObject o = ParsingUtilities.evaluateJsonStringToObject(s);
if (o.has("result")) {
JSONArray results = o.getJSONArray("result");
int count = results.length();
for (int i = 0; i < count; i++) {
JSONObject result = results.getJSONObject(i);
String guid = result.getString("guid");
JSONArray types = result.getJSONArray("type");
String[] typeIDs = new String[types.length()];
for (int j = 0; j < typeIDs.length; j++) {
typeIDs[j] = types.getString(j);
}
ReconCandidate candidate = new ReconCandidate(
result.getString("id"),
result.getString("name"),
typeIDs,
100
);
Recon recon = Recon.makeFreebaseRecon(historyEntryID);
recon.addCandidate(candidate);
recon.service = "mql";
recon.judgment = Judgment.Matched;
recon.judgmentAction = "auto";
recon.match = candidate;
recon.matchRank = 0;
guidToRecon.put(guid, recon);
JSONArray types = result.getJSONArray("type");
String[] typeIDs = new String[types.length()];
for (int j = 0; j < typeIDs.length; j++) {
typeIDs[j] = types.getString(j);
}
ReconCandidate candidate = new ReconCandidate(
result.getString("id"),
result.getString("name"),
typeIDs,
100
);
Recon recon = Recon.makeFreebaseRecon(historyEntryID);
recon.addCandidate(candidate);
recon.service = "mql";
recon.judgment = Judgment.Matched;
recon.judgmentAction = "auto";
recon.match = candidate;
recon.matchRank = 0;
guidToRecon.put(guid, recon);
}
} finally {
is.close();
}
} catch (Exception e) {
e.printStackTrace();
} catch (IOException e) {
LOGGER.error("IOException during recon : ",e);
} catch (JSONException e) {
LOGGER.error("JSONException during recon : ",e);
}
for (ReconJob job : jobs) {
@ -213,4 +173,33 @@ public class GuidBasedReconConfig extends StrictReconConfig {
return recons;
}
private String buildQuery(List<ReconJob> jobs)
throws JSONException {
String query = null;
StringWriter stringWriter = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(stringWriter);
jsonWriter.array();
jsonWriter.object();
jsonWriter.key("id"); jsonWriter.value(null);
jsonWriter.key("name"); jsonWriter.value(null);
jsonWriter.key("guid"); jsonWriter.value(null);
jsonWriter.key("type"); jsonWriter.array(); jsonWriter.endArray();
jsonWriter.key("guid|=");
jsonWriter.array();
for (ReconJob job : jobs) {
jsonWriter.value(((GuidBasedReconJob) job).guid);
}
jsonWriter.endArray();
jsonWriter.endObject();
jsonWriter.endArray();
query = stringWriter.toString();
return query;
}
}
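For orientation, the MQL envelope that the extracted buildQuery() writes for a batch of GUID jobs has the shape sketched below. This is an illustration only, built with the same org.json classes the recon code uses; the GUID values are made up.

import java.util.Arrays;

import org.json.JSONArray;
import org.json.JSONObject;

// Illustrative sketch: one clause requesting id/name/guid/type for every GUID in the batch,
// wrapped in an array; the string form is what gets handed to FreebaseUtils.mqlread(query).
public class GuidQueryShape {
    public static void main(String[] args) throws Exception {
        JSONObject clause = new JSONObject();
        clause.put("id", JSONObject.NULL);
        clause.put("name", JSONObject.NULL);
        clause.put("guid", JSONObject.NULL);
        clause.put("type", new JSONArray());
        clause.put("guid|=", new JSONArray(Arrays.asList(
                "#9202a8c04000641f800000000001",   // made-up GUIDs standing in for the jobs
                "#9202a8c04000641f800000000002")));
        System.out.println(new JSONArray().put(clause).toString());
    }
}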

View File

@ -1,6 +1,6 @@
/*
Copyright 2010, Google Inc.
Copyright 2010,2013 Google Inc. and other contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
@ -33,10 +33,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.freebase.model.recon;
import java.io.InputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -48,6 +46,7 @@ import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import com.google.refine.freebase.util.FreebaseUtils;
import com.google.refine.model.Cell;
import com.google.refine.model.Project;
import com.google.refine.model.Recon;
@ -99,7 +98,7 @@ public class IdBasedReconConfig extends StrictReconConfig {
@Override
public int getBatchSize() {
return 10;
return 40;
}
@Override
@ -123,87 +122,47 @@ public class IdBasedReconConfig extends StrictReconConfig {
Map<String, Recon> idToRecon = new HashMap<String, Recon>();
try {
String query = null;
{
StringWriter stringWriter = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(stringWriter);
String query = buildQuery(jobs);
String s = FreebaseUtils.mqlread(query);
jsonWriter.object();
jsonWriter.key("query");
jsonWriter.array();
jsonWriter.object();
JSONObject o = ParsingUtilities.evaluateJsonStringToObject(s);
if (o.has("result")) {
JSONArray results = o.getJSONArray("result");
int count = results.length();
jsonWriter.key("id"); jsonWriter.value(null);
jsonWriter.key("name"); jsonWriter.value(null);
jsonWriter.key("guid"); jsonWriter.value(null);
jsonWriter.key("type"); jsonWriter.array(); jsonWriter.endArray();
for (int i = 0; i < count; i++) {
JSONObject result = results.getJSONObject(i);
jsonWriter.key("id|=");
jsonWriter.array();
for (ReconJob job : jobs) {
jsonWriter.value(((IdBasedReconJob) job).id);
}
jsonWriter.endArray();
String id = result.getString("id");
jsonWriter.endObject();
jsonWriter.endArray();
jsonWriter.endObject();
query = stringWriter.toString();
}
StringBuffer sb = new StringBuffer(1024);
sb.append(s_mqlreadService);
sb.append("?query=");
sb.append(ParsingUtilities.encode(query));
URL url = new URL(sb.toString());
URLConnection connection = url.openConnection();
connection.setConnectTimeout(5000);
connection.connect();
InputStream is = connection.getInputStream();
try {
String s = ParsingUtilities.inputStreamToString(is);
JSONObject o = ParsingUtilities.evaluateJsonStringToObject(s);
if (o.has("result")) {
JSONArray results = o.getJSONArray("result");
int count = results.length();
for (int i = 0; i < count; i++) {
JSONObject result = results.getJSONObject(i);
String id = result.getString("id");
JSONArray types = result.getJSONArray("type");
String[] typeIDs = new String[types.length()];
for (int j = 0; j < typeIDs.length; j++) {
typeIDs[j] = types.getString(j);
}
ReconCandidate candidate = new ReconCandidate(
id,
result.getString("name"),
typeIDs,
100
);
Recon recon = Recon.makeFreebaseRecon(historyEntryID);
recon.addCandidate(candidate);
recon.service = "mql";
recon.judgment = Judgment.Matched;
recon.judgmentAction = "auto";
recon.match = candidate;
recon.matchRank = 0;
idToRecon.put(id, recon);
JSONArray types = result.getJSONArray("type");
String[] typeIDs = new String[types.length()];
for (int j = 0; j < typeIDs.length; j++) {
typeIDs[j] = types.getString(j);
}
ReconCandidate candidate = new ReconCandidate(
id,
result.getString("name"),
typeIDs,
100
);
Recon recon = Recon.makeFreebaseRecon(historyEntryID);
recon.addCandidate(candidate);
recon.service = "mql";
recon.judgment = Judgment.Matched;
recon.judgmentAction = "auto";
recon.match = candidate;
recon.matchRank = 0;
idToRecon.put(id, recon);
}
} finally {
is.close();
}
} catch (Exception e) {
e.printStackTrace();
} catch (IOException e) {
LOGGER.error("IOException during recon : ",e);
} catch (JSONException e) {
LOGGER.error("JSONException during recon : ",e);
}
for (ReconJob job : jobs) {
@ -218,4 +177,34 @@ public class IdBasedReconConfig extends StrictReconConfig {
return recons;
}
private String buildQuery(List<ReconJob> jobs)
throws JSONException {
String query = null;
{
StringWriter stringWriter = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(stringWriter);
jsonWriter.array();
jsonWriter.object();
jsonWriter.key("id"); jsonWriter.value(null);
jsonWriter.key("name"); jsonWriter.value(null);
jsonWriter.key("guid"); jsonWriter.value(null);
jsonWriter.key("type"); jsonWriter.array(); jsonWriter.endArray();
jsonWriter.key("id|=");
jsonWriter.array();
for (ReconJob job : jobs) {
jsonWriter.value(((IdBasedReconJob) job).id);
}
jsonWriter.endArray();
jsonWriter.endObject();
jsonWriter.endArray();
query = stringWriter.toString();
}
return query;
}
}

View File

@ -33,10 +33,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.freebase.model.recon;
import java.io.InputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -49,6 +47,7 @@ import org.json.JSONObject;
import org.json.JSONWriter;
import com.google.refine.freebase.FreebaseTopic;
import com.google.refine.freebase.util.FreebaseUtils;
import com.google.refine.model.Cell;
import com.google.refine.model.Project;
import com.google.refine.model.Recon;
@ -99,7 +98,7 @@ public class KeyBasedReconConfig extends StrictReconConfig {
@Override
public int getBatchSize() {
return 10;
return 40;
}
@Override
@ -124,100 +123,47 @@ public class KeyBasedReconConfig extends StrictReconConfig {
Map<String, Recon> keyToRecon = new HashMap<String, Recon>();
try {
String query = null;
{
StringWriter stringWriter = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(stringWriter);
String query = buildQuery(jobs);
String s = FreebaseUtils.mqlread(query);
jsonWriter.object();
jsonWriter.key("query");
jsonWriter.array();
jsonWriter.object();
JSONObject o = ParsingUtilities.evaluateJsonStringToObject(s);
if (o.has("result")) {
JSONArray results = o.getJSONArray("result");
int count = results.length();
jsonWriter.key("id"); jsonWriter.value(null);
jsonWriter.key("name"); jsonWriter.value(null);
jsonWriter.key("guid"); jsonWriter.value(null);
jsonWriter.key("type"); jsonWriter.array(); jsonWriter.endArray();
for (int i = 0; i < count; i++) {
JSONObject result = results.getJSONObject(i);
jsonWriter.key("key");
jsonWriter.array();
jsonWriter.object();
String key = result.getJSONArray("key").getJSONObject(0).getString("value");
jsonWriter.key("namespace");
jsonWriter.object();
jsonWriter.key("id"); jsonWriter.value(namespace.id);
jsonWriter.endObject();
jsonWriter.key("value"); jsonWriter.value(null);
jsonWriter.key("value|=");
jsonWriter.array();
for (ReconJob job : jobs) {
jsonWriter.value(((KeyBasedReconJob) job).key);
}
jsonWriter.endArray();
jsonWriter.endObject();
jsonWriter.endArray();
jsonWriter.endObject();
jsonWriter.endArray();
jsonWriter.endObject();
query = stringWriter.toString();
}
StringBuffer sb = new StringBuffer(1024);
sb.append(s_mqlreadService);
sb.append("?query=");
sb.append(ParsingUtilities.encode(query));
URL url = new URL(sb.toString());
URLConnection connection = url.openConnection();
connection.setConnectTimeout(5000);
connection.connect();
InputStream is = connection.getInputStream();
try {
String s = ParsingUtilities.inputStreamToString(is);
JSONObject o = ParsingUtilities.evaluateJsonStringToObject(s);
if (o.has("result")) {
JSONArray results = o.getJSONArray("result");
int count = results.length();
for (int i = 0; i < count; i++) {
JSONObject result = results.getJSONObject(i);
String key = result.getJSONArray("key").getJSONObject(0).getString("value");
JSONArray types = result.getJSONArray("type");
String[] typeIDs = new String[types.length()];
for (int j = 0; j < typeIDs.length; j++) {
typeIDs[j] = types.getString(j);
}
ReconCandidate candidate = new ReconCandidate(
result.getString("id"),
result.getString("name"),
typeIDs,
100
);
Recon recon = Recon.makeFreebaseRecon(historyEntryID);
recon.addCandidate(candidate);
recon.service = "mql";
recon.judgment = Judgment.Matched;
recon.judgmentAction = "auto";
recon.match = candidate;
recon.matchRank = 0;
keyToRecon.put(key, recon);
JSONArray types = result.getJSONArray("type");
String[] typeIDs = new String[types.length()];
for (int j = 0; j < typeIDs.length; j++) {
typeIDs[j] = types.getString(j);
}
ReconCandidate candidate = new ReconCandidate(
result.getString("id"),
result.getString("name"),
typeIDs,
100
);
Recon recon = Recon.makeFreebaseRecon(historyEntryID);
recon.addCandidate(candidate);
recon.service = "mql";
recon.judgment = Judgment.Matched;
recon.judgmentAction = "auto";
recon.match = candidate;
recon.matchRank = 0;
keyToRecon.put(key, recon);
}
} finally {
is.close();
}
} catch (Exception e) {
e.printStackTrace();
} catch (IOException e) {
LOGGER.error("IOException during recon : ",e);
} catch (JSONException e) {
LOGGER.error("JSONException during recon : ",e);
}
for (ReconJob job : jobs) {
@ -232,4 +178,47 @@ public class KeyBasedReconConfig extends StrictReconConfig {
return recons;
}
private String buildQuery(List<ReconJob> jobs)
throws JSONException {
String query = null;
{
StringWriter stringWriter = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(stringWriter);
jsonWriter.array();
jsonWriter.object();
jsonWriter.key("id"); jsonWriter.value(null);
jsonWriter.key("name"); jsonWriter.value(null);
jsonWriter.key("guid"); jsonWriter.value(null);
jsonWriter.key("type"); jsonWriter.array(); jsonWriter.endArray();
jsonWriter.key("key");
jsonWriter.array();
jsonWriter.object();
jsonWriter.key("namespace");
jsonWriter.object();
jsonWriter.key("id"); jsonWriter.value(namespace.id);
jsonWriter.endObject();
jsonWriter.key("value"); jsonWriter.value(null);
jsonWriter.key("value|=");
jsonWriter.array();
for (ReconJob job : jobs) {
jsonWriter.value(((KeyBasedReconJob) job).key);
}
jsonWriter.endArray();
jsonWriter.endObject();
jsonWriter.endArray();
jsonWriter.endObject();
jsonWriter.endArray();
query = stringWriter.toString();
}
return query;
}
}

View File

@ -40,7 +40,6 @@ import com.google.refine.model.Recon.Judgment;
import com.google.refine.model.recon.ReconConfig;
abstract public class StrictReconConfig extends ReconConfig {
final static protected String s_mqlreadService = "http://api.freebase.com/api/service/mqlread";
static public ReconConfig reconstruct(JSONObject obj) throws Exception {
String match = obj.getString("match");

View File

@ -13,7 +13,7 @@ import com.google.refine.util.JSONUtilities;
import com.google.refine.util.ParsingUtilities;
public class SchemaHelper {
static private final String helperURL = "http://2.refine-helper.stefanomazzocchi.user.dev.freebaseapps.com/";
static private final String helperURL = "http://openrefine-helper.freebaseapps.com/";
static private class PropertyInfo {
String fromTypeID;

View File

@ -1,6 +1,6 @@
/*
Copyright 2010, Google Inc.
Copyright 2010,2013 Google Inc. and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
@ -31,19 +31,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
*
*/
package com.google.refine.freebase.util;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.io.StringWriter;
import java.io.Writer;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -97,35 +89,31 @@ public class FreebaseDataExtensionJob {
) throws Exception {
StringWriter writer = new StringWriter();
formulateQuery(ids, extension, writer);
String query = writer.toString();
InputStream is = doMqlRead(query);
try {
String s = ParsingUtilities.inputStreamToString(is);
JSONObject o = ParsingUtilities.evaluateJsonStringToObject(s);
Map<String, FreebaseDataExtensionJob.DataExtension> map = new HashMap<String, FreebaseDataExtensionJob.DataExtension>();
if (o.has("result")) {
JSONArray a = o.getJSONArray("result");
int l = a.length();
String result = FreebaseUtils.mqlread(query);
for (int i = 0; i < l; i++) {
JSONObject o2 = a.getJSONObject(i);
String id = o2.getString("id");
FreebaseDataExtensionJob.DataExtension ext = collectResult(o2, reconCandidateMap);
JSONObject o = ParsingUtilities.evaluateJsonStringToObject(result);
Map<String, FreebaseDataExtensionJob.DataExtension> map = new HashMap<String, FreebaseDataExtensionJob.DataExtension>();
if (o.has("result")) {
JSONArray a = o.getJSONArray("result");
int l = a.length();
if (ext != null) {
map.put(id, ext);
}
for (int i = 0; i < l; i++) {
JSONObject o2 = a.getJSONObject(i);
String id = o2.getString("id");
FreebaseDataExtensionJob.DataExtension ext = collectResult(o2, reconCandidateMap);
if (ext != null) {
map.put(id, ext);
}
}
return map;
} finally {
is.close();
}
return map;
}
protected FreebaseDataExtensionJob.DataExtension collectResult(
JSONObject obj,
Map<String, ReconCandidate> reconCandidateMap
@ -312,34 +300,10 @@ public class FreebaseDataExtensionJob {
}
static protected InputStream doMqlRead(String query) throws IOException {
URL url = new URL("http://api.freebase.com/api/service/mqlread");
URLConnection connection = url.openConnection();
connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
connection.setConnectTimeout(5000);
connection.setDoOutput(true);
DataOutputStream dos = new DataOutputStream(connection.getOutputStream());
try {
String body = "extended=1&query=" + ParsingUtilities.encode(query);
dos.writeBytes(body);
} finally {
dos.flush();
dos.close();
}
connection.connect();
return connection.getInputStream();
}
static protected void formulateQuery(Set<String> ids, JSONObject node, Writer writer) throws JSONException {
JSONWriter jsonWriter = new JSONWriter(writer);
jsonWriter.object();
jsonWriter.key("query");
jsonWriter.array();
jsonWriter.object();
@ -357,7 +321,6 @@ public class FreebaseDataExtensionJob {
jsonWriter.endObject();
jsonWriter.endArray();
jsonWriter.endObject();
}
static protected void formulateQueryNode(JSONObject node, JSONWriter writer) throws JSONException {

View File

@ -1,6 +1,6 @@
/*
Copyright 2010, Google Inc.
Copyright 2010,2013 Google Inc. and other contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
@ -34,6 +34,12 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.freebase.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
@ -56,19 +62,42 @@ import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.util.EntityUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.googleapis.batch.BatchRequest;
import com.google.api.client.googleapis.batch.json.JsonBatchCallback;
import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.http.HttpHeaders;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson.JacksonFactory;
import com.google.api.services.freebase.Freebase;
import com.google.api.services.freebase.FreebaseRequestInitializer;
import com.google.refine.ProjectManager;
import com.google.refine.RefineServlet;
import com.google.refine.oauth.Credentials;
import com.google.refine.oauth.OAuthUtilities;
import com.google.refine.oauth.Provider;
import com.google.refine.preference.PreferenceStore;
import com.google.refine.util.ParsingUtilities;
public class FreebaseUtils {
private static final String FREEBASE_API_VERSION = "v1";
// private static final String FREEBASE_SANDBOX_API_VERSION = "v1sandbox";
private static final String GOOGLE_RPC_URL = "https://www.googleapis.com/rpc";
private static final String FREEBASE_SERVICE_URL = "https://www.googleapis.com/freebase/" + FREEBASE_API_VERSION;
private static final String GOOGLE_BATCH_URL = "https://www.googleapis.com/batch";
static final Logger logger = LoggerFactory.getLogger("freebase");
static final public String FREEBASE_HOST = "freebase.com";
@ -80,7 +109,19 @@ public class FreebaseUtils {
static final private int SAMPLE_SIZE = 300;
static final private int JUDGES = 4;
public static final String API_KEY = "AIzaSyBAZ_EjMPKlOzyyZXv6JKXPPwJFISVji3M";
public static String getApiKey() {
PreferenceStore ps = ProjectManager.singleton.getPreferenceStore();
String key = (String) ps.get("freebase.api.key");
if (key == null) {
key = System.getProperty("refine.google_api_key");
}
return key == null ? API_KEY : key;
}
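The new getApiKey() resolves the key in three steps: the "freebase.api.key" preference, then the refine.google_api_key system property, then the bundled default. A minimal sketch of overriding it with the system property; the key value is a placeholder.

// Sketch: set before the first Freebase call (or pass -Drefine.google_api_key=... to the JVM);
// FreebaseUtils.getApiKey() picks this up whenever no "freebase.api.key" preference is stored.
public class ApiKeyOverride {
    public static void main(String[] args) {
        System.setProperty("refine.google_api_key", "YOUR_GOOGLE_API_KEY"); // placeholder
        System.out.println(System.getProperty("refine.google_api_key"));
    }
}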
private static String getUserInfoURL(String host) {
// TODO: Needs to be upgraded to new APIs sandbox-freebase.com as host becomes v1sandbox as version
return "http://api." + host + "/api/service/user_info";
}
@ -134,6 +175,12 @@ public class FreebaseUtils {
return mqlread(provider, query);
}
/**
* Perform an MQLREAD operation using the credentials of the given OAuth provider
*
* @deprecated This will go away when we switch to Google authentication.
*/
@Deprecated
public static String mqlread(Provider provider, String query)
throws ClientProtocolException, IOException, JSONException {
@ -159,6 +206,24 @@ public class FreebaseUtils {
return EntityUtils.toString(httpResponse.getEntity());
}
/**
* Perform a single unauthenticated MQLread.
*
* (wrapper method for a bunch of alternative implementations)
*/
static public String mqlread(String query)
throws IOException, JSONException {
// A bunch of implementations which don't work for MQLread, but do for other methods
// String result = rpcCall(query);
// String result = googleCall(query);
// String result = batchCall1(query);
String result = mqlreadBatchMime(query);
return result;
}
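A minimal sketch of calling the new unauthenticated wrapper; the query is an MQL envelope of the same shape the recon configs build, and the id used here is purely illustrative.

import java.io.IOException;

import org.json.JSONException;

import com.google.refine.freebase.util.FreebaseUtils;

// Sketch only: one unauthenticated MQLread through the wrapper added above.
public class MqlreadSketch {
    public static void main(String[] args) throws IOException, JSONException {
        String query = "[{\"id\":\"/en/united_states\",\"name\":null,\"type\":[]}]"; // illustrative id
        System.out.println(FreebaseUtils.mqlread(query));
    }
}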
public static String mqlwrite(Credentials credentials, Provider provider, String query)
throws OAuthMessageSignerException, OAuthExpectationFailedException, OAuthCommunicationException, ClientProtocolException, IOException, JSONException {
OAuthConsumer consumer = OAuthUtilities.getConsumer(credentials, provider);
@ -286,4 +351,190 @@ public class FreebaseUtils {
return url != null ? url : FREEQ_URL;
}
static final String BOUNDARY = "---theOpenRefineBoundary--=";
/**
* A hand rolled MIME multipart/mixed implementation for Google's Batch API
*/
static private String mqlreadBatchMime(String query) throws JSONException, IOException {
URL url = new URL(GOOGLE_BATCH_URL);
String service_url = FREEBASE_SERVICE_URL+"/mqlread";
// We could use the javax.mail package, but it's actually more trouble than it's worth
String body = "--" + BOUNDARY + "\n"
+ queryToMimeBodyPart("0", query, service_url, FreebaseUtils.getApiKey())
+ "\n--" + BOUNDARY + "\n" ;
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestProperty("Content-Type","multipart/mixed; boundary="+ BOUNDARY);
connection.setConnectTimeout(5000);
connection.setDoOutput(true);
Writer writer = new OutputStreamWriter(connection.getOutputStream());
try {
writer.write(body);
} finally {
writer.flush();
writer.close();
}
connection.connect();
String result = null;
if (connection.getResponseCode() >= 400) {
String responseMessage = connection.getResponseMessage();
String errorStream = ParsingUtilities.inputStreamToString(connection.getErrorStream());
LoggerFactory.getLogger("freebase").error(
"Error in mqlreadMime: " + connection.getResponseCode() + ":" + responseMessage + " : "
+ errorStream);
} else {
InputStream is = connection.getInputStream();
try {
String s = ParsingUtilities.inputStreamToString(is);
String boundary = s.substring(0,s.indexOf("\n"));
boundary = boundary.split("\r")[0];
String[] part = s.split(boundary); // part 0 is empty because of leading boundary
String[] sections = part[1].split("\r\n\r\n");
// Mime headers, followed by HTTP headers, followed by actual response
result = sections[2];
} finally {
is.close();
}
}
return result;
}
static String queryToMimeBodyPart(String query_name,
String query, String service_url, String api_key)
throws IOException {
// We could use the javax.mail package, but it's actually more trouble than it's worth
StringBuilder sb = new StringBuilder();
sb.append("Content-Type: application/http\n");
sb.append("Content-Transfer-Encoding: binary\n");
sb.append("Content-ID: " + query_name + "\n");
sb.append("\n");
List<NameValuePair> params = new ArrayList<NameValuePair>();
params.add(new BasicNameValuePair("query",query));
params.add(new BasicNameValuePair("key", api_key));
UrlEncodedFormEntity param_string = new UrlEncodedFormEntity(params, "UTF-8");
String body = "GET " + service_url + "?" + ParsingUtilities.inputStreamToString(param_string.getContent()) + "\n";
sb.append(body);
sb.append("\n");
return sb.toString();
}
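Put together, the multipart/mixed body that mqlreadBatchMime() posts to the batch endpoint looks roughly like the output of the sketch below; the query string and API key are URL-encoded placeholders, and this is an illustration of the wire format rather than code from the commit.

// Illustration: a single application/http part wrapped in the OpenRefine batch boundary.
public class BatchBodySketch {
    public static void main(String[] args) {
        String BOUNDARY = "---theOpenRefineBoundary--=";
        String part =
              "Content-Type: application/http\n"
            + "Content-Transfer-Encoding: binary\n"
            + "Content-ID: 0\n"
            + "\n"
            + "GET https://www.googleapis.com/freebase/v1/mqlread"
            + "?query=%5B%7B%22id%22%3Anull%7D%5D&key=YOUR_KEY\n"   // placeholder query and key
            + "\n";
        System.out.print("--" + BOUNDARY + "\n" + part + "\n--" + BOUNDARY + "\n");
    }
}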
//////////////////////// Unused methods for future use /////////////////////
/**
* This RPC call works for the Reconcile API, but MQLread is not supported over JSONRPC
*/
@SuppressWarnings("unused")
static private JSONObject mqlreadRpc(String query) throws JSONException, UnsupportedEncodingException, IOException {
URL url = new URL(GOOGLE_RPC_URL);
JSONObject params = new JSONObject();
params.put("query",query);
params.put("key", FreebaseUtils.getApiKey());
JSONObject req1 = new JSONObject();
req1.put("jsonrpc","2.0");
req1.put("id","q0");
req1.put("method","freebase.mqlread");
req1.put("apiVersion", FREEBASE_API_VERSION);
req1.put("params",params);
JSONArray body = new JSONArray();
body.put(req1);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestProperty("Content-Type", "application/json"); //
connection.setConnectTimeout(5000);
connection.setDoOutput(true);
OutputStreamWriter writer = new OutputStreamWriter(connection.getOutputStream(),"utf-8");
try {
writer.write(body.toString());
} finally {
writer.flush();
writer.close();
}
connection.connect();
JSONArray result = null;
if (connection.getResponseCode() >= 400) {
String responseMessage = connection.getResponseMessage();
String errorStream = ParsingUtilities.inputStreamToString(connection.getErrorStream());
LoggerFactory.getLogger("freebase").error(
"Error in mqlreadMime: " + connection.getResponseCode() + ":" + responseMessage + " : "
+ errorStream);
} else {
InputStream is = connection.getInputStream();
try {
String s = ParsingUtilities.inputStreamToString(is);
result = ParsingUtilities.evaluateJsonStringToArray(s);
} finally {
is.close();
}
}
return result.getJSONObject(0);
}
private static final HttpTransport HTTP_TRANSPORT = new NetHttpTransport();
private static final JsonFactory JSON_FACTORY = new JacksonFactory();
private static final FreebaseRequestInitializer REQUEST_INITIALIZER =
new FreebaseRequestInitializer(FreebaseUtils.getApiKey());
/**
* Submit a single MQL read query via the standard Google client library
*/
@SuppressWarnings("unused")
static private String mqlreadFreebaseClient(String query)
throws IOException, JSONException {
Freebase client = new Freebase.Builder(HTTP_TRANSPORT, JSON_FACTORY, null)
.setApplicationName("OpenRefine")
.setFreebaseRequestInitializer(REQUEST_INITIALIZER)
.build();
InputStream is = client.mqlread(query).executeAsInputStream();
String result = ParsingUtilities.inputStreamToString(is);
return result;
}
/**
* Submit a single MQL query via the Batch endpoint
* (not supported by Google's Java client)
*/
@SuppressWarnings("unused")
static private JSONObject mqlreadBatchFreebaseClient(String query) throws IOException, JSONException {
JSONObject response = null;
// FIXME: We really want JsonBatchCallback<Freebase> here, but it's not supported right now
JsonBatchCallback<Void> callback = new JsonBatchCallback<Void>() {
public void onSuccess(Void res, HttpHeaders responseHeaders) {
System.out.println(res);
}
public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) {
System.out.println("Error Message: " + e.getMessage());
}
};
Freebase client = new Freebase.Builder(HTTP_TRANSPORT, JSON_FACTORY, null)
.setApplicationName("OpenRefine")
.setFreebaseRequestInitializer(REQUEST_INITIALIZER)
.build();
// FIXME: Batch doesn't work with MqlRead since it extends FreebaseRequest<Void>
BatchRequest batch = client.batch();
client.mqlread(query).queue(batch, callback);
batch.execute();
return response;
}
}

View File

@ -128,7 +128,7 @@ public class JythonEvaluable implements Evaluable {
return unwrap(result);
} catch (PyException e) {
return new EvalError(e.getMessage());
return new EvalError(e.toString());
}
}

View File

@ -99,27 +99,33 @@ public class InterProjectModel {
computeJoin(join);
_joins.put(key, join);
synchronized (_joins) {
_joins.put(key, join);
}
}
return _joins.get(key);
}
public void flushJoinsInvolvingProject(long projectID) {
for (Entry<String, ProjectJoin> entry : _joins.entrySet()) {
ProjectJoin join = entry.getValue();
if (join.fromProjectID == projectID || join.toProjectID == projectID) {
_joins.remove(entry.getKey());
synchronized (_joins) {
for (Entry<String, ProjectJoin> entry : _joins.entrySet()) {
ProjectJoin join = entry.getValue();
if (join.fromProjectID == projectID || join.toProjectID == projectID) {
_joins.remove(entry.getKey());
}
}
}
}
public void flushJoinsInvolvingProjectColumn(long projectID, String columnName) {
for (Entry<String, ProjectJoin> entry : _joins.entrySet()) {
ProjectJoin join = entry.getValue();
if (join.fromProjectID == projectID && join.fromProjectColumnName.equals(columnName) ||
join.toProjectID == projectID && join.toProjectColumnName.equals(columnName)) {
_joins.remove(entry.getKey());
synchronized (_joins) {
for (Entry<String, ProjectJoin> entry : _joins.entrySet()) {
ProjectJoin join = entry.getValue();
if (join.fromProjectID == projectID && join.fromProjectColumnName.equals(columnName) ||
join.toProjectID == projectID && join.toProjectColumnName.equals(columnName)) {
_joins.remove(entry.getKey());
}
}
}
}
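The synchronized blocks serialize access to the _joins cache. One related point: removing entries from an ordinary HashMap while iterating its entry set is only well defined when done through the iterator, so the flush is sketched below with explicit iterator removal. This is a self-contained illustration with stand-in types and keys, assuming _joins is a plain HashMap; it is not the project's code.

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

// Stand-in for ProjectJoin: just the two project ids the flush methods look at.
public class JoinCacheEvictionDemo {
    static final class Join {
        final long fromProjectID, toProjectID;
        Join(long from, long to) { fromProjectID = from; toProjectID = to; }
    }

    public static void main(String[] args) {
        Map<String, Join> joins = new HashMap<String, Join>();
        joins.put("key-1", new Join(1L, 2L));   // illustrative keys
        joins.put("key-2", new Join(3L, 4L));

        long projectID = 1L;
        synchronized (joins) {
            Iterator<Map.Entry<String, Join>> it = joins.entrySet().iterator();
            while (it.hasNext()) {
                Join join = it.next().getValue();
                if (join.fromProjectID == projectID || join.toProjectID == projectID) {
                    it.remove(); // iterator removal keeps in-loop removal well defined
                }
            }
        }
        System.out.println(joins.keySet()); // [key-2]
    }
}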

View File

@ -157,7 +157,7 @@ public class TimeRangeFacet extends RangeFacet {
@Override
protected boolean checkValue(long t) {
return t >= _from && t < _to;
return t >= _from && t <= _to;
};
};
} else {
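The change makes the facet's upper bound inclusive, so a timestamp sitting exactly on _to now passes the filter. A tiny self-contained check of the boundary case, with names mirroring the diff:

// Boundary check: the old test excludes t == _to, the new test includes it.
public class TimeRangeBoundaryCheck {
    public static void main(String[] args) {
        long _from = 0L, _to = 1000L;
        long t = _to; // value exactly on the upper bound
        System.out.println(t >= _from && t < _to);   // old: false
        System.out.println(t >= _from && t <= _to);  // new: true
    }
}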

View File

@ -81,6 +81,7 @@ public class CsvExporter implements WriterExporter{
JSONUtilities.getString(options, "separator", Character.toString(this.separator));
final String lineSeparator = options == null ? CSVWriter.DEFAULT_LINE_END :
JSONUtilities.getString(options, "lineSeparator", CSVWriter.DEFAULT_LINE_END);
final boolean quoteAll = options == null ? false : JSONUtilities.getBoolean(options, "quoteAll", false);
final boolean printColumnHeader =
(params != null && params.getProperty("printColumnHeader") != null) ?
@ -110,7 +111,7 @@ public class CsvExporter implements WriterExporter{
cellData.text :
"";
}
csvWriter.writeNext(strings, false);
csvWriter.writeNext(strings, quoteAll);
}
}
};
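The exporter now reads a quoteAll flag from its options JSON and forwards it to CSVWriter.writeNext(), so callers can force every field to be quoted. A minimal sketch of options carrying the new flag; the keys are the ones read above, the values are illustrative.

import org.json.JSONException;
import org.json.JSONObject;

// Sketch: exporter options with the new flag; "quoteAll" defaults to false when absent.
public class CsvExportOptionsSketch {
    public static void main(String[] args) throws JSONException {
        JSONObject options = new JSONObject();
        options.put("separator", ",");
        options.put("lineSeparator", "\r\n");
        options.put("quoteAll", true); // wrap every exported field in quotes
        System.out.println(options.toString());
    }
}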

View File

@ -185,7 +185,7 @@ public class FixedWidthImporter extends TabularImportingParserBase {
}
}
if (counts != null) {
if (counts != null && lineCount > 2) {
List<Integer> widths = new ArrayList<Integer>();
int startIndex = 0;

View File

@ -57,10 +57,6 @@ public class ImporterUtilities {
static public Serializable parseCellValue(String text) {
if (text.length() > 0) {
if (text.length() > 1 && text.startsWith("\"") && text.endsWith("\"")) {
return text.substring(1, text.length() - 1);
}
String text2 = text.trim();
if (text2.length() > 0) {
try {

View File

@ -34,10 +34,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.refine.importers;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.Serializable;
import java.util.List;
@ -67,7 +65,7 @@ public class JsonImporter extends TreeImportingParserBase {
public final static String ANONYMOUS = "_";
public JsonImporter() {
super(false);
super(true);
}
static private class PreviewParsingState {
@ -84,18 +82,13 @@ public class JsonImporter extends TreeImportingParserBase {
try {
JSONObject firstFileRecord = fileRecords.get(0);
File file = ImportingUtilities.getFile(job, firstFileRecord);
InputStream is = new FileInputStream(file);
try {
JsonFactory factory = new JsonFactory();
JsonParser parser = factory.createJsonParser(is);
JsonFactory factory = new JsonFactory();
JsonParser parser = factory.createJsonParser(file);
PreviewParsingState state = new PreviewParsingState();
Object rootValue = parseForPreview(parser, state);
if (rootValue != null) {
JSONUtilities.safePut(options, "dom", rootValue);
}
} finally {
is.close();
PreviewParsingState state = new PreviewParsingState();
Object rootValue = parseForPreview(parser, state);
if (rootValue != null) {
JSONUtilities.safePut(options, "dom", rootValue);
}
} catch (IOException e) {
logger.error("Error generating parser UI initialization data for JSON file", e);
@ -201,11 +194,11 @@ public class JsonImporter extends TreeImportingParserBase {
@Override
public void parseOneFile(Project project, ProjectMetadata metadata,
ImportingJob job, String fileSource, Reader reader,
ImportingJob job, String fileSource, InputStream is,
ImportColumnGroup rootColumnGroup, int limit, JSONObject options, List<Exception> exceptions) {
parseOneFile(project, metadata, job, fileSource,
new JSONTreeReader(reader), rootColumnGroup, limit, options, exceptions);
new JSONTreeReader(is), rootColumnGroup, limit, options, exceptions);
}
static public class JSONTreeReader implements TreeReader {
@ -220,9 +213,9 @@ public class JsonImporter extends TreeImportingParserBase {
private Serializable fieldValue = null;
public JSONTreeReader(Reader reader) {
public JSONTreeReader(InputStream is) {
try {
parser = factory.createJsonParser(reader);
parser = factory.createJsonParser(is);
current = null;
next = parser.nextToken();
} catch (IOException e) {
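The preview path now hands Jackson the File directly instead of opening and closing a FileInputStream itself, and JSONTreeReader takes an InputStream rather than a Reader. A minimal sketch of the File-based parser creation, assuming the Jackson 1.x (org.codehaus.jackson) API bundled with Refine and a hypothetical file name:

import java.io.File;
import java.io.IOException;

import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;

// Sketch: when the factory opens the File itself, closing the parser releases the file it opened.
public class JsonParserSketch {
    public static void main(String[] args) throws IOException {
        JsonFactory factory = new JsonFactory();
        JsonParser parser = factory.createJsonParser(new File("example.json")); // hypothetical file
        JsonToken first = parser.nextToken(); // START_OBJECT or START_ARRAY for typical documents
        System.out.println(first);
        parser.close();
    }
}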

View File

@ -13,7 +13,7 @@ public class LineBasedFormatGuesser implements FormatGuesser {
return "text/line-based/*sv";
}
int[] widths = FixedWidthImporter.guessColumnWidths(file, encoding);
if (widths != null) {
if (widths != null && widths.length > 0) {
return "text/line-based/fixed-width";
}
return null;

View File

@ -42,6 +42,7 @@ import java.io.LineNumberReader;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
@ -95,12 +96,13 @@ public class SeparatorBasedImporter extends TabularImportingParserBase {
}
sep = StringEscapeUtils.unescapeJava(sep);
boolean processQuotes = JSONUtilities.getBoolean(options, "processQuotes", true);
boolean strictQuotes = JSONUtilities.getBoolean(options, "strictQuotes", false);
final CSVParser parser = new CSVParser(
sep.toCharArray()[0],//HACK changing string to char - won't work for multi-char separators.
CSVParser.DEFAULT_QUOTE_CHARACTER,
(char) 127, // we don't want escape processing try DEL as a rare character until we can turn it off
CSVParser.DEFAULT_STRICT_QUOTES,
strictQuotes,
CSVParser.DEFAULT_IGNORE_LEADING_WHITESPACE,
!processQuotes);
@ -126,14 +128,10 @@ public class SeparatorBasedImporter extends TabularImportingParserBase {
ArrayList<Object> cells = new ArrayList<Object>();
String[] tokens = parser.parseLineMulti(line);
for (String s : tokens){
cells.add(s);
}
cells.addAll(Arrays.asList(tokens));
while (parser.isPending()) {
tokens = parser.parseLineMulti(lnReader.readLine());
for (String s : tokens) {
cells.add(s);
}
cells.addAll(Arrays.asList(tokens));
}
return cells;
}

View File

@ -1,6 +1,6 @@
package com.google.refine.importers.tree;
import java.util.LinkedList;
import java.util.ArrayList;
import java.util.List;
import com.google.refine.model.Cell;
@ -10,5 +10,66 @@ import com.google.refine.model.Cell;
*
*/
public class ImportRecord {
public List<List<Cell>> rows = new LinkedList<List<Cell>>();
public List<List<Cell>> rows = new BasedList<List<Cell>>();
/**
* A List implementation to match the characteristics needed by the
* import process. It's optimized for a relatively small number of
* contiguous records at a potentially large offset from zero.
* <p>
* I suspect it's usually only a single row, but we support more, just
* not as efficiently. Depending on the behavior of the ColumnGroups
* this may not be necessary at all, but I don't fully understand what it
* does, so we'll just put this hack in place for now.
*
* @param <T>
*/
class BasedList<T> extends ArrayList<T> {
private static final long serialVersionUID = 1L;
int offset = Integer.MAX_VALUE;
public T set(int index, T element) {
rebase(index);
extend(index);
return super.set(index - offset, element);
}
public T get(int index) {
if (offset == Integer.MAX_VALUE || index - offset > size() - 1) {
return null;
}
return super.get(index - offset);
}
private void rebase(final int index) {
if (index < offset) {
if (offset < Integer.MAX_VALUE) {
int new_offset = Math.max(0, index - 10); // Leave some extra room
int delta = offset - new_offset;
// Ensure room at top
for (int i = 0; i < delta; i++) {
add(null);
}
// Shuffle up
for (int i = size(); i > delta; i--) {
set(i, get(i - delta));
}
// Null unused entries
for (int i = 0; i < delta; i++) {
set(i, null);
}
offset = new_offset;
} else {
offset = index;
}
}
}
private void extend(final int index) {
int i = index - offset;
while (i >= size()) {
add(null);
}
}
}
}
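To make the contract described in the comment above concrete, here is a small stand-alone sketch (not part of the commit): SparseRowList is a hypothetical TreeMap-backed stand-in with the same set/get behaviour the BasedList aims for, namely cheap writes at a large, mostly contiguous offset and null for unpopulated slots.
import java.util.TreeMap;
class SparseRowList<T> {
    private final TreeMap<Integer, T> slots = new TreeMap<Integer, T>();
    public T set(int index, T element) { return slots.put(index, element); }
    public T get(int index) { return slots.get(index); }          // null when unset
    public int size() { return slots.isEmpty() ? 0 : slots.lastKey() + 1; }
}
public class SparseRowListDemo {
    public static void main(String[] args) {
        SparseRowList<String> rows = new SparseRowList<String>();
        rows.set(100000, "first row");   // large offset from zero
        rows.set(100001, "second row");  // contiguous with the first
        System.out.println(rows.get(100000)); // first row
        System.out.println(rows.get(5));      // null: below the populated window
        System.out.println(rows.size());      // 100002, mirroring List.size() semantics
    }
}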

View File

@ -150,11 +150,13 @@ public abstract class TreeImportUtilities {
int cellIndex = column.cellIndex;
int rowIndex = Math.max(columnGroup.nextRowIndex, column.nextRowIndex);
while (rowIndex >= record.rows.size()) {
record.rows.add(new ArrayList<Cell>());
}
List<Cell> row = record.rows.get(rowIndex);
if (row == null) {
row = new ArrayList<Cell>();
record.rows.set(rowIndex, row);
}
while (cellIndex >= row.size()) {
row.add(null);
}

View File

@ -129,7 +129,9 @@ public class XmlImportUtilities extends TreeImportUtilities {
* null if no candidates were found (fewer than 6 recurrences)
*/
static public String[] detectRecordElement(TreeReader parser) {
logger.trace("detectRecordElement(inputStream)");
if (logger.isTraceEnabled()) {
logger.trace("detectRecordElement(inputStream)");
}
List<RecordElementCandidate> candidates = new ArrayList<RecordElementCandidate>();
try {
@ -161,7 +163,9 @@ public class XmlImportUtilities extends TreeImportUtilities {
}
static protected RecordElementCandidate detectRecordElement(TreeReader parser, String[] path) {
logger.trace("detectRecordElement(TreeReader, String[])");
if (logger.isTraceEnabled()) {
logger.trace("detectRecordElement(TreeReader, String[])");
}
List<RecordElementCandidate> descendantCandidates = new ArrayList<RecordElementCandidate>();
Map<String, Integer> immediateChildCandidateMap = new HashMap<String, Integer>();
@ -217,7 +221,10 @@ public class XmlImportUtilities extends TreeImportUtilities {
sortRecordElementCandidates(immediateChildCandidates);
RecordElementCandidate ourCandidate = immediateChildCandidates.get(0);
logger.trace("ourCandidate.count : " + ourCandidate.count + "; immediateChildCandidates.size() : " + immediateChildCandidates.size());
if (logger.isTraceEnabled()) {
logger.trace("ourCandidate.count : " + ourCandidate.count + "; immediateChildCandidates.size() : "
+ immediateChildCandidates.size());
}
if (ourCandidate.count / immediateChildCandidates.size() > 5) {
return ourCandidate;
}
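The guard above encodes the "fewer than 6 recurrences" rule from the javadoc: a candidate is accepted only if it recurs, on average, more than five times per immediate-child candidate. A tiny illustration with hypothetical counts (not taken from the commit):
public class RecordCandidateThreshold {
    public static void main(String[] args) {
        int candidateCount = 24;           // occurrences of the most frequent child element
        int immediateChildCandidates = 3;  // distinct child element names seen at this level
        // Integer division, as in the code above: 24 / 3 = 8, which clears the > 5 bar
        System.out.println(candidateCount / immediateChildCandidates > 5); // true
    }
}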
@ -259,7 +266,9 @@ public class XmlImportUtilities extends TreeImportUtilities {
boolean storeEmptyStrings,
boolean guessDataType
) {
logger.trace("importTreeData(TreeReader, Project, String[], ImportColumnGroup)");
if (logger.isTraceEnabled()) {
logger.trace("importTreeData(TreeReader, Project, String[], ImportColumnGroup)");
}
try {
while (parser.hasNext()) {
Token eventType = parser.next();
@ -305,8 +314,9 @@ public class XmlImportUtilities extends TreeImportUtilities {
boolean storeEmptyStrings,
boolean guessDataType
) throws TreeReaderException {
logger.trace("findRecord(Project, TreeReader, String[], int, ImportColumnGroup - path:"+Arrays.toString(recordPath));
if (logger.isTraceEnabled()) {
logger.trace("findRecord(Project, TreeReader, String[], int, ImportColumnGroup - path:"+Arrays.toString(recordPath));
}
if(parser.current() == Token.Ignorable){//XMLStreamConstants.START_DOCUMENT){
logger.warn("Cannot use findRecord method for START_DOCUMENT event");
return;
@ -382,7 +392,9 @@ public class XmlImportUtilities extends TreeImportUtilities {
boolean storeEmptyStrings,
boolean guessDataType
) throws TreeReaderException {
logger.trace("processRecord(Project,TreeReader,ImportColumnGroup)");
if (logger.isTraceEnabled()) {
logger.trace("processRecord(Project,TreeReader,ImportColumnGroup)");
}
ImportRecord record = new ImportRecord();
processSubRecord(project, parser, rootColumnGroup, record, 0, trimStrings, storeEmptyStrings, guessDataType);
@ -418,8 +430,9 @@ public class XmlImportUtilities extends TreeImportUtilities {
boolean storeEmptyStrings,
boolean guessDataType
) throws TreeReaderException {
logger.trace("processFieldAsRecord(Project,TreeReader,ImportColumnGroup)");
if (logger.isTraceEnabled()) {
logger.trace("processFieldAsRecord(Project,TreeReader,ImportColumnGroup)");
}
Serializable value = parser.getValue();
ImportRecord record = null;
if (value instanceof String) {
@ -455,20 +468,19 @@ public class XmlImportUtilities extends TreeImportUtilities {
}
static protected void addImportRecordToProject(ImportRecord record, Project project) {
if (record.rows.size() > 0) {
for (List<Cell> row : record.rows) {
Row realRow = new Row(row.size());
int cellCount = 0;
for (List<Cell> row : record.rows) {
if (row.size() > 0) {
Row realRow = null;
for (int c = 0; c < row.size(); c++) {
Cell cell = row.get(c);
if (cell != null) {
if (realRow == null) {
realRow = new Row(row.size());
}
realRow.setCell(c, cell);
cellCount++;
}
}
if (cellCount > 0) {
if (realRow != null) {
project.rows.add(realRow);
}
}
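The rewrite above only allocates a Row once a non-null cell turns up, so records whose rows are entirely empty never reach the project. A generic sketch of the same pattern, using plain String lists in place of the Refine Row/Cell model (names and data are illustrative):
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class LazyRowDemo {
    public static void main(String[] args) {
        List<List<String>> record = new ArrayList<List<String>>();
        record.add(Arrays.asList(new String[] { null, null }));          // all null: skipped
        record.add(Arrays.asList(new String[] { "Tom", null, "2013" })); // kept
        List<List<String>> projectRows = new ArrayList<List<String>>();
        for (List<String> row : record) {
            List<String> realRow = null;
            for (int c = 0; c < row.size(); c++) {
                String cell = row.get(c);
                if (cell != null) {
                    if (realRow == null) {
                        realRow = Arrays.asList(new String[row.size()]); // sized like Row(row.size())
                    }
                    realRow.set(c, cell);
                }
            }
            if (realRow != null) {
                projectRows.add(realRow);
            }
        }
        System.out.println(projectRows); // [[Tom, null, 2013]]
    }
}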
@ -506,7 +518,9 @@ public class XmlImportUtilities extends TreeImportUtilities {
boolean storeEmptyStrings,
boolean guessDataType
) throws TreeReaderException {
logger.trace("processSubRecord(Project,TreeReader,ImportColumnGroup,ImportRecord) lvl:"+level+" "+columnGroup);
if (logger.isTraceEnabled()) {
logger.trace("processSubRecord(Project,TreeReader,ImportColumnGroup,ImportRecord) lvl:"+level+" "+columnGroup);
}
if(parser.current() == Token.Ignorable) {
return;

View File

@ -270,6 +270,9 @@ public class ImportingManager {
static public String getFormat(String fileName, String mimeType) {
String fileNameFormat = getFormatFromFileName(fileName);
if (mimeType != null) {
mimeType = mimeType.split(";")[0];
}
String mimeTypeFormat = mimeType == null ? null : getFormatFromMimeType(mimeType);
if (mimeTypeFormat == null) {
return fileNameFormat;
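The new split strips MIME-type parameters before the lookup, so a Content-Type such as "text/csv; charset=UTF-8" still resolves to the registered "text/csv" importer. A minimal illustration (the header value and format table below are hypothetical):
import java.util.HashMap;
import java.util.Map;
public class MimeTypeLookup {
    public static void main(String[] args) {
        Map<String, String> mimeTypeToFormat = new HashMap<String, String>();
        mimeTypeToFormat.put("text/csv", "text/line-based/*sv");
        String mimeType = "text/csv; charset=UTF-8";                      // as commonly sent by servers
        System.out.println(mimeTypeToFormat.get(mimeType));               // null: parameters defeat the lookup
        System.out.println(mimeTypeToFormat.get(mimeType.split(";")[0])); // text/line-based/*sv
    }
}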

View File

@ -253,7 +253,7 @@ public class Recon implements HasFields, Jsonizable {
@Override
public Object getField(String name, Properties bindings) {
int index = s_featureMap.containsKey(name) ? s_featureMap.get(name) : -1;
return (index > 0 && index < features.length) ? features[index] : null;
return (index >= 0 && index < features.length) ? features[index] : null;
}
@Override
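With the old "index > 0" test, a feature stored in slot 0 of the features array could never be read back through getField; the ">= 0" test fixes that boundary. A small illustration with made-up feature values:
public class FeatureIndexBoundary {
    public static void main(String[] args) {
        Object[] features = new Object[] { "nameMatch", 0.95, Boolean.TRUE }; // hypothetical contents
        int index = 0; // the requested feature happens to live in slot 0
        Object before = (index > 0  && index < features.length) ? features[index] : null;
        Object after  = (index >= 0 && index < features.length) ? features[index] : null;
        System.out.println(before); // null  (slot 0 was unreachable)
        System.out.println(after);  // nameMatch
    }
}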

View File

@ -1,6 +1,6 @@
/*
Copyright 2010, Google Inc.
Copyright 2010,2013 Google Inc. and other contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
@ -44,6 +44,8 @@ import java.util.Properties;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.refine.Jsonizable;
import com.google.refine.model.Cell;
@ -54,6 +56,8 @@ import com.google.refine.model.Row;
import edu.mit.simile.butterfly.ButterflyModule;
abstract public class ReconConfig implements Jsonizable {
final static protected Logger LOGGER = LoggerFactory.getLogger("recon-config");
static final public Map<String, List<Class<? extends ReconConfig>>> s_opNameToClass =
new HashMap<String, List<Class<? extends ReconConfig>>>();
@ -96,7 +100,7 @@ abstract public class ReconConfig implements Jsonizable {
}
}
} catch (Exception e) {
e.printStackTrace();
LOGGER.error("Reconstruct failed",e);
}
return null;
}
@ -122,7 +126,7 @@ abstract public class ReconConfig implements Jsonizable {
try {
write(jsonWriter, new Properties());
} catch (JSONException e) {
e.printStackTrace();
LOGGER.error("Save failed",e);
}
}
}

View File

@ -456,6 +456,7 @@ public class StandardReconConfig extends ReconConfig {
static final protected Set<String> s_stopWords = new HashSet<String>();
static {
// FIXME: This is English specific
s_stopWords.add("the");
s_stopWords.add("a");
s_stopWords.add("and");

View File

@ -35,7 +35,6 @@ package com.google.refine.tests.importers;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.util.LinkedList;
@ -228,7 +227,7 @@ public class JsonImporterTests extends ImporterTest {
String sampleJson2 = "{\"field\":{}}";
String sampleJson3 = "{\"field\":[{},{}]}";
JSONTreeReader parser = new JSONTreeReader(new StringReader(sampleJson));
JSONTreeReader parser = new JSONTreeReader(new ByteArrayInputStream(sampleJson.getBytes("UTF-8")));
Token token = Token.Ignorable;
int i = 0;
try{
@ -248,7 +247,7 @@ public class JsonImporterTests extends ImporterTest {
}
parser = new JSONTreeReader(new StringReader(sampleJson2));
parser = new JSONTreeReader(new ByteArrayInputStream(sampleJson2.getBytes("UTF-8")));
token = Token.Ignorable;
i = 0;
try{
@ -267,7 +266,7 @@ public class JsonImporterTests extends ImporterTest {
//silent
}
parser = new JSONTreeReader(new StringReader(sampleJson3));
parser = new JSONTreeReader(new ByteArrayInputStream(sampleJson3.getBytes("UTF-8")));
token = Token.Ignorable;
i = 0;
try{
@ -505,7 +504,7 @@ public class JsonImporterTests extends ImporterTest {
}
try {
parseOneFile(SUT, inputStream, options);
parseOneInputStream(SUT, inputStream, options);
} catch (Exception e) {
Assert.fail();
}

View File

@ -35,7 +35,6 @@ package com.google.refine.tests.importers;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
@ -197,7 +196,7 @@ public class XmlImportUtilitiesTests extends RefineTest {
loadSampleJson();
String[] path = XmlImportUtilitiesStub.detectRecordElement(
new JSONTreeReader(new InputStreamReader(inputStream)));
new JSONTreeReader(inputStream));
Assert.assertNotNull(path);
Assert.assertEquals(path.length, 2);
Assert.assertEquals(path[0], JsonImporter.ANONYMOUS);
@ -455,7 +454,7 @@ public class XmlImportUtilitiesTests extends RefineTest {
}
}
public TreeReader createJsonParser(){
parser = new JSONTreeReader(new InputStreamReader(inputStream));
parser = new JSONTreeReader(inputStream);
return parser;
}
}

View File

@ -87,7 +87,7 @@
var SEARCH_PARAMS = {
key:1, filter:1, spell:1, exact:1,
lang:1, scoring:1, prefixed:1, stemmed:1, format:1, mql_output:1,
output:1
output:1, type:1
};
$.suggest = function(name, prototype) {

View File

@ -34,7 +34,7 @@
*
*/
(function(c,q){if(!("console"in window)){var o=window.console={};o.log=o.warn=o.error=o.debug=function(){}}c(function(){var a=c("<div>");c(document.body).append(a);var b=setTimeout(function(){if(c.cleanData){var a=c.cleanData;c.cleanData=function(b){for(var e=0,g;null!=(g=b[e]);e++)c(g).triggerHandler("remove");a(b)}}else{var b=c.fn.remove;c.fn.remove=function(a,f){return this.each(function(){f||(!a||c.filter(a,[this]).length)&&c("*",this).add([this]).each(function(){c(this).triggerHandler("remove")});
return b.call(c(this),a,f)})}}},1);a.bind("remove",function(){clearTimeout(b)});a.remove()});var p={key:1,filter:1,spell:1,exact:1,lang:1,scoring:1,prefixed:1,stemmed:1,format:1,mql_output:1,output:1};c.suggest=function(a,b){c.fn[a]=function(b){this.length||console.warn("Suggest: invoked on empty element set");return this.each(function(){this.nodeName&&("INPUT"===this.nodeName.toUpperCase()?this.type&&"TEXT"!==this.type.toUpperCase()&&console.warn("Suggest: unsupported INPUT type: "+this.type):console.warn("Suggest: unsupported DOM element: "+
return b.call(c(this),a,f)})}}},1);a.bind("remove",function(){clearTimeout(b)});a.remove()});var p={key:1,filter:1,spell:1,exact:1,lang:1,scoring:1,prefixed:1,stemmed:1,format:1,mql_output:1,output:1,type:1};c.suggest=function(a,b){c.fn[a]=function(b){this.length||console.warn("Suggest: invoked on empty element set");return this.each(function(){this.nodeName&&("INPUT"===this.nodeName.toUpperCase()?this.type&&"TEXT"!==this.type.toUpperCase()&&console.warn("Suggest: unsupported INPUT type: "+this.type):console.warn("Suggest: unsupported DOM element: "+
this.nodeName));var g=c.data(this,a);g&&g._destroy();c.data(this,a,new c.suggest[a](this,b))._init()})};c.suggest[a]=function(b,g){var d=this,e=this.options=c.extend(!0,{},c.suggest.defaults,c.suggest[a].defaults,g),j=e.css_prefix=e.css_prefix||"",h=e.css;this.name=a;c.each(h,function(a){h[a]=j+h[a]});e.ac_param={};c.each(p,function(a){var b=e[a];null===b||""===b||(e.ac_param[a]=b)});e.flyout_lang=null;if(e.ac_param.lang){var i=e.ac_param.lang;"string"===c.type(i)&&(i=i.split(","));if(c.isArray(i)&&
i.length&&(i=c.trim(i[0])))e.flyout_lang=i}this._status={START:"",LOADING:"",SELECT:"",ERROR:""};e.status&&(e.status instanceof Array&&3<=e.status.length)&&(this._status.START=e.status[0]||"",this._status.LOADING=e.status[1]||"",this._status.SELECT=e.status[2]||"",4===e.status.length&&(this._status.ERROR=e.status[3]||""));var i=this.status=c('<div style="display:none;">').addClass(h.status),k=this.list=c("<ul>").addClass(h.list),l=this.pane=c('<div style="display:none;" class="fbs-reset">').addClass(h.pane);
l.append(i).append(k);e.parent?c(e.parent).append(l):(l.css("position","absolute"),e.zIndex&&l.css("z-index",e.zIndex),c(document.body).append(l));l.bind("mousedown",function(a){d.input.data("dont_hide",true);a.stopPropagation()}).bind("mouseup",function(a){d.input.data("dont_hide")&&d.input.focus();d.input.removeData("dont_hide");a.stopPropagation()}).bind("click",function(a){a.stopPropagation();if(a=d.get_selected()){d.onselect(a,true);d.hide_all()}});k.hover(function(a){d.hoverover_list(a)},function(a){d.hoverout_list(a)});

View File

@ -9,7 +9,7 @@
"New York" and "new york" are very likely to refer to the same concept and
just have capitalization differences, and "Gödel" and "Godel" probably refer
to the same person.
<a href="http://code.google.com/p/google-refine/wiki/Clustering" target="_blank">Find out more ...</a>
<a href="https://github.com/OpenRefine/OpenRefine/wiki/Clustering" target="_blank">Find out more ...</a>
</td>
</tr>
<tr>

View File

@ -153,7 +153,7 @@ ClusteringDialog.prototype._renderTable = function(clusters) {
cluster.value = value;
parent.find("input[type='text']").val(value);
parent.find("input:not(:checked)").attr('checked', true).change();
parent.find("input[type='checkbox']").attr('checked', true).change();
return false;
};
for (var c = 0; c < choices.length; c++) {
@ -202,7 +202,7 @@ ClusteringDialog.prototype._renderTable = function(clusters) {
editCheck.attr("checked", "true");
}
var input = $('<input size="25" />')
var input = $('<input type="text" size="25" />')
.attr("value", cluster.value)
.keyup(function() {
cluster.value = this.value;

View File

@ -35,7 +35,7 @@ var theProject;
var ui = {};
var Refine = {
refineHelperService: "http://1.refine-helper.stefanomazzocchi.user.dev.freebaseapps.com"
refineHelperService: "http://openrefine-helper.freebaseapps.com"
};
Refine.reportException = function(e) {

View File

@ -214,8 +214,8 @@ ReconDialog.prototype._onAddNamespacedService = function() {
var namespaceData = elmts.namespaceInput.data("data.suggest");
var typeData = elmts.typeInput.data("data.suggest");
if (namespaceData) {
var url = "http://4.standard-reconcile.dfhuynh.user.dev.freebaseapps.com/namespace_reconcile?namespace=" +
escape(namespaceData.id);
var url = "http://reconcile.freebaseapps.com/namespace_reconcile?namespace="
+ escape(namespaceData.id);
if (typeData) {
url += "&type=" + typeData.id;
}

View File

@ -156,7 +156,9 @@ ReconciliationManager.save = function(f) {
ReconciliationManager._rebuildMap();
} else {
ReconciliationManager.registerStandardService(
"http://4.standard-reconcile.dfhuynh.user.dev.freebaseapps.com/reconcile");
// "http://reconcile.freebaseapps.com/reconcile"
"http://standard-reconcile.freebaseapps.com/reconcile"
);
}
},
dataType: "json"

View File

@ -245,7 +245,7 @@ ReconStandardServicePanel.prototype._rewirePropertySuggests = function(type) {
} else if (this._isInFreebaseSchemaSpace()) {
var namespace = (type) ? (typeof type == "string" ? type : type.id) : "/common/topic"
inputs.suggestP({
filter : '(all type:/type/property (any namespace:/type/object namespace:' + namespace + '))'
filter : '(should (any namespace:/type/object namespace:' + namespace + '))'
});
}
};

View File

@ -33,6 +33,10 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
var CustomSuggest = {};
// Default API key for Refine to use for freebase suggest widget
//CustomSuggest.setFreebaseAPIKey("AIzaSyBBTAtJ31v_jlg_ImbQuBNnAaAyrHzRyW8"); // Google key
CustomSuggest.FREEBASE_API_KEY = "AIzaSyBAZ_EjMPKlOzyyZXv6JKXPPwJFISVji3M"; // OpenRefine default key
CustomSuggest.setFreebaseAPIKey = function(freebaseAPIKey) {
$.suggest.suggest.defaults.key = freebaseAPIKey;
$.suggest.suggestT.defaults.key = freebaseAPIKey;
@ -111,6 +115,7 @@ CustomSuggest.setFreebaseAPIKey = function(freebaseAPIKey) {
{},
$.suggest.suggest.defaults, {
scoring: "schema",
type: "/type/property",
css: { pane: "fbs-pane fbs-pane-property" }
}
)
@ -172,8 +177,7 @@ CustomSuggest.setFreebaseAPIKey = function(freebaseAPIKey) {
if (data.value && data.value != "null") {
CustomSuggest.setFreebaseAPIKey(data.value);
} else {
// Default API key for Refine to use for freebase suggest widget
CustomSuggest.setFreebaseAPIKey("AIzaSyBBTAtJ31v_jlg_ImbQuBNnAaAyrHzRyW8");
CustomSuggest.setFreebaseAPIKey(CustomSuggest.FREEBASE_API_KEY);
}
}
}

View File

@ -379,7 +379,7 @@ DataTableCellUI.prototype._searchForMatch = function(suggestOptions) {
var suggestOptions2 = $.extend({ align: "left" }, suggestOptions
|| { all_types: true, // FIXME: all_types isn't documented for Suggest. Is it still implemented?
filter: "(not (any type:/common/document type:/type/content))" }); // blacklist documents and images
filter: "(not (any type:/common/document type:/type/content type:/common/resource))" }); // blacklist documents and images
if (suggestOptions2.service_url) {
// Old style suggest API
suggestOptions2.key = null;

View File

@ -89,8 +89,8 @@ DataTableColumnHeaderUI.extendMenu(function(column, columnHeaderUI, menu) {
"id" : data.id,
"type" : []
};
var baseUrl = "http://api.freebase.com/api/service/mqlread";
var url = baseUrl + "?" + $.param({ query: JSON.stringify({ query: query }) }) + "&callback=?";
var baseUrl = "https://www.googleapis.com/freebase/v1/mqlread?key=" + Freebase.API_KEY + "&";
var url = baseUrl + $.param({ query: JSON.stringify(query) }) + "&callback=?";
$.getJSON(
url,

refine
View File

@ -46,6 +46,8 @@ where [options] include:
-m <memory> max memory heap size to use
default: 1024M
-k <google api key> a server API key for calling Google APIs
-v <level> verbosity level [from low to high: error,warn,info,debug,trace]
default: info
@ -151,8 +153,8 @@ check_running() {
CHECK_STR="<title>OpenRefine</title>"
if [ "$CURL" ] ; then
curl -s -S $URL > /dev/null 2>&1
if [ "$?" = "7" ] ; then
curl -s -S -f $URL > /dev/null 2>&1
if [ "$?" = "7" ] || [ "$?" = "22" ] ; then
NOT_RUNNING="1"
fi
elif [ "$WGET" ] ; then
@ -207,12 +209,11 @@ get_revision() {
if [ -d ".svn" ] ; then
INFO=`svn info`
elif [ -d ".git" ] ; then
INFO=`git svn info`
INFO=`git describe`
else
error "cannot obtain revision, exiting!"
fi
REVISION=`echo $INFO | sed 's/.*Revision: /r/' | sed 's/ .*//'`
REVISION=`echo $INFO | sed s/^$VERSION-//`
}
download() {
@ -263,6 +264,17 @@ load_data() {
fi
}
display() {
FILE=$1
if [ "$OS" = "macosx" ] ; then
open $FILE
elif [ "$OS" = "linux" ] ; then
gnome-open $FILE
else
notepad $FILE
fi
}
# ----------------------------------------------------------------------------------------------
build_prepare() {
@ -286,9 +298,10 @@ tools_prepare() {
ant_prepare() {
tools_prepare
ANT_URL="http://archive.apache.org/dist/ant/binaries/apache-ant-1.8.1-bin.tar.gz"
ANT_VERSION="1.9.0"
ANT_URL="http://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz"
ANT_FILE=`echo $ANT_URL | sed 's|.*/||'`
ANT_DIR="apache-ant-1.8.1"
ANT_DIR="apache-ant-${ANT_VERSION}"
ANT="`which ant 2> /dev/null`"
if [ -z "$ANT" ] ; then
@ -325,12 +338,14 @@ appengine_prepare() {
launch4j_prepare() {
tools_prepare
L4J_VERSION="3.1.0-beta1" # buggy!
L4J_VERSION="3.0.2"
if [ "$OS" = "macosx" ] ; then
LAUNCH4J_URL="http://downloads.sourceforge.net/project/launch4j/launch4j-3/3.0.2/launch4j-3.0.2-macosx.tgz"
LAUNCH4J_URL="http://downloads.sourceforge.net/project/launch4j/launch4j-3/${L4J_VERSION}/launch4j-${L4J_VERSION}-macosx.tgz"
elif [ "$OS" = "windows" ] ; then
LAUNCH4J_URL="http://downloads.sourceforge.net/project/launch4j/launch4j-3/3.0.2/launch4j-3.0.2-win32.zip"
LAUNCH4J_URL="http://downloads.sourceforge.net/project/launch4j/launch4j-3/${L4J_VERSION}/launch4j-${L4J_VERSION}-win32.zip"
elif [ "$OS" = "linux" ] ; then
LAUNCH4J_URL="http://downloads.sourceforge.net/project/launch4j/launch4j-3/3.0.2/launch4j-3.0.2-linux.tgz"
LAUNCH4J_URL="http://downloads.sourceforge.net/project/launch4j/launch4j-3/${L4J_VERSION}/launch4j-${L4J_VERSION}-linux.tgz"
fi
LAUNCH4J_FILE=`echo $LAUNCH4J_URL | sed 's|.*/||'`
@ -342,6 +357,7 @@ launch4j_prepare() {
# without PPC support (e.g. Lion) so we patch it to include Intel binaries;
# the following lines can be removed once we update to a version of
# launch4j that fixes the issue (not available yet).
# (3.1.0-beta1 fixes this, but introduces other bugs which make it unusable)
if [ "$OS" = "macosx" ] ; then
if [ ! -f "$REFINE_TOOLS_DIR/launch4j/bin/windres.bak" ] ; then
mv "$REFINE_TOOLS_DIR/launch4j/bin/windres" "$REFINE_TOOLS_DIR/launch4j/bin/windres.bak"
@ -356,20 +372,20 @@ launch4j_prepare() {
fi
}
jarbundler_prepare() {
appbundler_prepare() {
tools_prepare
JARBUNDLER_URL="http://www.informagen.com/JarBundler/dist/jarbundler.tar.gz"
JARBUNDLER_FILE=`echo $JARBUNDLER_URL | sed 's|.*/||'`
JARBUNDLER_DIR="jarbundler-2.2.0"
APPBUNDLER_URL="http://java.net/projects/appbundler/downloads/download/appbundler-1.0.jar"
APPBUNDLER_DIR="."
APPBUNDLER_FILE=`echo $APPBUNDLER_URL | sed 's|.*/||'`
tool_download $JARBUNDLER_URL $JARBUNDLER_FILE $JARBUNDLER_DIR
tool_download $APPBUNDLER_URL $APPBUNDLER_FILE $APPBUNDLER_DIR
}
virtualenv_prepare() {
check_python
VIRTUALENV_DIR="virtualenv-1.7.1.2"
VIRTUALENV_DIR="virtualenv-1.9.1"
VIRTUALENV_FILE="${VIRTUALENV_DIR}.tar.gz"
VIRTUALENV_URL="http://pypi.python.org/packages/source/v/virtualenv/${VIRTUALENV_FILE}"
@ -417,10 +433,10 @@ windmill_prepare() {
findbugs_prepare() {
tools_prepare
FINDBUGS_URL="http://downloads.sourceforge.net/project/findbugs/findbugs/1.3.9/findbugs-1.3.9.tar.gz"
FB_VERSION="2.0.2"
FINDBUGS_URL="http://downloads.sourceforge.net/project/findbugs/findbugs/${FB_VERSION}/findbugs-${FB_VERSION}.tar.gz"
FINDBUGS_FILE=`echo $FINDBUGS_URL | sed 's|.*/||'`
FINDBUGS_DIR="findbugs-1.3.9"
FINDBUGS_DIR="findbugs-${FB_VERSION}"
tool_download $FINDBUGS_URL $FINDBUGS_FILE $FINDBUGS_DIR
}
@ -428,9 +444,10 @@ findbugs_prepare() {
pmd_prepare() {
tools_prepare
PMD_URL="http://downloads.sourceforge.net/project/pmd/pmd/4.2.5/pmd-bin-4.2.5.zip"
PMD_FILE="pmd-bin-4.2.5.zip"
PMD_DIR="pmd-4.2.5"
PMD_VERSION="4.2.6"
PMD_URL="http://downloads.sourceforge.net/project/pmd/pmd/${PMD_VERSION}/pmd-bin-${PMD_VERSION}.zip"
PMD_FILE=`echo $PMD_URL | sed 's|.*/||'`
PMD_DIR="pmd-bin-${PMD_VERSION}"
tool_download $PMD_URL $PMD_FILE $PMD_DIR
}
@ -438,9 +455,10 @@ pmd_prepare() {
jslint_prepare() {
tools_prepare
JSLINT_URL="http://jslint4java.googlecode.com/files/jslint4java-1.3.3-dist.zip"
JSLINT_FILE="jslint4java-1.3.3-dist.zip"
JSLINT_DIR="jslint4java-1.3.3"
JSLINT_VERSION="2.0.3"
JSLINT_URL="http://jslint4java.googlecode.com/files/jslint4java-${JSLINT_VERSION}-dist.zip"
JSLINT_FILE=`echo $JSLINT_URL | sed 's|.*/||'`
JSLINT_DIR="jslint4java-${JSLINT_VERSION}"
tool_download $JSLINT_URL $JSLINT_FILE $JSLINT_DIR
}
@ -503,15 +521,15 @@ mac_dist() {
get_version $1
get_revision
jarbundler_prepare
appbundler_prepare
ANT_PARAMS="-Djarbundler.dir=${REFINE_TOOLS_DIR}/${JARBUNDLER_DIR}"
ANT_PARAMS="-Dappbundler.dir=${REFINE_TOOLS_DIR}/${APPBUNDLER_DIR}"
ant mac
mkdir -p "$REFINE_BUILD_DIR/mac/.background"
cp graphics/dmg_background/dmg_background.png "$REFINE_BUILD_DIR/mac/.background/dmg_background.png"
SIZE=60
SIZE=200
if [ -f "$REFINE_BUILD_DIR/temp_refine.dmg" ] ; then
rm "$REFINE_BUILD_DIR/temp_refine.dmg"
@ -541,7 +559,7 @@ mac_dist() {
close
open
update without registering applications
delay 2
delay 5
eject
end tell
end tell
@ -551,14 +569,14 @@ mac_dist() {
sync
hdiutil detach $DEVICE
if [ -f "$REFINE_DIST_DIR/google-refine-$VERSION-$REVISION.dmg" ] ; then
rm "$REFINE_DIST_DIR/google-refine-$VERSION-$REVISION.dmg"
if [ -f "$REFINE_DIST_DIR/openrefine-$VERSION-$REVISION.dmg" ] ; then
rm "$REFINE_DIST_DIR/openrefine-$VERSION-$REVISION.dmg"
fi
hdiutil convert "$REFINE_BUILD_DIR/temp_refine.dmg" -format UDZO -imagekey zlib-level=9 -o "$REFINE_DIST_DIR/google-refine-$VERSION-$REVISION.dmg" || error "Error compressing DMG"
hdiutil internet-enable -yes "$REFINE_DIST_DIR/google-refine-$VERSION-$REVISION.dmg" || error "Error internet-enabling DMG"
hdiutil convert "$REFINE_BUILD_DIR/temp_refine.dmg" -format UDZO -imagekey zlib-level=9 -o "$REFINE_DIST_DIR/openrefine-$VERSION-$REVISION.dmg" || error "Error compressing DMG"
hdiutil internet-enable -yes "$REFINE_DIST_DIR/openrefine-$VERSION-$REVISION.dmg" || error "Error internet-enabling DMG"
rm -f "$REFINE_BUILD_DIR/temp_refine.dmg"
#rm -f "$REFINE_BUILD_DIR/temp_refine.dmg"
}
test() {
@ -571,7 +589,7 @@ ui_test() {
windmill_prepare
REFINE_DATA_DIR="${TMPDIR:=/tmp}/google-refine-tests"
REFINE_DATA_DIR="${TMPDIR:=/tmp}/openrefine-tests"
add_option "-Drefine.headless=true"
@ -634,7 +652,7 @@ run() {
fi
if [ ! -d $REFINE_CLASSES_DIR ] ; then
IS_JAR=`ls $REFINE_LIB_DIR | grep google-refine`
IS_JAR=`ls $REFINE_LIB_DIR | grep openrefine`
if [ -z "$IS_JAR" ] ; then
ant build
echo ""
@ -646,7 +664,7 @@ run() {
fi
if [ "$OS" = "macosx" ] ; then
add_option '-Xdock:icon=graphics/icon/google-refine.icns'
add_option '-Xdock:icon=graphics/icon/openrefine.icns'
fi
if [ "$REFINE_DATA_DIR" ] ; then
@ -665,6 +683,10 @@ run() {
add_option "-Drefine.host=$REFINE_HOST"
fi
if [ "$REFINE_GOOGLE_API_KEY" ] ; then
add_option "-Drefine.google_api_key=$REFINE_GOOGLE_API_KEY"
fi
CLASSPATH="$REFINE_CLASSES_DIR${SEP}$REFINE_LIB_DIR/*"
RUN_CMD="$JAVA -cp $CLASSPATH $OPTS com.google.refine.Refine"
@ -760,7 +782,7 @@ findbugs() {
ANT_PARAMS="-Dfindbugs.dir=${REFINE_TOOLS_DIR}/${FINDBUGS_DIR}"
ant findbugs
open "$REFINE_BUILD_DIR/reports/findbugs.html"
display "$REFINE_BUILD_DIR/reports/findbugs.html"
}
pmd() {
@ -769,7 +791,7 @@ pmd() {
ANT_PARAMS="-Dpmd.dir=${REFINE_TOOLS_DIR}/${PMD_DIR}"
ant pmd
open "$REFINE_BUILD_DIR/reports/pmd.html"
display "$REFINE_BUILD_DIR/reports/pmd.html"
}
cpd() {
@ -778,7 +800,7 @@ cpd() {
ANT_PARAMS="-Dpmd.dir=${REFINE_TOOLS_DIR}/${PMD_DIR}"
ant cpd
open "$REFINE_BUILD_DIR/reports/cpd.txt"
display "$REFINE_BUILD_DIR/reports/cpd.txt"
}
jslint() {
@ -787,7 +809,7 @@ jslint() {
ANT_PARAMS="-Djslint.dir=${REFINE_TOOLS_DIR}/${JSLINT_DIR}"
ant jslint
open "$REFINE_BUILD_DIR/reports/jslint.txt"
display "$REFINE_BUILD_DIR/reports/jslint.txt"
}
whitespace() {
@ -839,8 +861,9 @@ load_configs refine.ini
if [ "$OS" = "macosx" ] ; then
if [ -z "$JAVA_HOME" ] ; then
# Mac OS X defaults to Java 5. So update JAVA_HOME unless the user manually set it.
export JAVA_HOME="/System/Library/Frameworks/JavaVM.framework/Versions/1.6/Home"
# We need a recent Java because we're bundling a JRE - may want to warn and force the developer to set JAVA_HOME
# The /usr/libexec/java_home utility may be tied to the Java prefs app, so could go away when Apple removes it
export JAVA_HOME=$(/usr/libexec/java_home)
fi
fi
@ -869,6 +892,7 @@ while [ $# -ne 0 ] ; do
-w) shift; REFINE_WEBAPP="$1"; shift; continue;;
-d) shift; REFINE_DATA_DIR="$1"; shift; continue;;
-m) shift; REFINE_MEMORY="$1"; shift; continue;;
-k) shift; REFINE_GOOGLE_API_KEY="$1"; shift; continue;;
-v) shift; REFINE_VERBOSITY="$1"; shift; continue;;
-x) shift; REFINE_EXTRA_OPTS="$1"; shift; continue;;
--debug) shift; add_option '-Xdebug -Xrunjdwp:transport=dt_socket,address=8000,server=y,suspend=n'; continue;;
@ -953,7 +977,6 @@ add_option "-Dpython.path=$JYTHONPATH"
add_option "-Dpython.cachedir=$HOME/.local/share/google/refine/cachedir"
# ----- Respond to the action given --------------------------------------------
case "$ACTION" in
build) build_prepare; ant build;;
clean) ant clean;;

View File

@ -23,25 +23,25 @@ goto endUtils
echo Usage %0 [options] ^<action^>
echo where [options] include:
echo.
echo /? print this message and exit
echo "/?" print this message and exit
echo.
echo /p <port> the port that OpenRefine will listen to
echo "/p <port>" the port that OpenRefine will listen to
echo default: 3333
echo.
echo /i <interface> the host interface OpenRefine should bind to
echo "/i <interface>" the host interface OpenRefine should bind to
echo default: 127.0.0.1
echo.
echo /w <path> path to the webapp
echo "/w <path>" path to the webapp
echo default src\main\webapp
echo.
echo /d enable JVM debugging (on port 8000)
echo "/d" enable JVM debugging (on port 8000)
echo.
echo /m <memory> max memory heap size to use
echo "/m <memory>" max memory heap size to use
echo default: 1024M
echo.
echo /x enable JMX monitoring (for jconsole and friends)
echo "/x" enable JMX monitoring (for jconsole and friends)
echo.
echo and <action> is one of
echo "and <action> is one of
echo.
echo build ..................... Build OpenRefine
echo run ....................... Run OpenRefine

View File

@ -5,7 +5,7 @@ no_proxy="localhost,127.0.0.1"
#REFINE_PORT=3334
#REFINE_HOST=127.0.0.1
#REFINE_WEBAPP=main\webapp
#REFINE_MEMORY=1024M
REFINE_MEMORY=3000M
# Some sample configurations. These have no defaults.
#ANT_HOME=C:\grefine\tools\apache-ant-1.8.1