version that compiles

rjawor 2019-08-26 10:21:38 +02:00
parent 1c2dcf26f2
commit 10641ecb6c
3 changed files with 28 additions and 8 deletions

View File

@@ -16,6 +16,7 @@
 #include "request.hpp"
 #include "language.hpp"
 #include "rapidjson/rapidjson.h"
+#include "lemmatizer_result.hpp"
 #include <boost/foreach.hpp>
 #include <boost/ptr_container/ptr_map.hpp>
 #include <boost/filesystem/path.hpp>
@@ -180,7 +181,7 @@ std::string ConcordiaServer::handleRequest(std::string & requestString) {
 } else if (operation == LEMMATIZE_OP) {
 std::string sentence = _getStringParameter(d, "sentence");
 std::string languageCode = _getStringParameter(d, "languageCode");
-std::string lemmatizedSentence = _lemmatizerFacade->lemmatizeSentence(languageCode, sentence);
+std::string lemmatizedSentence = _lemmatizerFacade->lemmatizeSentence(languageCode, sentence).getLemmatizedSentence();
 jsonWriter.StartObject();
 jsonWriter.String("lemmatizedSentence");
 jsonWriter.String(lemmatizedSentence.c_str());
@@ -192,12 +193,12 @@ std::string ConcordiaServer::handleRequest(std::string & requestString) {
 for (rapidjson::SizeType i = 0; i < sentencesArray.Size(); i++) {
 sentences.push_back(sentencesArray[i].GetString());
 }
-std::vector<std::string> lemmatizedSentences = _lemmatizerFacade->lemmatizeSentences(languageCode, sentences);
+std::vector<LemmatizerResult> lemmatizedSentences = _lemmatizerFacade->lemmatizeSentences(languageCode, sentences);
 jsonWriter.StartObject();
 jsonWriter.String("lemmatizedSentences");
 jsonWriter.StartArray();
-BOOST_FOREACH(std::string & lemmatizedSentence, lemmatizedSentences) {
-jsonWriter.String(lemmatizedSentence.c_str());
+BOOST_FOREACH(LemmatizerResult & lemmatizedSentence, lemmatizedSentences) {
+jsonWriter.String(lemmatizedSentence.getLemmatizedSentence().c_str());
 }
 jsonWriter.EndArray();
 jsonWriter.EndObject();
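
Both hunks in this file follow from one API change: LemmatizerFacade::lemmatizeSentence now returns a LemmatizerResult instead of a plain std::string, and lemmatizeSentences returns a vector of them, so handleRequest unwraps each result with getLemmatizedSentence() before writing the same JSON shape as before ("lemmatizedSentence" for a single sentence, a "lemmatizedSentences" array for the batch branch). A minimal sketch of the facade interface these call sites imply; the real lemmatizer_facade.hpp is not part of this diff, and the parameter-passing style is a guess:

#include <string>
#include <vector>
#include "lemmatizer_result.hpp"

// Hypothetical reconstruction from the call sites in this commit.
class LemmatizerFacade {
public:
    // Single-sentence lemmatization; the result also records whether the
    // first word was lemmatized (consulted by simpleSearch further down).
    LemmatizerResult lemmatizeSentence(const std::string & languageCode,
                                       const std::string & sentence);

    // Batch variant used by the sentence-array branch above.
    std::vector<LemmatizerResult> lemmatizeSentences(
        const std::string & languageCode,
        const std::vector<std::string> & sentences);
};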

View File

@@ -1,10 +1,11 @@
 #include "lemmatizer_result.hpp"
-LemmatizerResutl::LemmatizerResult(const std::string & lemmatizedSentence,
+LemmatizerResult::LemmatizerResult(const std::string & lemmatizedSentence,
 const bool isFirstLemmatized):
 _lemmatizedSentence(lemmatizedSentence),
 _isFirstLemmatized(isFirstLemmatized) {
 }
-LemmatizerResutl::~LemmatizerResult();
+LemmatizerResult::~LemmatizerResult() {
+}
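
This file fixes the misspelled class name (LemmatizerResutl) and gives the destructor a body, which is presumably what the commit message refers to. The matching header is not shown, but the constructor here plus the accessors used in the other two files pin down its shape; a plausible lemmatizer_result.hpp, with the include guard and by-value getters as assumptions:

#ifndef LEMMATIZER_RESULT_HDR
#define LEMMATIZER_RESULT_HDR

#include <string>

// Hypothetical header reconstructed from the usage in this commit.
class LemmatizerResult {
public:
    LemmatizerResult(const std::string & lemmatizedSentence,
                     const bool isFirstLemmatized);
    ~LemmatizerResult();

    // The sentence after lemmatization.
    std::string getLemmatizedSentence() const {
        return _lemmatizedSentence;
    }

    // True when the first token was lemmatized; simpleSearch uses this
    // flag to decide whether to query the paired lemmatized memory.
    bool isFirstLemmatized() const {
        return _isFirstLemmatized;
    }

private:
    std::string _lemmatizedSentence;
    bool _isFirstLemmatized;
};

#endif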

View File

@@ -25,12 +25,22 @@ SearcherController::~SearcherController() {
 void SearcherController::simpleSearch(rapidjson::Writer<rapidjson::StringBuffer> & jsonWriter,
 std::string & pattern,
 const int tmId) {
+// tmId should point to non-lemmatized memory
 boost::ptr_map<int,Concordia>::iterator it = _concordiasMap->find(tmId);
+Tm tm = _tmDAO.getTm(tmId);
 if (it != _concordiasMap->end()) {
 TokenizedSentence tokenizedPattern = it->second->tokenize(pattern, false, false);
-LemmatizerResult lemmatizerResult = _lemmatizerFacade->lemmatizeSentence(tokenizedPattern.getTokenizedSentence());
-Tm tm = _tmDAO.getTm(tmId);
+LemmatizerResult lemmatizerResult = _lemmatizerFacade->lemmatizeSentence(
+tm.getSourceLanguageCode(), tokenizedPattern.getTokenizedSentence());
+if (lemmatizerResult.isFirstLemmatized()) {
+// search in lemmatized memory
+it = _concordiasMap->find(tm.getPairedTmId());
+if (it == _concordiasMap->end()) {
+JsonGenerator::signalError(jsonWriter, "paired tm not found!");
+return;
+}
+}
 SimpleSearchResult result = _unitDAO.getSimpleSearchResult(
 it->second->simpleSearch(lemmatizerResult.getLemmatizedSentence(), true));
 jsonWriter.StartObject();
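
This hunk carries the behavioral change: simpleSearch still takes the id of the non-lemmatized memory, lemmatizes the tokenized pattern in the TM's source language, and, if the first word was lemmatized, redirects the lookup to the paired lemmatized index (signaling an error when that index is not loaded). The Tm record comes from _tmDAO outside this diff; the two accessors the new code relies on suggest roughly this outline (a sketch, not the actual definition):

// Hypothetical outline of the Tm record implied by simpleSearch.
class Tm {
public:
    // Language code handed to the lemmatizer for this memory.
    std::string getSourceLanguageCode() const;
    // Id of the parallel lemmatized copy of this memory; simpleSearch
    // re-resolves this id in _concordiasMap when the pattern lemmatizes.
    int getPairedTmId() const;
};

The remaining search endpoints (fullSearch, lexiconSearch, concordiaPhraseSearch, concordiaSearch) are commented out in the hunks below, presumably pending the same paired-memory rework.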
@@ -49,6 +59,7 @@ void SearcherController::fullSearch(rapidjson::Writer<rapidjson::StringBuffer> &
 const int tmId,
 const int limit,
 const int offset) {
+/*
 boost::ptr_map<int,Concordia>::iterator it = _concordiasMap->find(tmId);
 if (it != _concordiasMap->end()) {
 TokenizedSentence tokenizedPattern = it->second->tokenize(pattern, false, false);
@@ -63,11 +74,13 @@ void SearcherController::fullSearch(rapidjson::Writer<rapidjson::StringBuffer> &
 } else {
 JsonGenerator::signalError(jsonWriter, "no such tm!");
 }
+*/
 }
 void SearcherController::lexiconSearch(rapidjson::Writer<rapidjson::StringBuffer> & jsonWriter,
 std::string & pattern,
 const int tmId) {
+/*
 boost::ptr_map<int,Concordia>::iterator it = _concordiasMap->find(tmId);
 if (it != _concordiasMap->end()) {
 TokenizedSentence tokenizedPattern = it->second->tokenize(pattern, false, false);
@@ -82,12 +95,14 @@ void SearcherController::lexiconSearch(rapidjson::Writer<rapidjson::StringBuffer
 } else {
 JsonGenerator::signalError(jsonWriter, "no such tm!");
 }
+*/
 }
 void SearcherController::concordiaPhraseSearch(rapidjson::Writer<rapidjson::StringBuffer> & jsonWriter,
 std::string & pattern,
 const std::vector<Interval> & intervals,
 const int tmId) {
+/*
 boost::ptr_map<int,Concordia>::iterator it = _concordiasMap->find(tmId);
 if (it != _concordiasMap->end()) {
 if (intervals.size() > 0) {
@@ -147,12 +162,14 @@ void SearcherController::concordiaPhraseSearch(rapidjson::Writer<rapidjson::Stri
 } else {
 JsonGenerator::signalError(jsonWriter, "no such tm!");
 }
+*/
 }
 void SearcherController::concordiaSearch(rapidjson::Writer<rapidjson::StringBuffer> & jsonWriter,
 std::string & pattern,
 const int tmId) {
+/*
 boost::ptr_map<int,Concordia>::iterator it = _concordiasMap->find(tmId);
 if (it != _concordiasMap->end()) {
 TokenizedSentence originalPattern = it->second->tokenize(pattern, false, false);
@@ -182,6 +199,7 @@ void SearcherController::concordiaSearch(rapidjson::Writer<rapidjson::StringBuff
 } else {
 JsonGenerator::signalError(jsonWriter, "no such tm!");
 }
+*/
 }
 std::string SearcherController::_substrUTF8(std::string source, int start, int length) {