diff --git a/concordia-server/concordia_server.cpp b/concordia-server/concordia_server.cpp index 2ebd2fd..8f285a9 100644 --- a/concordia-server/concordia_server.cpp +++ b/concordia-server/concordia_server.cpp @@ -16,6 +16,7 @@ #include "request.hpp" #include "language.hpp" #include "rapidjson/rapidjson.h" +#include "lemmatizer_result.hpp" #include #include #include @@ -180,7 +181,7 @@ std::string ConcordiaServer::handleRequest(std::string & requestString) { } else if (operation == LEMMATIZE_OP) { std::string sentence = _getStringParameter(d, "sentence"); std::string languageCode = _getStringParameter(d, "languageCode"); - std::string lemmatizedSentence = _lemmatizerFacade->lemmatizeSentence(languageCode, sentence); + std::string lemmatizedSentence = _lemmatizerFacade->lemmatizeSentence(languageCode, sentence).getLemmatizedSentence(); jsonWriter.StartObject(); jsonWriter.String("lemmatizedSentence"); jsonWriter.String(lemmatizedSentence.c_str()); @@ -192,12 +193,12 @@ std::string ConcordiaServer::handleRequest(std::string & requestString) { for (rapidjson::SizeType i = 0; i < sentencesArray.Size(); i++) { sentences.push_back(sentencesArray[i].GetString()); } - std::vector<std::string> lemmatizedSentences = _lemmatizerFacade->lemmatizeSentences(languageCode, sentences); + std::vector<LemmatizerResult> lemmatizedSentences = _lemmatizerFacade->lemmatizeSentences(languageCode, sentences); jsonWriter.StartObject(); jsonWriter.String("lemmatizedSentences"); jsonWriter.StartArray(); - BOOST_FOREACH(std::string & lemmatizedSentence, lemmatizedSentences) { - jsonWriter.String(lemmatizedSentence.c_str()); + BOOST_FOREACH(LemmatizerResult & lemmatizedSentence, lemmatizedSentences) { + jsonWriter.String(lemmatizedSentence.getLemmatizedSentence().c_str()); } jsonWriter.EndArray(); jsonWriter.EndObject(); diff --git a/concordia-server/lemmatizer_result.cpp b/concordia-server/lemmatizer_result.cpp index 3babfcb..991dd4a 100644 --- a/concordia-server/lemmatizer_result.cpp +++ 
b/concordia-server/lemmatizer_result.cpp @@ -1,10 +1,11 @@ #include "lemmatizer_result.hpp" -LemmatizerResutl::LemmatizerResult(const std::string & lemmatizedSentence, +LemmatizerResult::LemmatizerResult(const std::string & lemmatizedSentence, const bool isFirstLemmatized): _lemmatizedSentence(lemmatizedSentence), _isFirstLemmatized(isFirstLemmatized) { } -LemmatizerResutl::~LemmatizerResult(); +LemmatizerResult::~LemmatizerResult() { +} diff --git a/concordia-server/searcher_controller.cpp index 503d45a..32e9033 100644 --- a/concordia-server/searcher_controller.cpp +++ b/concordia-server/searcher_controller.cpp @@ -25,12 +25,22 @@ SearcherController::~SearcherController() { void SearcherController::simpleSearch(rapidjson::Writer<rapidjson::StringBuffer> & jsonWriter, std::string & pattern, const int tmId) { + // tmId should point to non-lemmatized memory boost::ptr_map<int, Concordia>::iterator it = _concordiasMap->find(tmId); - Tm tm = _tmDAO.getTm(tmId); if (it != _concordiasMap->end()) { TokenizedSentence tokenizedPattern = it->second->tokenize(pattern, false, false); - LemmatizerResult lemmatizerResult = _lemmatizerFacade->lemmatizeSentence(tokenizedPattern.getTokenizedSentence()); + Tm tm = _tmDAO.getTm(tmId); + LemmatizerResult lemmatizerResult = _lemmatizerFacade->lemmatizeSentence( + tm.getSourceLanguageCode(), tokenizedPattern.getTokenizedSentence()); + if (lemmatizerResult.isFirstLemmatized()) { + // search in lemmatized memory + it = _concordiasMap->find(tm.getPairedTmId()); + if (it == _concordiasMap->end()) { + JsonGenerator::signalError(jsonWriter, "paired tm not found!"); + return; + } + } SimpleSearchResult result = _unitDAO.getSimpleSearchResult( it->second->simpleSearch(lemmatizerResult.getLemmatizedSentence(), true)); jsonWriter.StartObject(); @@ -49,6 +59,7 @@ void SearcherController::fullSearch(rapidjson::Writer<rapidjson::StringBuffer> & const int tmId, const int limit, const int offset) { + /* boost::ptr_map<int, Concordia>::iterator it = _concordiasMap->find(tmId); if (it != 
_concordiasMap->end()) { TokenizedSentence tokenizedPattern = it->second->tokenize(pattern, false, false); @@ -63,11 +74,13 @@ void SearcherController::fullSearch(rapidjson::Writer<rapidjson::StringBuffer> & } else { JsonGenerator::signalError(jsonWriter, "no such tm!"); } + */ } void SearcherController::lexiconSearch(rapidjson::Writer<rapidjson::StringBuffer> & jsonWriter, std::string & pattern, const int tmId) { + /* boost::ptr_map<int, Concordia>::iterator it = _concordiasMap->find(tmId); if (it != _concordiasMap->end()) { TokenizedSentence tokenizedPattern = it->second->tokenize(pattern, false, false); @@ -82,12 +95,14 @@ void SearcherController::lexiconSearch(rapidjson::Writer<rapidjson::StringBuffer> & jsonWriter, std::string & pattern, const std::vector<Interval> & intervals, const int tmId) { + /* boost::ptr_map<int, Concordia>::iterator it = _concordiasMap->find(tmId); if (it != _concordiasMap->end()) { if (intervals.size() > 0) { @@ -147,12 +162,14 @@ void SearcherController::concordiaPhraseSearch(rapidjson::Writer<rapidjson::StringBuffer> & jsonWriter, std::string & pattern, const int tmId) { + /* boost::ptr_map<int, Concordia>::iterator it = _concordiasMap->find(tmId); if (it != _concordiasMap->end()) { TokenizedSentence originalPattern = it->second->tokenize(pattern, false, false); @@ -182,6 +199,7 @@ void SearcherController::concordiaSearch(rapidjson::Writer