2013-11-12 16:58:31 +01:00
|
|
|
#include "concordia/hash_generator.hpp"
|
2015-04-09 22:17:19 +02:00
|
|
|
#include "concordia/common/utils.hpp"
|
2014-08-15 13:22:04 +02:00
|
|
|
|
2013-11-12 16:58:31 +01:00
|
|
|
#include <boost/filesystem.hpp>
|
|
|
|
#include <boost/archive/binary_oarchive.hpp>
|
|
|
|
#include <boost/archive/binary_iarchive.hpp>
|
2013-11-12 22:08:37 +01:00
|
|
|
#include <boost/algorithm/string.hpp>
|
2014-08-15 13:22:04 +02:00
|
|
|
|
2013-11-12 16:58:31 +01:00
|
|
|
#include <fstream>
|
|
|
|
|
2014-04-13 12:21:30 +02:00
|
|
|
// Constructs the hash generator: stores the word-map file path from the
// config, creates an empty word map and a sentence anonymizer, then, if a
// previously serialized word map exists on disk, restores it so word codes
// stay stable across runs.
// @param config  shared Concordia configuration (provides the word map path
//                and the anonymizer settings)
// @throws ConcordiaException (declared for header compatibility; propagated
//                from SentenceAnonymizer construction)
HashGenerator::HashGenerator(boost::shared_ptr<ConcordiaConfig> config)
                                         throw(ConcordiaException) :
    _wordMapFilePath(config->getWordMapFilePath()),
    _wordMap(boost::shared_ptr<WordMap>(new WordMap)),
    _sentenceAnonymizer(boost::shared_ptr<SentenceAnonymizer>(
                                new SentenceAnonymizer(config))) {
    if (boost::filesystem::exists(_wordMapFilePath)) {
        // Deserialize directly into _wordMap. (The original code also
        // allocated an unused temporary WordMap here; it has been removed.)
        std::ifstream ifs(_wordMapFilePath.c_str(), std::ios::binary);
        boost::archive::binary_iarchive ia(ifs);
        ia >> *_wordMap;
    }
}
|
|
|
|
|
|
|
|
// Destructor: nothing to release explicitly — all members are
// boost::shared_ptr / std::string and clean up themselves.
HashGenerator::~HashGenerator() {
}
|
|
|
|
|
2015-04-15 14:14:10 +02:00
|
|
|
std::vector<INDEX_CHARACTER_TYPE> HashGenerator::generateHash(
|
|
|
|
const std::string & sentence) throw(ConcordiaException) {
|
|
|
|
std::vector<INDEX_CHARACTER_TYPE> result;
|
|
|
|
std::vector<std::string> tokenTexts = generateTokenVector(sentence);
|
2015-04-15 10:55:26 +02:00
|
|
|
if (tokenTexts.size() > Utils::maxSentenceSize) {
|
2014-04-29 14:46:04 +02:00
|
|
|
throw ConcordiaException("Trying to add too long sentence.");
|
2014-03-14 11:30:17 +01:00
|
|
|
}
|
2015-04-15 14:14:10 +02:00
|
|
|
for (std::vector<std::string>::iterator it = tokenTexts.begin();
|
2015-04-15 10:55:26 +02:00
|
|
|
it != tokenTexts.end(); ++it) {
|
2015-04-15 14:14:10 +02:00
|
|
|
std::string token = *it;
|
2013-12-06 22:29:25 +01:00
|
|
|
INDEX_CHARACTER_TYPE code = _wordMap->getWordCode(token);
|
2015-04-15 10:55:26 +02:00
|
|
|
result.push_back(code);
|
2013-11-14 15:44:50 +01:00
|
|
|
}
|
|
|
|
|
2013-11-12 16:58:31 +01:00
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
2015-04-15 14:14:10 +02:00
|
|
|
std::vector<std::string> HashGenerator::generateTokenVector(
|
|
|
|
const std::string & sentence) {
|
|
|
|
std::string anonymizedSentence = _sentenceAnonymizer->anonymize(sentence);
|
2014-08-15 13:22:04 +02:00
|
|
|
boost::trim(anonymizedSentence);
|
2015-04-15 14:14:10 +02:00
|
|
|
std::vector<std::string> tokenTexts;
|
2015-04-15 10:55:26 +02:00
|
|
|
boost::split(tokenTexts, anonymizedSentence, boost::is_any_of(" \t\r\n"),
|
2014-04-29 14:46:04 +02:00
|
|
|
boost::algorithm::token_compress_on);
|
|
|
|
return tokenTexts;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2013-11-12 16:58:31 +01:00
|
|
|
void HashGenerator::serializeWordMap() {
|
2015-04-15 14:14:10 +02:00
|
|
|
std::ofstream ofs(_wordMapFilePath.c_str(), std::ios::binary);
|
2013-11-12 16:58:31 +01:00
|
|
|
boost::archive::binary_oarchive oa(ofs);
|
2013-11-14 15:44:50 +01:00
|
|
|
oa << *_wordMap;
|
2013-11-12 16:58:31 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|