// concordia-library/concordia/hash_generator.cpp
// (retrieved 2013-11-12 16:58:31 +01:00)
#include "concordia/hash_generator.hpp"
// 2013-11-12 16:58:31 +01:00
#include <boost/filesystem.hpp>
#include <boost/archive/binary_oarchive.hpp>
#include <boost/archive/binary_iarchive.hpp>
// 2013-11-12 22:08:37 +01:00
#include <boost/algorithm/string.hpp>
// 2013-11-12 16:58:31 +01:00
#include <fstream>
/**
 * Constructs the hash generator from the Concordia configuration:
 * remembers the word-map file path, creates a fresh WordMap and a
 * SentenceAnonymizer. If a serialized word map already exists at the
 * configured path, it is deserialized into _wordMap so that word codes
 * remain stable across runs.
 * @param config  shared Concordia configuration
 * @throws ConcordiaException (declared; may propagate from members)
 */
HashGenerator::HashGenerator(boost::shared_ptr<ConcordiaConfig> config)
                                         throw(ConcordiaException) :
    _wordMapFilePath(config->getWordMapFilePath()),
    _wordMap(boost::shared_ptr<WordMap>(new WordMap)),
    _sentenceAnonymizer(boost::shared_ptr<SentenceAnonymizer>(
                        new SentenceAnonymizer(config))) {
    if (boost::filesystem::exists(_wordMapFilePath)) {
        ifstream ifs(_wordMapFilePath.c_str(), std::ios::binary);
        boost::archive::binary_iarchive ia(ifs);
        // Deserialize straight into the member map. (A previously
        // allocated temporary WordMap here was dead code and is removed.)
        ia >> *_wordMap;
    }
}
// Empty destructor: all members are boost::shared_ptr and release
// their resources automatically.
HashGenerator::~HashGenerator() {
}
/**
 * Generates the hash of a sentence: anonymizes and tokenizes it,
 * then maps each token to its integer word code via the word map.
 * @param sentence  input sentence
 * @return shared vector of word codes, one entry per token
 * @throws ConcordiaException when the sentence has more than
 *         MAX_SENTENCE_SIZE tokens
 */
boost::shared_ptr<vector<INDEX_CHARACTER_TYPE> > HashGenerator::generateHash(
                                const string & sentence)
                                         throw(ConcordiaException) {
    boost::shared_ptr<vector<INDEX_CHARACTER_TYPE> >
                        result(new vector<INDEX_CHARACTER_TYPE>());
    boost::shared_ptr<vector<string> > tokenTexts =
                        generateTokenVector(sentence);
    if (tokenTexts->size() > MAX_SENTENCE_SIZE) {
        throw ConcordiaException("Trying to add too long sentence.");
    }
    // One code per token: reserve up front to avoid reallocations.
    result->reserve(tokenTexts->size());
    for (vector<string>::const_iterator it = tokenTexts->begin();
                 it != tokenTexts->end(); ++it) {
        // Use the token through the iterator directly; the previous
        // per-iteration string copy was unnecessary.
        result->push_back(_wordMap->getWordCode(*it));
    }
    return result;
}
/**
 * Splits a sentence into tokens: anonymizes it, trims surrounding
 * whitespace, then splits on whitespace (runs compressed).
 * @param sentence  input sentence
 * @return shared vector of token strings (empty vector for an
 *         empty or whitespace-only anonymized sentence)
 */
boost::shared_ptr<vector<string> >
    HashGenerator::generateTokenVector(const string & sentence) {
    string anonymizedSentence = _sentenceAnonymizer->anonymize(sentence);
    boost::trim(anonymizedSentence);
    boost::shared_ptr<vector<string> > tokenTexts(new vector<string>());
    // boost::split on an empty string yields a single empty token;
    // guard so an empty sentence produces no tokens at all.
    if (!anonymizedSentence.empty()) {
        boost::split(*tokenTexts, anonymizedSentence,
                     boost::is_any_of(" \t\r\n"),
                     boost::algorithm::token_compress_on);
    }
    return tokenTexts;
}
2013-11-12 16:58:31 +01:00
void HashGenerator::serializeWordMap() {
2013-11-14 20:36:34 +01:00
ofstream ofs(_wordMapFilePath.c_str(), std::ios::binary);
2013-11-12 16:58:31 +01:00
boost::archive::binary_oarchive oa(ofs);
2013-11-14 15:44:50 +01:00
oa << *_wordMap;
2013-11-12 16:58:31 +01:00
}