#include "concordia/hash_generator.hpp" #include "concordia/common/utils.hpp" #include #include #include #include #include HashGenerator::HashGenerator(boost::shared_ptr config) throw(ConcordiaException) : _wordMapFilePath(config->getWordMapFilePath()), _wordMap(boost::shared_ptr(new WordMap)), _sentenceAnonymizer(boost::shared_ptr( new SentenceAnonymizer(config))) { if (boost::filesystem::exists(_wordMapFilePath)) { ifstream ifs(_wordMapFilePath.c_str(), std::ios::binary); boost::archive::binary_iarchive ia(ifs); boost::shared_ptr restoredWordMap(new WordMap); ia >> *_wordMap; } } HashGenerator::~HashGenerator() { } vector HashGenerator::generateHash( const string & sentence) throw(ConcordiaException) { vector result; vector tokenTexts = generateTokenVector(sentence); if (tokenTexts.size() > Utils::maxSentenceSize) { throw ConcordiaException("Trying to add too long sentence."); } for (vector::iterator it = tokenTexts.begin(); it != tokenTexts.end(); ++it) { string token = *it; INDEX_CHARACTER_TYPE code = _wordMap->getWordCode(token); result.push_back(code); } return result; } vector HashGenerator::generateTokenVector(const string & sentence) { string anonymizedSentence = _sentenceAnonymizer->anonymize(sentence); boost::trim(anonymizedSentence); vector tokenTexts; boost::split(tokenTexts, anonymizedSentence, boost::is_any_of(" \t\r\n"), boost::algorithm::token_compress_on); return tokenTexts; } void HashGenerator::serializeWordMap() { ofstream ofs(_wordMapFilePath.c_str(), std::ios::binary); boost::archive::binary_oarchive oa(ofs); oa << *_wordMap; }