// concordia-library/concordia/hash_generator.cpp

#include "concordia/hash_generator.hpp"
#include "concordia/common/utils.hpp"
#include "concordia/token_annotation.hpp"
#include <boost/filesystem.hpp>
#include <boost/archive/binary_oarchive.hpp>
#include <boost/archive/binary_iarchive.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/foreach.hpp>
#include <fstream>
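
// Constructs a HashGenerator from the Concordia configuration. If a word
// map file already exists at the configured path, it is deserialized so
// that previously assigned word codes are reused across runs.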
HashGenerator::HashGenerator(boost::shared_ptr<ConcordiaConfig> config)
    throw(ConcordiaException) :
    _wordMapFilePath(config->getWordMapFilePath()),
    _wordMap(boost::shared_ptr<WordMap>(new WordMap)),
    _sentenceTokenizer(boost::shared_ptr<SentenceTokenizer>(
        new SentenceTokenizer(config))) {
    if (boost::filesystem::exists(_wordMapFilePath)) {
        // Deserialize the stored word map directly into _wordMap; the
        // original code allocated an extra, unused WordMap here.
        std::ifstream ifs(_wordMapFilePath.c_str(), std::ios::binary);
        boost::archive::binary_iarchive ia(ifs);
        ia >> *_wordMap;
    }
}
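
// Nothing to release explicitly: all members are managed by shared_ptr.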
HashGenerator::~HashGenerator() {
}
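
// Tokenizes the sentence and converts its tokens to integer word codes
// using the shared word map. Sentences longer than Utils::maxSentenceSize
// are rejected with a ConcordiaException.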
boost::shared_ptr<TokenizedSentence> HashGenerator::generateHash(
    const std::string & sentence) throw(ConcordiaException) {
    boost::shared_ptr<TokenizedSentence> ts =
        _sentenceTokenizer->tokenize(sentence);
    // Validate the sentence length before generating word codes, so that
    // a rejected sentence does not add new entries to the word map.
    if (ts->getTokens().size() > Utils::maxSentenceSize) {
        throw ConcordiaException("Trying to add too long sentence.");
    }
    ts->generateHash(_wordMap);
    return ts;
}
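
// Persists the current word map to disk via Boost binary serialization,
// so that word codes remain stable between program runs.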
void HashGenerator::serializeWordMap() {
    std::ofstream ofs(_wordMapFilePath.c_str(), std::ios::binary);
    boost::archive::binary_oarchive oa(ofs);
    oa << *_wordMap;
}
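
// Resets the in-memory word map and deletes its on-disk serialization.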
void HashGenerator::clearWordMap() {
    _wordMap = boost::shared_ptr<WordMap>(new WordMap);
    boost::filesystem::remove(_wordMapFilePath);
}
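
// A minimal usage sketch (not part of the library): assuming a valid
// ConcordiaConfig built from a configuration file, a typical cycle is to
// hash sentences and then persist the word map. The config file name
// below is hypothetical.
//
//   boost::shared_ptr<ConcordiaConfig> config(
//       new ConcordiaConfig("concordia.cfg"));
//   HashGenerator hashGenerator(config);
//   boost::shared_ptr<TokenizedSentence> ts =
//       hashGenerator.generateHash("Alice has a cat");
//   hashGenerator.serializeWordMap();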