// concordia-library/concordia/hash_generator.cpp
#include "concordia/hash_generator.hpp"
#include "concordia/common/utils.hpp"
#include "concordia/token_annotation.hpp"

#include <boost/filesystem.hpp>
#include <boost/archive/binary_oarchive.hpp>
#include <boost/archive/binary_iarchive.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/foreach.hpp>

#include <fstream>
/*
 * Constructs a hash generator rooted at the given index directory.
 *
 * @param indexPath  directory holding the index; the word map file is
 *                   expected/created at indexPath + "/" + WORD_MAP_FILE_NAME
 * @param config     configuration passed on to the sentence tokenizer
 *
 * If a serialized word map already exists on disk it is restored into
 * the freshly allocated _wordMap; otherwise _wordMap starts empty.
 */
HashGenerator::HashGenerator(std::string indexPath,
                boost::shared_ptr<ConcordiaConfig> config):
        _wordMapFilePath(indexPath+"/"+WORD_MAP_FILE_NAME),
        _wordMap(boost::shared_ptr<WordMap>(new WordMap)),
        _sentenceTokenizer(boost::shared_ptr<SentenceTokenizer>(
                new SentenceTokenizer(config))) {
    if (boost::filesystem::exists(_wordMapFilePath)) {
        std::ifstream ifs(_wordMapFilePath.c_str(), std::ios::binary);
        boost::archive::binary_iarchive ia(ifs);
        // Deserialize straight into the member word map.
        // (The original code also allocated a temporary
        //  "restoredWordMap" here that was never used — removed.)
        ia >> *_wordMap;
    }
}
// Destructor: nothing to release explicitly — all members are
// managed by boost::shared_ptr / std::string and clean up themselves.
HashGenerator::~HashGenerator() { }
/*
 * Tokenizes the sentence and assigns word-map codes to its tokens.
 *
 * @param sentence      the input sentence
 * @param byWhitespace  when true, tokenize by whitespace only
 * @return the tokenized sentence carrying the generated hash
 * @throws ConcordiaException when the token count exceeds the
 *         maximum sentence size supported by the index
 */
TokenizedSentence HashGenerator::generateHash(
                        const std::string & sentence,
                        bool byWhitespace) {
    TokenizedSentence tokenized =
        _sentenceTokenizer->tokenize(sentence, byWhitespace);
    tokenized.generateHash(_wordMap);

    // Sentences longer than the index limit cannot be stored.
    const bool tooLong =
        tokenized.getTokens().size() > Utils::maxSentenceSize;
    if (tooLong) {
        throw ConcordiaException("Trying to add too long sentence.");
    }

    return tokenized;
}
/*
 * Tokenizes the sentence and generates its token annotations
 * (no word-map codes are assigned, unlike generateHash).
 *
 * @param sentence      the input sentence
 * @param byWhitespace  when true, tokenize by whitespace only
 * @return the tokenized sentence with tokens generated
 * @throws ConcordiaException when the token count exceeds the
 *         maximum sentence size supported by the index
 */
TokenizedSentence HashGenerator::generateTokens(
                        const std::string & sentence,
                        bool byWhitespace) {
    TokenizedSentence tokenized =
        _sentenceTokenizer->tokenize(sentence, byWhitespace);
    tokenized.generateTokens();

    // Sentences longer than the index limit cannot be stored.
    const bool tooLong =
        tokenized.getTokens().size() > Utils::maxSentenceSize;
    if (tooLong) {
        throw ConcordiaException("Trying to add too long sentence.");
    }

    return tokenized;
}
void HashGenerator::serializeWordMap() {
std::ofstream ofs(_wordMapFilePath.c_str(), std::ios::binary);
2013-11-12 16:58:31 +01:00
boost::archive::binary_oarchive oa(ofs);
2013-11-14 15:44:50 +01:00
oa << *_wordMap;
2013-11-12 16:58:31 +01:00
}
/*
 * Discards the in-memory word map (replacing it with a fresh, empty
 * one) and deletes its serialized form from disk.
 */
void HashGenerator::clearWordMap() {
    _wordMap.reset(new WordMap);
    boost::filesystem::remove(_wordMapFilePath);
}