Dodane wygładzanie.

This commit is contained in:
Jan Nowak 2022-04-11 11:15:56 +02:00
parent 0f6815f63d
commit 0aa79cba31
3 changed files with 17940 additions and 17935 deletions

File diff suppressed because it is too large. [Load Diff]

13
run.py
View File

@ -11,7 +11,9 @@ class WordPred:
def __init__(self): def __init__(self):
self.tokenizer = RegexpTokenizer(r"\w+") self.tokenizer = RegexpTokenizer(r"\w+")
self.model = defaultdict(lambda: defaultdict(lambda: 0)) self.model = defaultdict(lambda: defaultdict(lambda: 0))
self.vocab = set()
self.alpha = 0.001
def read_file(self, file): def read_file(self, file):
for line in file: for line in file:
@ -30,13 +32,16 @@ class WordPred:
for w1, w2, w3 in trigrams(tokens, pad_right=True, pad_left=True): for w1, w2, w3 in trigrams(tokens, pad_right=True, pad_left=True):
if w1 and w2 and w3: if w1 and w2 and w3:
self.model[(w2, w3)][w1] += 1 self.model[(w2, w3)][w1] += 1
if index == 1300000: self.vocab.add(w1)
self.vocab.add(w2)
self.vocab.add(w3)
if index == 300000:
break break
for word_pair in self.model: for word_pair in self.model:
num_n_grams = float(sum(self.model[word_pair].values())) num_n_grams = float(sum(self.model[word_pair].values()))
for word in self.model[word_pair]: for word in self.model[word_pair]:
self.model[word_pair][word] /= num_n_grams self.model[word_pair][word] = (self.model[word_pair][word] + self.alpha) / (num_n_grams + self.alpha*len(self.vocab))
def generate_outputs(self, input_file, output_file): def generate_outputs(self, input_file, output_file):
with open(output_file, 'w') as outputf: with open(output_file, 'w') as outputf:
@ -73,4 +78,4 @@ class WordPred:
wp = WordPred() wp = WordPred()
wp.read_train_data('train/in.tsv.xz') wp.read_train_data('train/in.tsv.xz')
wp.generate_outputs('dev-0/in.tsv.xz', 'dev-0/out.tsv') wp.generate_outputs('dev-0/in.tsv.xz', 'dev-0/out.tsv')
wp.generate_outputs('test-A/in.tsv.xz', 'test-A/out.tsv') wp.generate_outputs('test-A/in.tsv.xz', 'test-A/out.tsv')

File diff suppressed because it is too large. [Load Diff]