Compare commits
No commits in common. "gpt2-finetune" and "master" have entirely different histories.
gpt2-finetune
...
master
10519
dev-0/out-hidden_size=128.tsv
Normal file
10519
dev-0/out-hidden_size=128.tsv
Normal file
File diff suppressed because it is too large
Load Diff
10519
dev-0/out-hidden_size=512.tsv
Normal file
10519
dev-0/out-hidden_size=512.tsv
Normal file
File diff suppressed because it is too large
Load Diff
21038
dev-0/out.tsv
21038
dev-0/out.tsv
File diff suppressed because it is too large
Load Diff
@@ -201,7 +201,7 @@ def predict_words(dataset):
|
|||||||
src = tokenizer.encode(text, return_tensors="pt", truncation=True).to(device)
|
src = tokenizer.encode(text, return_tensors="pt", truncation=True).to(device)
|
||||||
output = model.generate(src, max_length=len(src[0]) + 1, do_sample=True, top_k=0, temperature=0.8,
|
output = model.generate(src, max_length=len(src[0]) + 1, do_sample=True, top_k=0, temperature=0.8,
|
||||||
num_return_sequences=1, no_repeat_ngram_size=2, output_scores=True)
|
num_return_sequences=1, no_repeat_ngram_size=2, output_scores=True)
|
||||||
probs, idxs = torch.softmax(output.scores[0][-1], dim=0).topk(30)
|
probs, idxs = torch.softmax(output.scores[0][-1], dim=0).topk(50)
|
||||||
current_output = ''
|
current_output = ''
|
||||||
accumulated_probability = 0
|
accumulated_probability = 0
|
||||||
for prob, token_id in zip(probs, idxs):
|
for prob, token_id in zip(probs, idxs):
|
||||||
|
7414
test-A/out-hidden_size=128.tsv
Normal file
7414
test-A/out-hidden_size=128.tsv
Normal file
File diff suppressed because it is too large
Load Diff
7414
test-A/out-hidden_size=512.tsv
Normal file
7414
test-A/out-hidden_size=512.tsv
Normal file
File diff suppressed because it is too large
Load Diff
14828
test-A/out.tsv
14828
test-A/out.tsv
File diff suppressed because it is too large
Load Diff
Loading…
Reference in New Issue
Block a user