delete tokenizers
parent 85cac92433
commit 751e972710
File diff suppressed because it is too large
@@ -1 +0,0 @@
-{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "trim_offsets": true, "special_tokens_map_file": null, "name_or_path": "original/polish-bart-base", "tokenizer_class": "BartTokenizer"}
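For context on what this deleted config controlled, a minimal sketch, assuming a local checkout where original/polish-bart-base (the path recorded under "name_or_path") still holds the vocabulary files, and a standard transformers install; the sample sentence is purely illustrative.

    from transformers import BartTokenizer

    # from_pretrained reads tokenizer_config.json to pick the class
    # (BartTokenizer) and the special tokens declared in the file above.
    tok = BartTokenizer.from_pretrained("original/polish-bart-base")

    # Special tokens from the config: <s>, </s>, <pad>, <mask>, <unk>.
    print(tok.bos_token, tok.eos_token, tok.pad_token, tok.mask_token)

    # Encoding wraps the text in <s> ... </s>; note that in this config
    # the bos/cls tokens coincide (<s>), as do eos/sep (</s>).
    print(tok("Ala ma kota").input_ids)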
File diff suppressed because it is too large
@@ -1,11 +0,0 @@
-{
-  "add_bos_token": false,
-  "add_prefix_space": false,
-  "bos_token": "<s>",
-  "eos_token": "</s>",
-  "errors": "replace",
-  "name_or_path": "original/polish-gpt2-small",
-  "special_tokens_map_file": "original/polish-gpt2-small/special_tokens_map.json",
-  "tokenizer_class": "GPT2Tokenizer",
-  "unk_token": "<unk>"
-}
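Likewise for the deleted GPT-2 config, a minimal sketch under the same assumptions (local checkout at the "name_or_path" location, standard transformers install), showing the two flags that distinguish it from the BART setup.

    from transformers import GPT2Tokenizer

    tok = GPT2Tokenizer.from_pretrained("original/polish-gpt2-small")

    # "add_bos_token": false -> no <s> is prepended to encoded text.
    print(tok("Ala ma kota").input_ids)

    # "add_prefix_space": false -> a leading space changes the byte-level
    # tokenization, so "kot" and " kot" map to different ids.
    print(tok("kot").input_ids)
    print(tok(" kot").input_ids)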