add model dir

parent 98dc92beea
commit f9a8851810
63 model-pl2en/config.json Normal file
@@ -0,0 +1,63 @@
{
  "_name_or_path": "facebook/mbart-large-50-many-to-one-mmt",
  "_num_labels": 3,
  "activation_dropout": 0.0,
  "activation_function": "relu",
  "add_bias_logits": false,
  "add_final_layer_norm": true,
  "architectures": [
    "MBartForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "classif_dropout": 0.0,
  "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 12,
  "decoder_start_token_id": 2,
  "dropout": 0.1,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 12,
  "eos_token_id": 2,
  "forced_bos_token_id": 250004,
  "forced_eos_token_id": 2,
  "gradient_checkpointing": false,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1",
    "2": "LABEL_2"
  },
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1,
    "LABEL_2": 2
  },
  "max_length": 200,
  "max_position_embeddings": 1024,
  "model_type": "mbart",
  "normalize_before": true,
  "normalize_embedding": true,
  "num_beams": 5,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "scale_embedding": true,
  "static_position_embeddings": false,
  "task_specific_params": {
    "translation_en_to_ro": {
      "decoder_start_token_id": 250020
    }
  },
  "tokenizer_class": "MBart50Tokenizer",
  "torch_dtype": "float32",
  "transformers_version": "4.19.2",
  "use_cache": true,
  "vocab_size": 250054
}
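Two fields in the config above are easy to confuse: decoder_start_token_id 2 is the </s> token, while forced_bos_token_id 250004 is the en_XX language code of the MBart-50 vocabulary, which pins generation to English. A small sanity-check sketch, not part of the commit; it assumes Hugging Face transformers and the tokenizer files added below:

```python
# Sketch only (not in this commit): confirm that forced_bos_token_id
# 250004 in config.json is the en_XX language code of the MBart-50
# tokenizer shipped in this directory.
from transformers import MBart50Tokenizer

tok = MBart50Tokenizer.from_pretrained("model-pl2en")
assert tok.lang_code_to_id["en_XX"] == 250004  # matches forced_bos_token_id
assert tok.eos_token_id == 2                   # matches decoder_start_token_id
```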
BIN model-pl2en/sentencepiece.bpe.model Normal file
Binary file not shown.
1 model-pl2en/special_tokens_map.json Normal file
@@ -0,0 +1 @@
{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>", "additional_special_tokens": ["ar_AR", "cs_CZ", "de_DE", "en_XX", "es_XX", "et_EE", "fi_FI", "fr_XX", "gu_IN", "hi_IN", "it_IT", "ja_XX", "kk_KZ", "ko_KR", "lt_LT", "lv_LV", "my_MM", "ne_NP", "nl_XX", "ro_RO", "ru_RU", "si_LK", "tr_TR", "vi_VN", "zh_CN", "af_ZA", "az_AZ", "bn_IN", "fa_IR", "he_IL", "hr_HR", "id_ID", "ka_GE", "km_KH", "mk_MK", "ml_IN", "mn_MN", "mr_IN", "pl_PL", "ps_AF", "pt_XX", "sv_SE", "sw_KE", "ta_IN", "te_IN", "th_TH", "tl_XX", "uk_UA", "ur_PK", "xh_ZA", "gl_ES", "sl_SI"]}
1000837 model-pl2en/tokenizer.json Normal file
File diff suppressed because one or more lines are too long
1 model-pl2en/tokenizer_config.json Normal file
@@ -0,0 +1 @@
{"src_lang": "pl_PL", "tgt_lang": null, "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": "<s>", "language_codes": "ML50", "special_tokens_map_file": "special_tokens_map.json", "name_or_path": "facebook/mbart-large-50-many-to-one-mmt", "additional_special_tokens": ["ar_AR", "cs_CZ", "de_DE", "en_XX", "es_XX", "et_EE", "fi_FI", "fr_XX", "gu_IN", "hi_IN", "it_IT", "ja_XX", "kk_KZ", "ko_KR", "lt_LT", "lv_LV", "my_MM", "ne_NP", "nl_XX", "ro_RO", "ru_RU", "si_LK", "tr_TR", "vi_VN", "zh_CN", "af_ZA", "az_AZ", "bn_IN", "fa_IR", "he_IL", "hr_HR", "id_ID", "ka_GE", "km_KH", "mk_MK", "ml_IN", "mn_MN", "mr_IN", "pl_PL", "ps_AF", "pt_XX", "sv_SE", "sw_KE", "ta_IN", "te_IN", "th_TH", "tl_XX", "uk_UA", "ur_PK", "xh_ZA", "gl_ES", "sl_SI"], "sp_model_kwargs": {}, "tokenizer_class": "MBart50Tokenizer"}
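Together, the five files above form a complete pl→en checkpoint directory loadable with Hugging Face transformers: tokenizer_config.json sets src_lang to pl_PL, and config.json forces en_XX as the first generated token. A minimal usage sketch, not part of the commit; the directory name model-pl2en matches this commit, and the Polish example sentence is invented:

```python
# Usage sketch (assumptions: transformers ~4.19 as pinned in config.json,
# PyTorch installed; the example sentence is made up).
from transformers import MBart50Tokenizer, MBartForConditionalGeneration

model = MBartForConditionalGeneration.from_pretrained("model-pl2en")
tok = MBart50Tokenizer.from_pretrained("model-pl2en")

tok.src_lang = "pl_PL"  # prepend the Polish language code, per tokenizer_config.json
inputs = tok("Ala ma kota.", return_tensors="pt")

# forced_bos_token_id=250004 (en_XX), num_beams=5, and max_length=200 are
# read from config.json, so generate() decodes into English with no extra
# arguments.
out = model.generate(**inputs)
print(tok.batch_decode(out, skip_special_tokens=True))
```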