finetuned model + empathy

parent 31538ba2c2
commit f2e09fa12f
1  .gitignore  vendored  Normal file
@@ -0,0 +1 @@
+.DS_Store
6450  finetuning.ipynb
File diff suppressed because one or more lines are too long
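The notebook's diff is suppressed above, so its contents are not visible in this commit view. Going only by the saved artifacts below (base checkpoint flax-community/papuGaPT2, GPT2LMHeadModel, Transformers 4.30.2), here is a hypothetical sketch of the kind of fine-tuning run that would produce the model_save/ directory; the dataset file name and training hyperparameters are purely illustrative and not taken from the notebook.

# Hypothetical sketch only: finetuning.ipynb itself is not shown in this diff.
# Assumes a plain-text empathy dataset "empathy.txt" (illustrative name) and the
# base checkpoint recorded in model_save/config.json.
from datasets import load_dataset
from transformers import (AutoModelForCausalLM, AutoTokenizer,
                          DataCollatorForLanguageModeling, Trainer, TrainingArguments)

base = "flax-community/papuGaPT2"
tokenizer = AutoTokenizer.from_pretrained(base)
tokenizer.pad_token = tokenizer.eos_token        # GPT-2 has no dedicated pad token
model = AutoModelForCausalLM.from_pretrained(base)

dataset = load_dataset("text", data_files={"train": "empathy.txt"})["train"]
dataset = dataset.map(
    lambda batch: tokenizer(batch["text"], truncation=True, max_length=1024),
    batched=True, remove_columns=["text"])

trainer = Trainer(
    model=model,
    args=TrainingArguments(output_dir="out", num_train_epochs=3,
                           per_device_train_batch_size=2),
    train_dataset=dataset,
    data_collator=DataCollatorForLanguageModeling(tokenizer, mlm=False),
)
trainer.train()

# Writes the same kind of files that this commit adds under model_save/.
model.save_pretrained("model_save")
tokenizer.save_pretrained("model_save")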
3  model_save/added_tokens.json  Normal file
@@ -0,0 +1,3 @@
+{
+  "<|endoftext|>": 50256
+}
40  model_save/config.json  Normal file
@@ -0,0 +1,40 @@
+{
+  "_name_or_path": "flax-community/papuGaPT2",
+  "activation_function": "gelu_new",
+  "architectures": [
+    "GPT2LMHeadModel"
+  ],
+  "attn_pdrop": 0.0,
+  "bos_token_id": 50256,
+  "embd_pdrop": 0.0,
+  "eos_token_id": 50256,
+  "gradient_checkpointing": false,
+  "initializer_range": 0.02,
+  "layer_norm_epsilon": 1e-05,
+  "model_type": "gpt2",
+  "n_ctx": 1024,
+  "n_embd": 768,
+  "n_head": 12,
+  "n_inner": null,
+  "n_layer": 12,
+  "n_positions": 1024,
+  "reorder_and_upcast_attn": false,
+  "resid_pdrop": 0.0,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
+  "summary_activation": null,
+  "summary_first_dropout": 0.1,
+  "summary_proj_to_labels": true,
+  "summary_type": "cls_index",
+  "summary_use_proj": true,
+  "task_specific_params": {
+    "text-generation": {
+      "do_sample": true,
+      "max_length": 50
+    }
+  },
+  "torch_dtype": "float32",
+  "transformers_version": "4.30.2",
+  "use_cache": true,
+  "vocab_size": 50257
+}
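config.json above records the architecture of the fine-tuned checkpoint: a standard GPT-2 small (12 layers, 12 heads, 768-dim embeddings, 1024-token context) initialised from flax-community/papuGaPT2. A minimal sketch of reloading it, assuming the model_save/ directory from this commit is available locally:

# Sketch: reload the fine-tuned checkpoint added under model_save/.
from transformers import GPT2LMHeadModel

model = GPT2LMHeadModel.from_pretrained("model_save")
model.eval()

# Fields from model_save/config.json are exposed on model.config:
print(model.config.model_type, model.config.n_layer, model.config.n_embd)  # gpt2 12 768
print(model.config.n_positions)                                            # 1024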
6  model_save/generation_config.json  Normal file
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 50256,
+  "eos_token_id": 50256,
+  "transformers_version": "4.30.2"
+}
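generation_config.json pins only the BOS/EOS token ids; the sampling defaults (do_sample, max_length 50) sit under task_specific_params in config.json and are not picked up by a bare generate() call. A hedged example of generating from the saved model, passing those settings explicitly (the Polish prompt is purely illustrative; papuGaPT2 is a Polish GPT-2):

# Sketch: generation with the saved checkpoint; sampling parameters are passed
# per call, mirroring task_specific_params in model_save/config.json.
from transformers import GPT2LMHeadModel, GPT2TokenizerFast

model = GPT2LMHeadModel.from_pretrained("model_save")
tokenizer = GPT2TokenizerFast.from_pretrained("model_save")

inputs = tokenizer("Czuję się dziś bardzo samotny.", return_tensors="pt")
output = model.generate(**inputs, do_sample=True, max_length=50,
                        pad_token_id=tokenizer.eos_token_id)
print(tokenizer.decode(output[0], skip_special_tokens=True))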
49996  model_save/merges.txt  Normal file
File diff suppressed because it is too large
BIN  model_save/pytorch_model.bin  Normal file
Binary file not shown.
6  model_save/special_tokens_map.json  Normal file
@@ -0,0 +1,6 @@
+{
+  "bos_token": "<|endoftext|>",
+  "eos_token": "<|endoftext|>",
+  "pad_token": "<|endoftext|>",
+  "unk_token": "<|endoftext|>"
+}
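special_tokens_map.json maps all four special tokens to <|endoftext|> (ID 50256), the usual GPT-2 arrangement: there is no dedicated pad token, so padding reuses EOS. A small sketch of what that means when batching prompts (the strings are illustrative):

# Sketch: pad_token == eos_token, so batched inputs need an attention_mask to
# tell the model which positions are real tokens and which are padding.
from transformers import GPT2TokenizerFast

tokenizer = GPT2TokenizerFast.from_pretrained("model_save")
tokenizer.padding_side = "left"   # left-pad for decoder-only generation

batch = tokenizer(["Pierwszy prompt", "Drugi, nieco dłuższy prompt"],
                  padding=True, return_tensors="pt")
print(batch["input_ids"].shape)     # both rows padded to the same length with id 50256
print(batch["attention_mask"][0])   # zeros mark the <|endoftext|> padding positions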
100344  model_save/tokenizer.json  Normal file
File diff suppressed because it is too large
9  model_save/tokenizer_config.json  Normal file
@@ -0,0 +1,9 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<|endoftext|>",
+  "clean_up_tokenization_spaces": true,
+  "eos_token": "<|endoftext|>",
+  "model_max_length": 1000000000000000019884624838656,
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": "<|endoftext|>"
+}
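tokenizer_config.json declares a GPT2Tokenizer with add_prefix_space set to false and leaves model_max_length at the "effectively unlimited" sentinel, so the practical limit is the model's 1024-token context from config.json. A short sketch of loading the saved tokenizer and round-tripping text:

# Sketch: load the tokenizer files saved under model_save/ by this commit.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("model_save")

# Cap at the model's real context window; model_max_length here is only a sentinel.
ids = tokenizer("Przykładowe zdanie.", truncation=True, max_length=1024)["input_ids"]
print(ids)
print(tokenizer.decode(ids))                        # round-trips to the input text
print(tokenizer.eos_token, tokenizer.eos_token_id)  # <|endoftext|> 50256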
1  model_save/vocab.json  Normal file
File diff suppressed because one or more lines are too long