hf roberta base regression layer on top
commit 9af4dd453e (parent 33e11dad3d)
dev-0/out.tsv (38240 lines): file diff suppressed because it is too large
@@ -9,7 +9,7 @@ from transformers import get_scheduler
 import torch
 from tqdm.auto import tqdm
 
-BATCH_SIZE = 1
+BATCH_SIZE = 24
 EARLY_STOPPING = 3
 WARMUP_STEPS = 10_000
 
@@ -38,7 +38,7 @@ model = AutoModelForSequenceClassification.from_pretrained(MODEL, num_labels=1)
 optimizer = AdamW(model.parameters(), lr=1e-6)
 
 
-num_epochs = 1
+num_epochs = 15
 num_training_steps = num_epochs * len(train_dataloader)
 lr_scheduler = get_scheduler(
     "linear",
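For context, here is a minimal runnable sketch of the training setup these two hunks patch, reconstructed from the hunks' context lines. Only the hyperparameters, the num_labels=1 regression head, the AdamW learning rate, and the linear warmup scheduler come from the diff; the tokenizer usage, the toy dataset, the AdamW import location, and the training loop are assumptions, not the commit's actual code.

import torch
from torch.optim import AdamW
from torch.utils.data import DataLoader
from tqdm.auto import tqdm
from transformers import (AutoModelForSequenceClassification, AutoTokenizer,
                          get_scheduler)

BATCH_SIZE = 24
EARLY_STOPPING = 3   # defined in the diff; its use (likely patience) is not shown
WARMUP_STEPS = 10_000
MODEL = 'roberta-base'

tokenizer = AutoTokenizer.from_pretrained(MODEL)
# num_labels=1 puts a single-output head on top of RoBERTa; with float
# labels, transformers infers a regression problem and uses MSE loss.
model = AutoModelForSequenceClassification.from_pretrained(MODEL, num_labels=1)
optimizer = AdamW(model.parameters(), lr=1e-6)

# Toy stand-in for the real (text, float target) training data (assumption).
texts = ["an example document"] * 48
targets = [0.5] * 48
enc = tokenizer(texts, truncation=True, padding=True, return_tensors="pt")
dataset = [{"input_ids": enc["input_ids"][i],
            "attention_mask": enc["attention_mask"][i],
            "labels": torch.tensor(targets[i])}
           for i in range(len(texts))]
train_dataloader = DataLoader(dataset, batch_size=BATCH_SIZE)

num_epochs = 15
num_training_steps = num_epochs * len(train_dataloader)
lr_scheduler = get_scheduler(
    "linear",
    optimizer=optimizer,
    num_warmup_steps=WARMUP_STEPS,
    num_training_steps=num_training_steps,
)

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)
model.train()
for epoch in range(num_epochs):
    for batch in tqdm(train_dataloader):
        batch = {k: v.to(device) for k, v in batch.items()}
        loss = model(**batch).loss   # MSE loss from the single-logit head
        loss.backward()
        optimizer.step()
        lr_scheduler.step()
        optimizer.zero_grad()

The jump from BATCH_SIZE = 1 and num_epochs = 1 to 24 and 15 suggests the earlier values were smoke-test settings that this commit promotes to a real training configuration.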
@@ -1,4 +1,4 @@
 #MODEL = '/home/wmi/RoBERTa/without_date/checkpoint-1325000'
 MODEL = 'roberta-base'
-TEST=True
+TEST=False
 
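The hunk above patches what appears to be a separate configuration module; its file name is not shown in this view. A hypothetical consumer, assuming the module is importable as config:

# Hypothetical usage; the module name "config" is an assumption,
# since the commit does not show the config file's name.
from config import MODEL, TEST

from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(MODEL, num_labels=1)
if TEST:
    print("smoke-test mode: expect truncated data / fewer steps")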
test-A/out.tsv (38190 lines): file diff suppressed because it is too large
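The two suppressed diffs replace the model's predictions: one float per line, 38240 lines for dev-0/out.tsv and 38190 for test-A/out.tsv, matching the usual Gonito-style challenge layout. A minimal sketch of how such files are typically regenerated after training; the in.tsv input files and the single-column input format are assumptions:

import torch

# Assumes `model`, `tokenizer`, and `device` from the training sketch above.
model.eval()
for split in ("dev-0", "test-A"):
    # in.tsv (one input text per line) is an assumed companion file.
    with open(f"{split}/in.tsv", encoding="utf-8") as fin, \
         open(f"{split}/out.tsv", "w", encoding="utf-8") as fout:
        for line in fin:
            enc = tokenizer(line.rstrip("\n"), truncation=True,
                            return_tensors="pt").to(device)
            with torch.no_grad():
                pred = model(**enc).logits.squeeze().item()
            fout.write(f"{pred}\n")   # one regression value per line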