From c67fa2f8af84d9e09689a50c6316f060bcfb0ded Mon Sep 17 00:00:00 2001
From: Jakub Pokrywka
Date: Wed, 29 Dec 2021 13:40:11 +0100
Subject: [PATCH] add heads

---
 .../classification_head.py                       | 10 ++++++++++
 hf_roberta_base_classification/regressor_head.py | 13 +++++++++++++
 2 files changed, 23 insertions(+)
 create mode 100644 hf_roberta_base_classification/classification_head.py
 create mode 100644 hf_roberta_base_classification/regressor_head.py

diff --git a/hf_roberta_base_classification/classification_head.py b/hf_roberta_base_classification/classification_head.py
new file mode 100644
index 0000000..60e46c5
--- /dev/null
+++ b/hf_roberta_base_classification/classification_head.py
@@ -0,0 +1,10 @@
+import torch
+
+class YearClassificationHead(torch.nn.Module):
+    def __init__(self, in_dim, MIN_YEAR, MAX_YEAR):
+        super(YearClassificationHead, self).__init__()
+        self.linear = torch.nn.Linear(in_dim, MAX_YEAR - MIN_YEAR + 1)
+    def forward(self, x):
+        x = x.mean(1)
+        x = self.linear(x)
+        return x
diff --git a/hf_roberta_base_classification/regressor_head.py b/hf_roberta_base_classification/regressor_head.py
new file mode 100644
index 0000000..77326e0
--- /dev/null
+++ b/hf_roberta_base_classification/regressor_head.py
@@ -0,0 +1,13 @@
+import torch
+
+class RegressorHead(torch.nn.Module):
+    def __init__(self, in_dim):
+        super(RegressorHead, self).__init__()
+        self.linear = torch.nn.Linear(in_dim, 1)
+        self.m = torch.nn.LeakyReLU(0.1)
+    def forward(self, x):
+        x = x.mean(1)
+        x = self.linear(x)
+        x = self.m(x)
+        x = -self.m(-x + 1) + 1
+        return x
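
Both heads take token-level hidden states of shape (batch, seq_len, hidden_size), mean-pool over the sequence dimension, and project either to one logit per year between MIN_YEAR and MAX_YEAR (classification) or to a single scalar (regression). The following is a minimal usage sketch, not part of the patch: it assumes a Hugging Face roberta-base encoder, that the package directory is importable, and that the regression target is a year rescaled to [0, 1]; the MIN_YEAR/MAX_YEAR values are placeholders.

    import torch
    from transformers import RobertaModel, RobertaTokenizerFast

    # Assumes hf_roberta_base_classification/ is importable as a package.
    from hf_roberta_base_classification.classification_head import YearClassificationHead
    from hf_roberta_base_classification.regressor_head import RegressorHead

    MIN_YEAR, MAX_YEAR = 1900, 2021  # hypothetical year range

    tokenizer = RobertaTokenizerFast.from_pretrained("roberta-base")
    encoder = RobertaModel.from_pretrained("roberta-base")

    cls_head = YearClassificationHead(encoder.config.hidden_size, MIN_YEAR, MAX_YEAR)
    reg_head = RegressorHead(encoder.config.hidden_size)

    batch = tokenizer(["example document text"], return_tensors="pt")
    with torch.no_grad():
        hidden = encoder(**batch).last_hidden_state  # (batch, seq_len, hidden_size)

    # Classification: one logit per year in [MIN_YEAR, MAX_YEAR].
    logits = cls_head(hidden)                        # (batch, MAX_YEAR - MIN_YEAR + 1)
    predicted_year_cls = MIN_YEAR + logits.argmax(dim=-1)

    # Regression: the head's output lies roughly in [0, 1]; mapping it back to a
    # calendar year like this is an assumption about how the targets were scaled.
    frac = reg_head(hidden)                          # (batch, 1)
    predicted_year_reg = MIN_YEAR + frac.squeeze(-1) * (MAX_YEAR - MIN_YEAR)

In the regressor, the line `x = -self.m(-x + 1) + 1` mirrors the first LeakyReLU around 1: values inside [0, 1] pass through unchanged, while values below 0 or above 1 are scaled by 0.1 instead of being hard-clipped, so gradients stay non-zero outside the target range.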