commit a9e6944d12
Author: Andrzej Preibisz
Date:   2023-02-12 20:03:56 +01:00

@@ -18,12 +18,11 @@ class RobertaLeakyHead(nn.Module):
         hidden_size *= 2
         self.dense_1 = nn.Linear(hidden_size, 2 * hidden_size)
-        self.dense_2 = nn.Linear(2 * hidden_size, 4 * hidden_size)
-        self.dense_3 = nn.Linear(4 * hidden_size, 2 * hidden_size)
-        self.dense_4 = nn.Linear(2 * hidden_size, hidden_size)
+        self.dense_2 = nn.Linear(2 * hidden_size, hidden_size)
         classifier_dropout = (
             config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
         )
+        self.leaky_relu = nn.LeakyReLU()
         self.dropout = nn.Dropout(classifier_dropout)
         self.out_proj = nn.Linear(hidden_size, config.num_labels)
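
The first hunk shrinks the classification head from four stacked Linear layers to two and registers a single reusable LeakyReLU module. A minimal sketch of the resulting constructor, assuming hidden_size comes from config.hidden_size and eliding the condition that doubles it above the hunk (not shown in the diff):

```python
import torch.nn as nn

class RobertaLeakyHead(nn.Module):
    # Reconstruction of the head's __init__ after this commit; the lines above
    # the hunk (where hidden_size may be doubled) are not visible in the diff.
    def __init__(self, config):
        super().__init__()
        hidden_size = config.hidden_size  # assumed source of hidden_size
        self.dense_1 = nn.Linear(hidden_size, 2 * hidden_size)
        self.dense_2 = nn.Linear(2 * hidden_size, hidden_size)
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.leaky_relu = nn.LeakyReLU()  # one shared activation module for the whole head
        self.dropout = nn.Dropout(classifier_dropout)
        self.out_proj = nn.Linear(hidden_size, config.num_labels)
```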
@@ -53,19 +52,11 @@ class RobertaLeakyHead(nn.Module):
         )
         x = self.dense_1(x)
-        x = torch.nn.LeakyReLU(x)
+        x = self.leaky_relu(x)
         x = self.dropout(x)
         x = self.dense_2(x)
-        x = torch.nn.LeakyReLU(x)
-        x = self.dropout(x)
-        x = self.dense_3(x)
-        x = torch.nn.LeakyReLU(x)
-        x = self.dropout(x)
-        x = self.dense_4(x)
-        x = torch.nn.LeakyReLU(x)
+        x = self.leaky_relu(x)
         x = self.dropout(x)
         x = self.out_proj(x)
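
The second hunk fixes the activation calls in forward. `torch.nn.LeakyReLU(x)` constructs a new LeakyReLU module (the tensor is interpreted as the negative_slope argument) rather than applying the activation to x, so the commit replaces those calls with the self.leaky_relu instance created in __init__ and drops the dense_3/dense_4 stages. A minimal sketch of the resulting forward pass, assuming the usual RoBERTa-head pooling of the <s> token; the start of forward() is not shown in the diff:

```python
# Sketch of RobertaLeakyHead.forward after this commit (reconstruction).
def forward(self, features, **kwargs):
    x = features[:, 0, :]       # assumed pooling: take the <s> token (equivalent of [CLS])
    x = self.dense_1(x)
    x = self.leaky_relu(x)      # call the module instance, not torch.nn.LeakyReLU(x)
    x = self.dropout(x)
    x = self.dense_2(x)
    x = self.leaky_relu(x)
    x = self.dropout(x)
    x = self.out_proj(x)
    return x
```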