From ba21870e77b118465c03156116d0c352e0d1fb4c Mon Sep 17 00:00:00 2001
From: s444501
Date: Sun, 12 Feb 2023 20:34:20 +0100
Subject: [PATCH] roberta custom head test7

---
 roberta.py | 23 +++++++++++------------
 1 file changed, 11 insertions(+), 12 deletions(-)

diff --git a/roberta.py b/roberta.py
index f4d7765..ad0f479 100644
--- a/roberta.py
+++ b/roberta.py
@@ -22,7 +22,6 @@ class RobertaLeakyHead(nn.Module):
         classifier_dropout = (
             config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
         )
-        self.leaky_relu = nn.LeakyReLU()
         self.dropout = nn.Dropout(classifier_dropout)
         self.out_proj = nn.Linear(hidden_size, config.num_labels)
 
@@ -78,17 +77,17 @@ class RobertaLeaky(RobertaForSequenceClassification):
         self.post_init()
 
     def forward(
-            self,
-            input_ids: Optional[torch.LongTensor] = None,
-            attention_mask: Optional[torch.FloatTensor] = None,
-            token_type_ids: Optional[torch.LongTensor] = None,
-            position_ids: Optional[torch.LongTensor] = None,
-            head_mask: Optional[torch.FloatTensor] = None,
-            inputs_embeds: Optional[torch.FloatTensor] = None,
-            labels: Optional[torch.LongTensor] = None,
-            output_attentions: Optional[bool] = None,
-            output_hidden_states: Optional[bool] = None,
-            return_dict: Optional[bool] = None,
+        self,
+        input_ids: Optional[torch.LongTensor] = None,
+        attention_mask: Optional[torch.FloatTensor] = None,
+        token_type_ids: Optional[torch.LongTensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        head_mask: Optional[torch.FloatTensor] = None,
+        inputs_embeds: Optional[torch.FloatTensor] = None,
+        labels: Optional[torch.LongTensor] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
     ) -> Union[Tuple[torch.Tensor], SequenceClassifierOutput]:
         r"""
         labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
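
Note (not part of the patch): a minimal usage sketch of the patched class. Assumptions not taken from the patch: roberta.py is importable from the working directory, weights are initialized from the public "roberta-base" checkpoint, and num_labels=2 is an arbitrary illustrative value.

# Usage sketch only; RobertaLeaky subclasses RobertaForSequenceClassification,
# so the standard from_pretrained / forward interface applies, and only the
# classification head differs from the stock model.
import torch
from transformers import AutoTokenizer, RobertaConfig
from roberta import RobertaLeaky

config = RobertaConfig.from_pretrained("roberta-base", num_labels=2)   # num_labels=2 is an example value
tokenizer = AutoTokenizer.from_pretrained("roberta-base")
model = RobertaLeaky.from_pretrained("roberta-base", config=config)    # head weights are newly initialized
model.eval()

inputs = tokenizer("A sentence to classify.", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)       # SequenceClassifierOutput
print(outputs.logits.shape)         # torch.Size([1, 2])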