diff --git a/02.ipynb b/02.ipynb
index b86c716..c8dda91 100644
--- a/02.ipynb
+++ b/02.ipynb
@@ -551,6 +551,46 @@
     "## Exercise 6\n",
     "Try to check whether one sentence follows the other."
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Some weights of the model checkpoint at bert-base-uncased were not used when initializing BertForNextSentencePrediction: ['cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.dense.bias', 'cls.predictions.transform.dense.weight', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.decoder.weight', 'cls.predictions.bias']\n",
+      "- This IS expected if you are initializing BertForNextSentencePrediction from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
+      "- This IS NOT expected if you are initializing BertForNextSentencePrediction from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "The next sentence is random: False\n"
+     ]
+    }
+   ],
+   "source": [
+    "from transformers import BertTokenizer, BertForNextSentencePrediction\n",
+    "import torch\n",
+    "\n",
+    "tokenizer = BertTokenizer.from_pretrained(\"bert-base-uncased\")\n",
+    "model = BertForNextSentencePrediction.from_pretrained(\"bert-base-uncased\")\n",
+    "\n",
+    "prompt = \"In Italy, pizza served in formal settings, such as at a restaurant, is presented unsliced.\"\n",
+    "next_sentence = \"In other cases pizza may be sliced.\"\n",
+    "encoding = tokenizer(prompt, next_sentence, return_tensors=\"pt\")\n",
+    "\n",
+    "outputs = model(**encoding, labels=torch.LongTensor([1]))\n",
+    "logits = outputs.logits\n",
+    "\n",
+    "sentenceWasRandom = logits[0, 0] < logits[0, 1]\n",
+    "print(\"The next sentence is random: \" + str(sentenceWasRandom.item()))"
+   ]
   }
  ],
  "metadata": {