projekt-glebokie/T5.ipynb

{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"accelerator": "GPU",
"gpuClass": "standard",
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"4c2fa083b8eb4b2789ad7d423132b3bf": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HBoxModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HBoxView",
"box_style": "",
"children": [
"IPY_MODEL_920e6a7cec5c48b1aadee703e65a6700",
"IPY_MODEL_2d2805490d5c4bb1a94690be6088d661",
"IPY_MODEL_9de3d926400a4b42bb00348feae17390"
],
"layout": "IPY_MODEL_e8f633233f3b472495ad5fb41151e071"
}
},
"920e6a7cec5c48b1aadee703e65a6700": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_0b968d14824e4cffb08af2a714fb3b09",
"placeholder": "",
"style": "IPY_MODEL_1df55001edfa4d45bd6c6cafd60d056f",
"value": "100%"
}
},
"2d2805490d5c4bb1a94690be6088d661": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "FloatProgressModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "ProgressView",
"bar_style": "success",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_cb820b66b28642d1ab421f24fa3d2d56",
"max": 3,
"min": 0,
"orientation": "horizontal",
"style": "IPY_MODEL_49065aeb0f1e4bf7a0b12f555b0a49d3",
"value": 3
}
},
"9de3d926400a4b42bb00348feae17390": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"model_module_version": "1.5.0",
"state": {
"_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "HTMLModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/controls",
"_view_module_version": "1.5.0",
"_view_name": "HTMLView",
"description": "",
"description_tooltip": null,
"layout": "IPY_MODEL_73c44df5c4074d38808b207bf384c07c",
"placeholder": "",
"style": "IPY_MODEL_c21a5ee5c45a45b693c5518876d78da8",
"value": " 3/3 [00:00<00:00, 97.56it/s]"
}
},
"e8f633233f3b472495ad5fb41151e071": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"0b968d14824e4cffb08af2a714fb3b09": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"1df55001edfa4d45bd6c6cafd60d056f": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
},
"cb820b66b28642d1ab421f24fa3d2d56": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"49065aeb0f1e4bf7a0b12f555b0a49d3": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "ProgressStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"bar_color": null,
"description_width": ""
}
},
"73c44df5c4074d38808b207bf384c07c": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"model_module_version": "1.2.0",
"state": {
"_model_module": "@jupyter-widgets/base",
"_model_module_version": "1.2.0",
"_model_name": "LayoutModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "LayoutView",
"align_content": null,
"align_items": null,
"align_self": null,
"border": null,
"bottom": null,
"display": null,
"flex": null,
"flex_flow": null,
"grid_area": null,
"grid_auto_columns": null,
"grid_auto_flow": null,
"grid_auto_rows": null,
"grid_column": null,
"grid_gap": null,
"grid_row": null,
"grid_template_areas": null,
"grid_template_columns": null,
"grid_template_rows": null,
"height": null,
"justify_content": null,
"justify_items": null,
"left": null,
"margin": null,
"max_height": null,
"max_width": null,
"min_height": null,
"min_width": null,
"object_fit": null,
"object_position": null,
"order": null,
"overflow": null,
"overflow_x": null,
"overflow_y": null,
"padding": null,
"right": null,
"top": null,
"visibility": null,
"width": null
}
},
"c21a5ee5c45a45b693c5518876d78da8": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"model_module_version": "1.5.0",
"state": {
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_model_name": "DescriptionStyleModel",
"_view_count": null,
"_view_module": "@jupyter-widgets/base",
"_view_module_version": "1.2.0",
"_view_name": "StyleView",
"description_width": ""
}
}
}
}
},
"cells": [
{
"cell_type": "markdown",
"source": [
"# Setup"
],
"metadata": {
"id": "n2A5EThJNiAy"
}
},
{
"cell_type": "markdown",
"source": [
"## Requirements"
],
"metadata": {
"id": "tPp2_1rDOFYA"
}
},
{
"cell_type": "code",
"execution_count": 53,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "OmsX3kG4bLTg",
"outputId": "50bddad6-a15c-45c5-82f9-183a397eebc2"
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: torch in /usr/local/lib/python3.8/dist-packages (1.13.1+cu116)\n",
"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.8/dist-packages (from torch) (4.4.0)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: datasets in /usr/local/lib/python3.8/dist-packages (2.9.0)\n",
"Requirement already satisfied: tqdm>=4.62.1 in /usr/local/lib/python3.8/dist-packages (from datasets) (4.64.1)\n",
"Requirement already satisfied: multiprocess in /usr/local/lib/python3.8/dist-packages (from datasets) (0.70.14)\n",
"Requirement already satisfied: requests>=2.19.0 in /usr/local/lib/python3.8/dist-packages (from datasets) (2.25.1)\n",
"Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.8/dist-packages (from datasets) (6.0)\n",
"Requirement already satisfied: pandas in /usr/local/lib/python3.8/dist-packages (from datasets) (1.3.5)\n",
"Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.8/dist-packages (from datasets) (1.21.6)\n",
"Requirement already satisfied: dill<0.3.7 in /usr/local/lib/python3.8/dist-packages (from datasets) (0.3.6)\n",
"Requirement already satisfied: packaging in /usr/local/lib/python3.8/dist-packages (from datasets) (23.0)\n",
"Requirement already satisfied: fsspec[http]>=2021.11.1 in /usr/local/lib/python3.8/dist-packages (from datasets) (2023.1.0)\n",
"Requirement already satisfied: aiohttp in /usr/local/lib/python3.8/dist-packages (from datasets) (3.8.3)\n",
"Requirement already satisfied: pyarrow>=6.0.0 in /usr/local/lib/python3.8/dist-packages (from datasets) (9.0.0)\n",
"Requirement already satisfied: responses<0.19 in /usr/local/lib/python3.8/dist-packages (from datasets) (0.18.0)\n",
"Requirement already satisfied: xxhash in /usr/local/lib/python3.8/dist-packages (from datasets) (3.2.0)\n",
"Requirement already satisfied: huggingface-hub<1.0.0,>=0.2.0 in /usr/local/lib/python3.8/dist-packages (from datasets) (0.12.0)\n",
"Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets) (1.3.1)\n",
"Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets) (1.8.2)\n",
"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets) (4.0.2)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets) (6.0.4)\n",
"Requirement already satisfied: charset-normalizer<3.0,>=2.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets) (2.1.1)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets) (1.3.3)\n",
"Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets) (22.2.0)\n",
"Requirement already satisfied: filelock in /usr/local/lib/python3.8/dist-packages (from huggingface-hub<1.0.0,>=0.2.0->datasets) (3.9.0)\n",
"Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.8/dist-packages (from huggingface-hub<1.0.0,>=0.2.0->datasets) (4.4.0)\n",
"Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.8/dist-packages (from requests>=2.19.0->datasets) (4.0.0)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests>=2.19.0->datasets) (2022.12.7)\n",
"Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests>=2.19.0->datasets) (2.10)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests>=2.19.0->datasets) (1.26.14)\n",
"Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.8/dist-packages (from pandas->datasets) (2.8.2)\n",
"Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.8/dist-packages (from pandas->datasets) (2022.7.1)\n",
"Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.8/dist-packages (from python-dateutil>=2.7.3->pandas->datasets) (1.15.0)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: transformers in /usr/local/lib/python3.8/dist-packages (4.26.1)\n",
"Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.8/dist-packages (from transformers) (4.64.1)\n",
"Requirement already satisfied: filelock in /usr/local/lib/python3.8/dist-packages (from transformers) (3.9.0)\n",
"Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.8/dist-packages (from transformers) (2022.6.2)\n",
"Requirement already satisfied: tokenizers!=0.11.3,<0.14,>=0.11.1 in /usr/local/lib/python3.8/dist-packages (from transformers) (0.13.2)\n",
"Requirement already satisfied: huggingface-hub<1.0,>=0.11.0 in /usr/local/lib/python3.8/dist-packages (from transformers) (0.12.0)\n",
"Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.8/dist-packages (from transformers) (23.0)\n",
"Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.8/dist-packages (from transformers) (6.0)\n",
"Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.8/dist-packages (from transformers) (1.21.6)\n",
"Requirement already satisfied: requests in /usr/local/lib/python3.8/dist-packages (from transformers) (2.25.1)\n",
"Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.8/dist-packages (from huggingface-hub<1.0,>=0.11.0->transformers) (4.4.0)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests->transformers) (2022.12.7)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests->transformers) (1.26.14)\n",
"Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.8/dist-packages (from requests->transformers) (4.0.0)\n",
"Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests->transformers) (2.10)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: scikit-learn in /usr/local/lib/python3.8/dist-packages (1.0.2)\n",
"Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.8/dist-packages (from scikit-learn) (3.1.0)\n",
"Requirement already satisfied: numpy>=1.14.6 in /usr/local/lib/python3.8/dist-packages (from scikit-learn) (1.21.6)\n",
"Requirement already satisfied: scipy>=1.1.0 in /usr/local/lib/python3.8/dist-packages (from scikit-learn) (1.7.3)\n",
"Requirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.8/dist-packages (from scikit-learn) (1.2.0)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: evaluate in /usr/local/lib/python3.8/dist-packages (0.4.0)\n",
"Requirement already satisfied: packaging in /usr/local/lib/python3.8/dist-packages (from evaluate) (23.0)\n",
"Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.8/dist-packages (from evaluate) (1.21.6)\n",
"Requirement already satisfied: responses<0.19 in /usr/local/lib/python3.8/dist-packages (from evaluate) (0.18.0)\n",
"Requirement already satisfied: requests>=2.19.0 in /usr/local/lib/python3.8/dist-packages (from evaluate) (2.25.1)\n",
"Requirement already satisfied: tqdm>=4.62.1 in /usr/local/lib/python3.8/dist-packages (from evaluate) (4.64.1)\n",
"Requirement already satisfied: pandas in /usr/local/lib/python3.8/dist-packages (from evaluate) (1.3.5)\n",
"Requirement already satisfied: dill in /usr/local/lib/python3.8/dist-packages (from evaluate) (0.3.6)\n",
"Requirement already satisfied: fsspec[http]>=2021.05.0 in /usr/local/lib/python3.8/dist-packages (from evaluate) (2023.1.0)\n",
"Requirement already satisfied: datasets>=2.0.0 in /usr/local/lib/python3.8/dist-packages (from evaluate) (2.9.0)\n",
"Requirement already satisfied: multiprocess in /usr/local/lib/python3.8/dist-packages (from evaluate) (0.70.14)\n",
"Requirement already satisfied: huggingface-hub>=0.7.0 in /usr/local/lib/python3.8/dist-packages (from evaluate) (0.12.0)\n",
"Requirement already satisfied: xxhash in /usr/local/lib/python3.8/dist-packages (from evaluate) (3.2.0)\n",
"Requirement already satisfied: aiohttp in /usr/local/lib/python3.8/dist-packages (from datasets>=2.0.0->evaluate) (3.8.3)\n",
"Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.8/dist-packages (from datasets>=2.0.0->evaluate) (6.0)\n",
"Requirement already satisfied: pyarrow>=6.0.0 in /usr/local/lib/python3.8/dist-packages (from datasets>=2.0.0->evaluate) (9.0.0)\n",
"Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.8/dist-packages (from huggingface-hub>=0.7.0->evaluate) (4.4.0)\n",
"Requirement already satisfied: filelock in /usr/local/lib/python3.8/dist-packages (from huggingface-hub>=0.7.0->evaluate) (3.9.0)\n",
"Requirement already satisfied: chardet<5,>=3.0.2 in /usr/local/lib/python3.8/dist-packages (from requests>=2.19.0->evaluate) (4.0.0)\n",
"Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.8/dist-packages (from requests>=2.19.0->evaluate) (1.26.14)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests>=2.19.0->evaluate) (2022.12.7)\n",
"Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests>=2.19.0->evaluate) (2.10)\n",
"Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.8/dist-packages (from pandas->evaluate) (2.8.2)\n",
"Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.8/dist-packages (from pandas->evaluate) (2022.7.1)\n",
"Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (1.3.1)\n",
"Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (22.2.0)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (6.0.4)\n",
"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (4.0.2)\n",
"Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (1.8.2)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (1.3.3)\n",
"Requirement already satisfied: charset-normalizer<3.0,>=2.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->datasets>=2.0.0->evaluate) (2.1.1)\n",
"Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.8/dist-packages (from python-dateutil>=2.7.3->pandas->evaluate) (1.15.0)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: accelerate in /usr/local/lib/python3.8/dist-packages (0.16.0)\n",
"Requirement already satisfied: torch>=1.4.0 in /usr/local/lib/python3.8/dist-packages (from accelerate) (1.13.1+cu116)\n",
"Requirement already satisfied: pyyaml in /usr/local/lib/python3.8/dist-packages (from accelerate) (6.0)\n",
"Requirement already satisfied: psutil in /usr/local/lib/python3.8/dist-packages (from accelerate) (5.4.8)\n",
"Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.8/dist-packages (from accelerate) (23.0)\n",
"Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.8/dist-packages (from accelerate) (1.21.6)\n",
"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.8/dist-packages (from torch>=1.4.0->accelerate) (4.4.0)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: sentencepiece in /usr/local/lib/python3.8/dist-packages (0.1.97)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: protobuf in /usr/local/lib/python3.8/dist-packages (3.19.6)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: sacrebleu in /usr/local/lib/python3.8/dist-packages (2.3.1)\n",
"Requirement already satisfied: regex in /usr/local/lib/python3.8/dist-packages (from sacrebleu) (2022.6.2)\n",
"Requirement already satisfied: colorama in /usr/local/lib/python3.8/dist-packages (from sacrebleu) (0.4.6)\n",
"Requirement already satisfied: lxml in /usr/local/lib/python3.8/dist-packages (from sacrebleu) (4.9.2)\n",
"Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.8/dist-packages (from sacrebleu) (1.21.6)\n",
"Requirement already satisfied: tabulate>=0.8.9 in /usr/local/lib/python3.8/dist-packages (from sacrebleu) (0.8.10)\n",
"Requirement already satisfied: portalocker in /usr/local/lib/python3.8/dist-packages (from sacrebleu) (2.7.0)\n",
"Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
"Requirement already satisfied: py7zr in /usr/local/lib/python3.8/dist-packages (0.20.4)\n",
"Requirement already satisfied: multivolumefile>=0.2.3 in /usr/local/lib/python3.8/dist-packages (from py7zr) (0.2.3)\n",
"Requirement already satisfied: brotli>=1.0.9 in /usr/local/lib/python3.8/dist-packages (from py7zr) (1.0.9)\n",
"Requirement already satisfied: texttable in /usr/local/lib/python3.8/dist-packages (from py7zr) (1.6.7)\n",
"Requirement already satisfied: inflate64>=0.3.1 in /usr/local/lib/python3.8/dist-packages (from py7zr) (0.3.1)\n",
"Requirement already satisfied: pyppmd<1.1.0,>=0.18.1 in /usr/local/lib/python3.8/dist-packages (from py7zr) (1.0.0)\n",
"Requirement already satisfied: pybcj>=0.6.0 in /usr/local/lib/python3.8/dist-packages (from py7zr) (1.0.1)\n",
"Requirement already satisfied: pyzstd>=0.14.4 in /usr/local/lib/python3.8/dist-packages (from py7zr) (0.15.3)\n",
"Requirement already satisfied: psutil in /usr/local/lib/python3.8/dist-packages (from py7zr) (5.4.8)\n",
"Requirement already satisfied: pycryptodomex>=3.6.6 in /usr/local/lib/python3.8/dist-packages (from py7zr) (3.17)\n"
]
}
],
"source": [
"!pip install torch\n",
"!pip install datasets\n",
"!pip install transformers\n",
"!pip install scikit-learn\n",
"!pip install evaluate\n",
"!pip install accelerate\n",
"!pip install sentencepiece\n",
"!pip install protobuf\n",
"!pip install sacrebleu\n",
"!pip install py7zr\n"
]
},
{
"cell_type": "markdown",
"source": [
"## Imports"
],
"metadata": {
"id": "o3Kj9IzuOKMi"
}
},
{
"cell_type": "code",
"source": [
"import os\n",
"import json\n",
"import torch\n",
"from google.colab import drive\n",
"from pathlib import Path\n",
"from typing import Dict, List\n",
"from datasets import load_dataset\n",
"from transformers import T5Tokenizer"
],
"metadata": {
"id": "r92S06noeSWE"
},
"execution_count": 54,
"outputs": []
},
{
"cell_type": "markdown",
"source": [
"## Loading data"
],
"metadata": {
"id": "2UzLo91gNnsA"
}
},
{
"cell_type": "code",
"source": [
"loaded_data = load_dataset('emotion')\n",
"!mkdir -v -p data\n",
"train_path = Path('data/train.json')\n",
"valid_path = Path('data/valid.json')\n",
"test_path = Path('data/test.json')\n",
"data_train, data_valid, data_test = [], [], []"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 0,
"referenced_widgets": [
"4c2fa083b8eb4b2789ad7d423132b3bf",
"920e6a7cec5c48b1aadee703e65a6700",
"2d2805490d5c4bb1a94690be6088d661",
"9de3d926400a4b42bb00348feae17390",
"e8f633233f3b472495ad5fb41151e071",
"0b968d14824e4cffb08af2a714fb3b09",
"1df55001edfa4d45bd6c6cafd60d056f",
"cb820b66b28642d1ab421f24fa3d2d56",
"49065aeb0f1e4bf7a0b12f555b0a49d3",
"73c44df5c4074d38808b207bf384c07c",
"c21a5ee5c45a45b693c5518876d78da8"
]
},
"id": "n_miey7eb2Xr",
"outputId": "bf7ba21e-3acb-4302-9631-3d9628b31dad"
},
"execution_count": 55,
"outputs": [
{
"output_type": "stream",
"name": "stderr",
"text": [
"WARNING:datasets.builder:No config specified, defaulting to: emotion/split\n",
"WARNING:datasets.builder:Found cached dataset emotion (/root/.cache/huggingface/datasets/emotion/split/1.0.0/cca5efe2dfeb58c1d098e0f9eeb200e9927d889b5a03c67097275dfb5fe463bd)\n"
]
},
{
"output_type": "display_data",
"data": {
"text/plain": [
" 0%| | 0/3 [00:00<?, ?it/s]"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "4c2fa083b8eb4b2789ad7d423132b3bf"
}
},
"metadata": {}
}
]
},
{
"cell_type": "code",
"source": [
"for source_data, dataset, max_size in [\n",
" (loaded_data['train'], data_train, None),\n",
" (loaded_data['validation'], data_valid, None),\n",
" (loaded_data['test'], data_test, None),\n",
"]:\n",
" for i, data in enumerate(source_data):\n",
" if max_size is not None and i >= max_size:\n",
" break\n",
" data_line = {\n",
" 'label': int(data['label']),\n",
" 'text': data['text'],\n",
" }\n",
" dataset.append(data_line)\n",
"\n",
"print(f'Train: {len(data_train):6d}')\n",
"print(f'Valid: {len(data_valid):6d}')\n",
"print(f'Test: {len(data_test):6d}')"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "BZ6afaRzGsxS",
"outputId": "bab51519-3afc-4b32-9611-775edb1b305b"
},
"execution_count": 56,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Train: 16000\n",
"Valid: 2000\n",
"Test: 2000\n"
]
}
]
},
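{
"cell_type": "markdown",
"source": [
"As a quick sanity check on the loaded splits, the label distribution can be inspected with `collections.Counter` from the standard library. The cell below is an optional sketch added for illustration and was not part of the original run."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Optional sketch: count how often each label id occurs in the training split.\n",
"from collections import Counter\n",
"\n",
"label_counts = Counter(line['label'] for line in data_train)\n",
"for label_id, count in sorted(label_counts.items()):\n",
"    print(f'{label_id}: {count}')"
],
"metadata": {},
"execution_count": null,
"outputs": []
},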
{
"cell_type": "code",
"source": [
"MAP_LABEL_TRANSLATION = {\n",
" 0: 'sadness',\n",
" 1: 'joy',\n",
" 2: 'love',\n",
" 3: 'anger',\n",
" 4: 'fear',\n",
" 5: 'surprise',\n",
"}"
],
"metadata": {
"id": "w0KyM4TrGxQY"
},
"execution_count": 57,
"outputs": []
},
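{
"cell_type": "markdown",
"source": [
"Since the seq2seq model will generate label words such as `joy` rather than integer ids, an inverse of `MAP_LABEL_TRANSLATION` can be useful later, for example when scoring predictions against the original ids. The cell below is an illustrative sketch; the name `MAP_TRANSLATION_LABEL` is introduced here only for this example."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Optional sketch: reverse mapping from generated label text back to integer ids.\n",
"MAP_TRANSLATION_LABEL = {name: idx for idx, name in MAP_LABEL_TRANSLATION.items()}\n",
"\n",
"assert MAP_TRANSLATION_LABEL['sadness'] == 0\n",
"assert MAP_TRANSLATION_LABEL['surprise'] == 5"
],
"metadata": {},
"execution_count": null,
"outputs": []
},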
{
"cell_type": "code",
"source": [
"def save_as_translations(original_save_path: Path, data_to_save: List[Dict]) -> None:\n",
" file_name = 's2s-' + original_save_path.name\n",
" file_path = original_save_path.parent / file_name\n",
"\n",
" print(f'Saving into: {file_path}')\n",
" with open(file_path, 'wt') as f_write:\n",
" for data_line in data_to_save:\n",
" label = data_line['label']\n",
" new_label = MAP_LABEL_TRANSLATION[label]\n",
" data_line['label'] = new_label\n",
" data_line_str = json.dumps(data_line)\n",
" f_write.write(f'{data_line_str}\\n')"
],
"metadata": {
"id": "-EFRYeAYHIKN"
},
"execution_count": 58,
"outputs": []
},
{
"cell_type": "code",
"source": [
"for file_path, data_to_save in [(train_path, data_train), (valid_path, data_valid), (test_path, data_test)]:\n",
" print(f'Saving into: {file_path}')\n",
" with open(file_path, 'wt') as f_write:\n",
" for data_line in data_to_save:\n",
" data_line_str = json.dumps(data_line)\n",
" f_write.write(f'{data_line_str}\\n')\n",
" \n",
" save_as_translations(file_path, data_to_save)"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "7RsrTNGCHIqc",
"outputId": "ef7b1299-309d-4850-e2bd-e914f8604f4c"
},
"execution_count": 59,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Saving into: data/train.json\n",
"Saving into: data/s2s-train.json\n",
"Saving into: data/valid.json\n",
"Saving into: data/s2s-valid.json\n",
"Saving into: data/test.json\n",
"Saving into: data/s2s-test.json\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"!head data/train.json"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "Svu6YYSaHK4t",
"outputId": "c715aa76-05f4-4a88-834a-568714902e53"
},
"execution_count": 60,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"{\"label\": 0, \"text\": \"i didnt feel humiliated\"}\n",
"{\"label\": 0, \"text\": \"i can go from feeling so hopeless to so damned hopeful just from being around someone who cares and is awake\"}\n",
"{\"label\": 3, \"text\": \"im grabbing a minute to post i feel greedy wrong\"}\n",
"{\"label\": 2, \"text\": \"i am ever feeling nostalgic about the fireplace i will know that it is still on the property\"}\n",
"{\"label\": 3, \"text\": \"i am feeling grouchy\"}\n",
"{\"label\": 0, \"text\": \"ive been feeling a little burdened lately wasnt sure why that was\"}\n",
"{\"label\": 5, \"text\": \"ive been taking or milligrams or times recommended amount and ive fallen asleep a lot faster but i also feel like so funny\"}\n",
"{\"label\": 4, \"text\": \"i feel as confused about life as a teenager or as jaded as a year old man\"}\n",
"{\"label\": 1, \"text\": \"i have been with petronas for years i feel that petronas has performed well and made a huge profit\"}\n",
"{\"label\": 2, \"text\": \"i feel romantic too\"}\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"!head data/s2s-train.json"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "5INZa4ZJHQbn",
"outputId": "f32b1ec2-0218-4ca7-f45d-2e11ccb2e0e4"
},
"execution_count": 61,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"{\"label\": \"sadness\", \"text\": \"i didnt feel humiliated\"}\n",
"{\"label\": \"sadness\", \"text\": \"i can go from feeling so hopeless to so damned hopeful just from being around someone who cares and is awake\"}\n",
"{\"label\": \"anger\", \"text\": \"im grabbing a minute to post i feel greedy wrong\"}\n",
"{\"label\": \"love\", \"text\": \"i am ever feeling nostalgic about the fireplace i will know that it is still on the property\"}\n",
"{\"label\": \"anger\", \"text\": \"i am feeling grouchy\"}\n",
"{\"label\": \"sadness\", \"text\": \"ive been feeling a little burdened lately wasnt sure why that was\"}\n",
"{\"label\": \"surprise\", \"text\": \"ive been taking or milligrams or times recommended amount and ive fallen asleep a lot faster but i also feel like so funny\"}\n",
"{\"label\": \"fear\", \"text\": \"i feel as confused about life as a teenager or as jaded as a year old man\"}\n",
"{\"label\": \"joy\", \"text\": \"i have been with petronas for years i feel that petronas has performed well and made a huge profit\"}\n",
"{\"label\": \"love\", \"text\": \"i feel romantic too\"}\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"# create tiny datasets for debugging purposes\n",
"for file_name in [\"s2s-train\", \"s2s-valid\", \"s2s-test\"]:\n",
" print(f\"=== {file_name} ===\")\n",
" all_text = Path(f\"data/{file_name}.json\").read_text().split('\\n')\n",
" text = all_text[:250] + all_text[-250:]\n",
" Path(f\"data/{file_name}-500.json\").write_text(\"\\n\".join(text))"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "OYeI-JvepSf7",
"outputId": "d734994e-8bb4-4440-fe8e-1bc058013836"
},
"execution_count": 62,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"=== s2s-train ===\n",
"=== s2s-valid ===\n",
"=== s2s-test ===\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"!wc -l data/*"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "_WSOgm50LI0m",
"outputId": "8669ada8-b4c6-4b68-faa1-1c67187360ba"
},
"execution_count": 63,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
" 499 data/s2s-test-500.json\n",
" 2000 data/s2s-test.json\n",
" 499 data/s2s-train-500.json\n",
" 16000 data/s2s-train.json\n",
" 499 data/s2s-valid-500.json\n",
" 2000 data/s2s-valid.json\n",
" 2000 data/test.json\n",
" 16000 data/train.json\n",
" 2000 data/valid.json\n",
" 41497 total\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# GPU Info"
],
"metadata": {
"id": "b78jArQhN2Jb"
}
},
{
"cell_type": "code",
"source": [
"!nvidia-smi"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "TZk2ZwJML4Wz",
"outputId": "1bcf918c-049c-401d-ea89-75fd957e7e1d"
},
"execution_count": 64,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Sun Feb 12 13:55:26 2023 \n",
"+-----------------------------------------------------------------------------+\n",
"| NVIDIA-SMI 510.47.03 Driver Version: 510.47.03 CUDA Version: 11.6 |\n",
"|-------------------------------+----------------------+----------------------+\n",
"| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n",
"| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n",
"| | | MIG M. |\n",
"|===============================+======================+======================|\n",
"| 0 Tesla T4 Off | 00000000:00:04.0 Off | 0 |\n",
"| N/A 69C P0 31W / 70W | 0MiB / 15360MiB | 0% Default |\n",
"| | | N/A |\n",
"+-------------------------------+----------------------+----------------------+\n",
" \n",
"+-----------------------------------------------------------------------------+\n",
"| Processes: |\n",
"| GPU GI CI PID Type Process name GPU Memory |\n",
"| ID ID Usage |\n",
"|=============================================================================|\n",
"| No running processes found |\n",
"+-----------------------------------------------------------------------------+\n"
]
}
]
},
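{
"cell_type": "markdown",
"source": [
"The same check can be done from Python with `torch`, which is already imported above. This is a minimal optional sketch, not required for the training run."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Optional sketch: confirm that PyTorch sees the Colab GPU.\n",
"if torch.cuda.is_available():\n",
"    print('CUDA device:', torch.cuda.get_device_name(0))\n",
"else:\n",
"    print('No CUDA device visible; training would fall back to CPU.')"
],
"metadata": {},
"execution_count": null,
"outputs": []
},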
{
"cell_type": "code",
"source": [
"os.environ['TOKENIZERS_PARALLELISM'] = 'true'"
],
"metadata": {
"id": "e-ssYW1WL71Y"
},
"execution_count": 65,
"outputs": []
},
{
"cell_type": "markdown",
"source": [
"# Run"
],
"metadata": {
"id": "r9WTEt5PN4Oq"
}
},
{
"cell_type": "code",
"source": [
"!wget 'https://git.wmi.amu.edu.pl/s444465/projekt-glebokie/raw/branch/master/run_translation_freezing.py' -O 'run_translation.py'"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "PEXlpaZHOeUn",
"outputId": "4e34b28d-947e-44f9-a076-d0e29133f955"
},
"execution_count": 66,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"--2023-02-12 13:55:27-- https://git.wmi.amu.edu.pl/s444465/projekt-glebokie/raw/branch/master/run_translation_freezing.py\n",
"Resolving git.wmi.amu.edu.pl (git.wmi.amu.edu.pl)... 150.254.78.40\n",
"Connecting to git.wmi.amu.edu.pl (git.wmi.amu.edu.pl)|150.254.78.40|:443... connected.\n",
"HTTP request sent, awaiting response... 200 OK\n",
"Length: 30055 (29K) [text/plain]\n",
"Saving to: run_translation.py\n",
"\n",
"run_translation.py 100%[===================>] 29.35K --.-KB/s in 0.03s \n",
"\n",
"2023-02-12 13:55:27 (1.05 MB/s) - run_translation.py saved [30055/30055]\n",
"\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"torch.cuda.empty_cache()"
],
"metadata": {
"id": "HIjRddM2NWVG"
},
"execution_count": 67,
"outputs": []
},
{
"cell_type": "code",
"source": [
"!python run_translation.py \\\n",
" --cache_dir .cache_training \\\n",
" --model_name_or_path \"google/t5-v1_1-small\" \\\n",
" --freeze_weights \\\n",
" --train_file data/s2s-train.json \\\n",
" --validation_file data/s2s-valid.json \\\n",
" --test_file data/s2s-test.json \\\n",
" --per_device_train_batch_size 8 \\\n",
" --per_device_eval_batch_size 8 \\\n",
" --source_lang \"text\" \\\n",
" --target_lang \"label\" \\\n",
" --source_prefix \"emotion classification\" \\\n",
" --max_source_length 256 \\\n",
" --max_target_length 128 \\\n",
" --generation_max_length 128 \\\n",
" --do_train \\\n",
" --do_eval \\\n",
" --do_predict \\\n",
" --predict_with_generate \\\n",
" --num_train_epochs 1 \\\n",
" --overwrite_output_dir \\\n",
" --output_dir out/emotion/t5_v1_1"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "liSabFBacYLf",
"outputId": "08b6bdd5-ddba-4f21-84b7-1f1b98a999b9"
},
"execution_count": 68,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"2023-02-12 13:55:31.695327: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer.so.7'; dlerror: libnvinfer.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /usr/lib64-nvidia\n",
"2023-02-12 13:55:31.695472: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer_plugin.so.7'; dlerror: libnvinfer_plugin.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /usr/lib64-nvidia\n",
"2023-02-12 13:55:31.695499: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Cannot dlopen some TensorRT libraries. If you would like to use Nvidia GPU with TensorRT, please make sure the missing libraries mentioned above are installed properly.\n",
"WARNING:__main__:Process rank: -1, device: cuda:0, n_gpu: 1distributed training: False, 16-bits training: False\n",
"INFO:__main__:Training/evaluation parameters Seq2SeqTrainingArguments(\n",
"_n_gpu=1,\n",
"adafactor=False,\n",
"adam_beta1=0.9,\n",
"adam_beta2=0.999,\n",
"adam_epsilon=1e-08,\n",
"auto_find_batch_size=False,\n",
"bf16=False,\n",
"bf16_full_eval=False,\n",
"data_seed=None,\n",
"dataloader_drop_last=False,\n",
"dataloader_num_workers=0,\n",
"dataloader_pin_memory=True,\n",
"ddp_bucket_cap_mb=None,\n",
"ddp_find_unused_parameters=None,\n",
"ddp_timeout=1800,\n",
"debug=[],\n",
"deepspeed=None,\n",
"disable_tqdm=False,\n",
"do_eval=True,\n",
"do_predict=True,\n",
"do_train=True,\n",
"eval_accumulation_steps=None,\n",
"eval_delay=0,\n",
"eval_steps=None,\n",
"evaluation_strategy=no,\n",
"fp16=False,\n",
"fp16_backend=auto,\n",
"fp16_full_eval=False,\n",
"fp16_opt_level=O1,\n",
"fsdp=[],\n",
"fsdp_min_num_params=0,\n",
"fsdp_transformer_layer_cls_to_wrap=None,\n",
"full_determinism=False,\n",
"generation_max_length=128,\n",
"generation_num_beams=None,\n",
"gradient_accumulation_steps=1,\n",
"gradient_checkpointing=False,\n",
"greater_is_better=None,\n",
"group_by_length=False,\n",
"half_precision_backend=auto,\n",
"hub_model_id=None,\n",
"hub_private_repo=False,\n",
"hub_strategy=every_save,\n",
"hub_token=<HUB_TOKEN>,\n",
"ignore_data_skip=False,\n",
"include_inputs_for_metrics=False,\n",
"jit_mode_eval=False,\n",
"label_names=None,\n",
"label_smoothing_factor=0.0,\n",
"learning_rate=5e-05,\n",
"length_column_name=length,\n",
"load_best_model_at_end=False,\n",
"local_rank=-1,\n",
"log_level=passive,\n",
"log_level_replica=passive,\n",
"log_on_each_node=True,\n",
"logging_dir=out/emotion/t5_v1_1/runs/Feb12_13-55-35_2f3d46b868f5,\n",
"logging_first_step=False,\n",
"logging_nan_inf_filter=True,\n",
"logging_steps=500,\n",
"logging_strategy=steps,\n",
"lr_scheduler_type=linear,\n",
"max_grad_norm=1.0,\n",
"max_steps=-1,\n",
"metric_for_best_model=None,\n",
"mp_parameters=,\n",
"no_cuda=False,\n",
"num_train_epochs=1.0,\n",
"optim=adamw_hf,\n",
"optim_args=None,\n",
"output_dir=out/emotion/t5_v1_1,\n",
"overwrite_output_dir=True,\n",
"past_index=-1,\n",
"per_device_eval_batch_size=8,\n",
"per_device_train_batch_size=8,\n",
"predict_with_generate=True,\n",
"prediction_loss_only=False,\n",
"push_to_hub=False,\n",
"push_to_hub_model_id=None,\n",
"push_to_hub_organization=None,\n",
"push_to_hub_token=<PUSH_TO_HUB_TOKEN>,\n",
"ray_scope=last,\n",
"remove_unused_columns=True,\n",
"report_to=['tensorboard'],\n",
"resume_from_checkpoint=None,\n",
"run_name=out/emotion/t5_v1_1,\n",
"save_on_each_node=False,\n",
"save_steps=500,\n",
"save_strategy=steps,\n",
"save_total_limit=None,\n",
"seed=42,\n",
"sharded_ddp=[],\n",
"skip_memory_metrics=True,\n",
"sortish_sampler=False,\n",
"tf32=None,\n",
"torch_compile=False,\n",
"torch_compile_backend=None,\n",
"torch_compile_mode=None,\n",
"torchdynamo=None,\n",
"tpu_metrics_debug=False,\n",
"tpu_num_cores=None,\n",
"use_ipex=False,\n",
"use_legacy_prediction_loop=False,\n",
"use_mps_device=False,\n",
"warmup_ratio=0.0,\n",
"warmup_steps=0,\n",
"weight_decay=0.0,\n",
"xpu_backend=None,\n",
")\n",
"WARNING:datasets.builder:Using custom data configuration default-1b7ebb62c359adc9\n",
"INFO:datasets.info:Loading Dataset Infos from /usr/local/lib/python3.8/dist-packages/datasets/packaged_modules/json\n",
"INFO:datasets.builder:Generating dataset json (/content/.cache_training/json/default-1b7ebb62c359adc9/0.0.0/0f7e3662623656454fcd2b650f34e886a7db4b9104504885bd462096cc7a9f51)\n",
"Downloading and preparing dataset json/default to /content/.cache_training/json/default-1b7ebb62c359adc9/0.0.0/0f7e3662623656454fcd2b650f34e886a7db4b9104504885bd462096cc7a9f51...\n",
"Downloading data files: 100% 3/3 [00:00<00:00, 8086.70it/s]\n",
"INFO:datasets.download.download_manager:Downloading took 0.0 min\n",
"INFO:datasets.download.download_manager:Checksum Computation took 0.0 min\n",
"Extracting data files: 100% 3/3 [00:00<00:00, 1396.70it/s]\n",
"INFO:datasets.utils.info_utils:Unable to verify checksums.\n",
"INFO:datasets.builder:Generating train split\n",
"INFO:datasets.builder:Generating validation split\n",
"INFO:datasets.builder:Generating test split\n",
"INFO:datasets.utils.info_utils:Unable to verify splits sizes.\n",
"Dataset json downloaded and prepared to /content/.cache_training/json/default-1b7ebb62c359adc9/0.0.0/0f7e3662623656454fcd2b650f34e886a7db4b9104504885bd462096cc7a9f51. Subsequent calls will reuse this data.\n",
"100% 3/3 [00:00<00:00, 178.87it/s]\n",
"[INFO|configuration_utils.py:660] 2023-02-12 13:55:37,412 >> loading configuration file config.json from cache at .cache_training/models--google--t5-v1_1-small/snapshots/8a88af75516269158a3aa488d1abdfd3d5e4ee49/config.json\n",
"[INFO|configuration_utils.py:712] 2023-02-12 13:55:37,415 >> Model config T5Config {\n",
" \"_name_or_path\": \"google/t5-v1_1-small\",\n",
" \"architectures\": [\n",
" \"T5ForConditionalGeneration\"\n",
" ],\n",
" \"d_ff\": 1024,\n",
" \"d_kv\": 64,\n",
" \"d_model\": 512,\n",
" \"decoder_start_token_id\": 0,\n",
" \"dense_act_fn\": \"gelu_new\",\n",
" \"dropout_rate\": 0.1,\n",
" \"eos_token_id\": 1,\n",
" \"feed_forward_proj\": \"gated-gelu\",\n",
" \"initializer_factor\": 1.0,\n",
" \"is_encoder_decoder\": true,\n",
" \"is_gated_act\": true,\n",
" \"layer_norm_epsilon\": 1e-06,\n",
" \"model_type\": \"t5\",\n",
" \"num_decoder_layers\": 8,\n",
" \"num_heads\": 6,\n",
" \"num_layers\": 8,\n",
" \"output_past\": true,\n",
" \"pad_token_id\": 0,\n",
" \"relative_attention_max_distance\": 128,\n",
" \"relative_attention_num_buckets\": 32,\n",
" \"tie_word_embeddings\": false,\n",
" \"transformers_version\": \"4.26.1\",\n",
" \"use_cache\": true,\n",
" \"vocab_size\": 32128\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:660] 2023-02-12 13:55:37,778 >> loading configuration file config.json from cache at .cache_training/models--google--t5-v1_1-small/snapshots/8a88af75516269158a3aa488d1abdfd3d5e4ee49/config.json\n",
"[INFO|configuration_utils.py:712] 2023-02-12 13:55:37,779 >> Model config T5Config {\n",
" \"_name_or_path\": \"google/t5-v1_1-small\",\n",
" \"architectures\": [\n",
" \"T5ForConditionalGeneration\"\n",
" ],\n",
" \"d_ff\": 1024,\n",
" \"d_kv\": 64,\n",
" \"d_model\": 512,\n",
" \"decoder_start_token_id\": 0,\n",
" \"dense_act_fn\": \"gelu_new\",\n",
" \"dropout_rate\": 0.1,\n",
" \"eos_token_id\": 1,\n",
" \"feed_forward_proj\": \"gated-gelu\",\n",
" \"initializer_factor\": 1.0,\n",
" \"is_encoder_decoder\": true,\n",
" \"is_gated_act\": true,\n",
" \"layer_norm_epsilon\": 1e-06,\n",
" \"model_type\": \"t5\",\n",
" \"num_decoder_layers\": 8,\n",
" \"num_heads\": 6,\n",
" \"num_layers\": 8,\n",
" \"output_past\": true,\n",
" \"pad_token_id\": 0,\n",
" \"relative_attention_max_distance\": 128,\n",
" \"relative_attention_num_buckets\": 32,\n",
" \"tie_word_embeddings\": false,\n",
" \"transformers_version\": \"4.26.1\",\n",
" \"use_cache\": true,\n",
" \"vocab_size\": 32128\n",
"}\n",
"\n",
"[INFO|tokenization_utils_base.py:1802] 2023-02-12 13:55:37,780 >> loading file spiece.model from cache at .cache_training/models--google--t5-v1_1-small/snapshots/8a88af75516269158a3aa488d1abdfd3d5e4ee49/spiece.model\n",
"[INFO|tokenization_utils_base.py:1802] 2023-02-12 13:55:37,780 >> loading file tokenizer.json from cache at None\n",
"[INFO|tokenization_utils_base.py:1802] 2023-02-12 13:55:37,780 >> loading file added_tokens.json from cache at None\n",
"[INFO|tokenization_utils_base.py:1802] 2023-02-12 13:55:37,780 >> loading file special_tokens_map.json from cache at .cache_training/models--google--t5-v1_1-small/snapshots/8a88af75516269158a3aa488d1abdfd3d5e4ee49/special_tokens_map.json\n",
"[INFO|tokenization_utils_base.py:1802] 2023-02-12 13:55:37,780 >> loading file tokenizer_config.json from cache at .cache_training/models--google--t5-v1_1-small/snapshots/8a88af75516269158a3aa488d1abdfd3d5e4ee49/tokenizer_config.json\n",
"[INFO|configuration_utils.py:660] 2023-02-12 13:55:37,781 >> loading configuration file config.json from cache at .cache_training/models--google--t5-v1_1-small/snapshots/8a88af75516269158a3aa488d1abdfd3d5e4ee49/config.json\n",
"[INFO|configuration_utils.py:712] 2023-02-12 13:55:37,782 >> Model config T5Config {\n",
" \"_name_or_path\": \"google/t5-v1_1-small\",\n",
" \"architectures\": [\n",
" \"T5ForConditionalGeneration\"\n",
" ],\n",
" \"d_ff\": 1024,\n",
" \"d_kv\": 64,\n",
" \"d_model\": 512,\n",
" \"decoder_start_token_id\": 0,\n",
" \"dense_act_fn\": \"gelu_new\",\n",
" \"dropout_rate\": 0.1,\n",
" \"eos_token_id\": 1,\n",
" \"feed_forward_proj\": \"gated-gelu\",\n",
" \"initializer_factor\": 1.0,\n",
" \"is_encoder_decoder\": true,\n",
" \"is_gated_act\": true,\n",
" \"layer_norm_epsilon\": 1e-06,\n",
" \"model_type\": \"t5\",\n",
" \"num_decoder_layers\": 8,\n",
" \"num_heads\": 6,\n",
" \"num_layers\": 8,\n",
" \"output_past\": true,\n",
" \"pad_token_id\": 0,\n",
" \"relative_attention_max_distance\": 128,\n",
" \"relative_attention_num_buckets\": 32,\n",
" \"tie_word_embeddings\": false,\n",
" \"transformers_version\": \"4.26.1\",\n",
" \"use_cache\": true,\n",
" \"vocab_size\": 32128\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:660] 2023-02-12 13:55:37,902 >> loading configuration file config.json from cache at .cache_training/models--google--t5-v1_1-small/snapshots/8a88af75516269158a3aa488d1abdfd3d5e4ee49/config.json\n",
"[INFO|configuration_utils.py:712] 2023-02-12 13:55:37,903 >> Model config T5Config {\n",
" \"_name_or_path\": \"google/t5-v1_1-small\",\n",
" \"architectures\": [\n",
" \"T5ForConditionalGeneration\"\n",
" ],\n",
" \"d_ff\": 1024,\n",
" \"d_kv\": 64,\n",
" \"d_model\": 512,\n",
" \"decoder_start_token_id\": 0,\n",
" \"dense_act_fn\": \"gelu_new\",\n",
" \"dropout_rate\": 0.1,\n",
" \"eos_token_id\": 1,\n",
" \"feed_forward_proj\": \"gated-gelu\",\n",
" \"initializer_factor\": 1.0,\n",
" \"is_encoder_decoder\": true,\n",
" \"is_gated_act\": true,\n",
" \"layer_norm_epsilon\": 1e-06,\n",
" \"model_type\": \"t5\",\n",
" \"num_decoder_layers\": 8,\n",
" \"num_heads\": 6,\n",
" \"num_layers\": 8,\n",
" \"output_past\": true,\n",
" \"pad_token_id\": 0,\n",
" \"relative_attention_max_distance\": 128,\n",
" \"relative_attention_num_buckets\": 32,\n",
" \"tie_word_embeddings\": false,\n",
" \"transformers_version\": \"4.26.1\",\n",
" \"use_cache\": true,\n",
" \"vocab_size\": 32128\n",
"}\n",
"\n",
"[INFO|modeling_utils.py:2275] 2023-02-12 13:55:38,089 >> loading weights file pytorch_model.bin from cache at .cache_training/models--google--t5-v1_1-small/snapshots/8a88af75516269158a3aa488d1abdfd3d5e4ee49/pytorch_model.bin\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:55:38,395 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|modeling_utils.py:2857] 2023-02-12 13:55:39,453 >> All model checkpoint weights were used when initializing T5ForConditionalGeneration.\n",
"\n",
"[INFO|modeling_utils.py:2865] 2023-02-12 13:55:39,453 >> All the weights of T5ForConditionalGeneration were initialized from the model checkpoint at google/t5-v1_1-small.\n",
"If your task is similar to the task the model of the checkpoint was trained on, you can already use T5ForConditionalGeneration for predictions without further training.\n",
"[INFO|configuration_utils.py:507] 2023-02-12 13:55:39,810 >> loading configuration file generation_config.json from cache at .cache_training/models--google--t5-v1_1-small/snapshots/8a88af75516269158a3aa488d1abdfd3d5e4ee49/generation_config.json\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:55:39,810 >> Generate config GenerationConfig {\n",
" \"_from_model_config\": true,\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"INFO:__main__:Freezing encoder weights\n",
"INFO:__main__:Freezing layer 1\n",
"INFO:__main__:Freezing layer 2\n",
"INFO:__main__:Freezing layer 3\n",
"INFO:__main__:Freezing layer 4\n",
"INFO:__main__:Freezing layer 5\n",
"INFO:__main__:Freezing layer 6\n",
"INFO:__main__:Freezing layer 7\n",
"INFO:__main__:Freezing layer 8\n",
"INFO:__main__:Freezing layer 9\n",
"INFO:__main__:Freezing layer 10\n",
"INFO:__main__:Freezing layer 11\n",
"INFO:__main__:Freezing layer 12\n",
"INFO:__main__:Freezing layer 13\n",
"INFO:__main__:Freezing layer 14\n",
"INFO:__main__:Freezing layer 15\n",
"INFO:__main__:Freezing layer 16\n",
"INFO:__main__:Freezing layer 17\n",
"INFO:__main__:Freezing layer 18\n",
"INFO:__main__:Freezing layer 19\n",
"INFO:__main__:Freezing layer 20\n",
"INFO:__main__:Ignoring layer 21\n",
"INFO:__main__:Ignoring layer 22\n",
"INFO:__main__:Ignoring layer 23\n",
"INFO:__main__:Ignoring layer 24\n",
"INFO:__main__:Ignoring layer 25\n",
"INFO:__main__:Ignoring layer 26\n",
"INFO:__main__:Ignoring layer 27\n",
"INFO:__main__:Ignoring layer 28\n",
"INFO:__main__:Ignoring layer 29\n",
"INFO:__main__:Ignoring layer 30\n",
"INFO:__main__:Ignoring layer 31\n",
"INFO:__main__:Ignoring layer 32\n",
"INFO:__main__:Ignoring layer 33\n",
"INFO:__main__:Ignoring layer 34\n",
"INFO:__main__:Ignoring layer 35\n",
"INFO:__main__:Ignoring layer 36\n",
"INFO:__main__:Ignoring layer 37\n",
"INFO:__main__:Ignoring layer 38\n",
"INFO:__main__:Ignoring layer 39\n",
"INFO:__main__:Ignoring layer 40\n",
"INFO:__main__:Ignoring layer 41\n",
"INFO:__main__:Ignoring layer 42\n",
"INFO:__main__:Ignoring layer 43\n",
"INFO:__main__:Ignoring layer 44\n",
"INFO:__main__:Ignoring layer 45\n",
"INFO:__main__:Ignoring layer 46\n",
"INFO:__main__:Ignoring layer 47\n",
"INFO:__main__:Ignoring layer 48\n",
"INFO:__main__:Ignoring layer 49\n",
"INFO:__main__:Ignoring layer 50\n",
"INFO:__main__:Ignoring layer 51\n",
"INFO:__main__:Ignoring layer 52\n",
"INFO:__main__:Ignoring layer 53\n",
"INFO:__main__:Ignoring layer 54\n",
"INFO:__main__:Ignoring layer 55\n",
"INFO:__main__:Ignoring layer 56\n",
"INFO:__main__:Ignoring layer 57\n",
"INFO:__main__:Ignoring layer 58\n",
"INFO:__main__:Ignoring layer 59\n",
"INFO:__main__:Ignoring layer 60\n",
"INFO:__main__:Ignoring layer 61\n",
"INFO:__main__:Ignoring layer 62\n",
"INFO:__main__:Ignoring layer 63\n",
"INFO:__main__:Ignoring layer 64\n",
"INFO:__main__:Ignoring layer 65\n",
"INFO:__main__:Ignoring layer 66\n",
"INFO:__main__:Ignoring layer 67\n",
"INFO:__main__:Ignoring layer 68\n",
"INFO:__main__:Ignoring layer 69\n",
"INFO:__main__:Ignoring layer 70\n",
"INFO:__main__:Ignoring layer 71\n",
"INFO:__main__:Ignoring layer 72\n",
"INFO:__main__:Ignoring layer 73\n",
"INFO:__main__:Ignoring layer 74\n",
"INFO:__main__:Ignoring layer 75\n",
"INFO:__main__:Using translation prefix: \"emotion classification: \"\n",
"Running tokenizer on train dataset: 0% 0/16 [00:00<?, ?ba/s]INFO:datasets.arrow_dataset:Caching processed dataset at /content/.cache_training/json/default-1b7ebb62c359adc9/0.0.0/0f7e3662623656454fcd2b650f34e886a7db4b9104504885bd462096cc7a9f51/cache-001f4849d453c414.arrow\n",
"Running tokenizer on train dataset: 100% 16/16 [00:01<00:00, 10.96ba/s]\n",
"Running tokenizer on validation dataset: 0% 0/2 [00:00<?, ?ba/s]INFO:datasets.arrow_dataset:Caching processed dataset at /content/.cache_training/json/default-1b7ebb62c359adc9/0.0.0/0f7e3662623656454fcd2b650f34e886a7db4b9104504885bd462096cc7a9f51/cache-c8a02691272c8316.arrow\n",
"Running tokenizer on validation dataset: 100% 2/2 [00:00<00:00, 7.39ba/s]\n",
"Running tokenizer on prediction dataset: 0% 0/2 [00:00<?, ?ba/s]INFO:datasets.arrow_dataset:Caching processed dataset at /content/.cache_training/json/default-1b7ebb62c359adc9/0.0.0/0f7e3662623656454fcd2b650f34e886a7db4b9104504885bd462096cc7a9f51/cache-03f5349878e5462a.arrow\n",
"Running tokenizer on prediction dataset: 100% 2/2 [00:00<00:00, 7.24ba/s]\n",
"/usr/local/lib/python3.8/dist-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n",
" warnings.warn(\n",
"[INFO|trainer.py:1650] 2023-02-12 13:55:46,378 >> ***** Running training *****\n",
"[INFO|trainer.py:1651] 2023-02-12 13:55:46,378 >> Num examples = 16000\n",
"[INFO|trainer.py:1652] 2023-02-12 13:55:46,378 >> Num Epochs = 1\n",
"[INFO|trainer.py:1653] 2023-02-12 13:55:46,378 >> Instantaneous batch size per device = 8\n",
"[INFO|trainer.py:1654] 2023-02-12 13:55:46,378 >> Total train batch size (w. parallel, distributed & accumulation) = 8\n",
"[INFO|trainer.py:1655] 2023-02-12 13:55:46,378 >> Gradient Accumulation steps = 1\n",
"[INFO|trainer.py:1656] 2023-02-12 13:55:46,378 >> Total optimization steps = 2000\n",
"[INFO|trainer.py:1657] 2023-02-12 13:55:46,379 >> Number of trainable parameters = 72211648\n",
" 0% 0/2000 [00:00<?, ?it/s][WARNING|logging.py:281] 2023-02-12 13:55:46,391 >> You're using a T5TokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.\n",
"{'loss': 10.2845, 'learning_rate': 3.7500000000000003e-05, 'epoch': 0.25}\n",
" 25% 500/2000 [00:58<03:25, 7.32it/s][INFO|trainer.py:2709] 2023-02-12 13:56:44,459 >> Saving model checkpoint to out/emotion/t5_v1_1/checkpoint-500\n",
"[INFO|configuration_utils.py:453] 2023-02-12 13:56:44,460 >> Configuration saved in out/emotion/t5_v1_1/checkpoint-500/config.json\n",
"[INFO|configuration_utils.py:336] 2023-02-12 13:56:44,464 >> Configuration saved in out/emotion/t5_v1_1/checkpoint-500/generation_config.json\n",
"[INFO|modeling_utils.py:1704] 2023-02-12 13:56:45,380 >> Model weights saved in out/emotion/t5_v1_1/checkpoint-500/pytorch_model.bin\n",
"[INFO|tokenization_utils_base.py:2160] 2023-02-12 13:56:45,381 >> tokenizer config file saved in out/emotion/t5_v1_1/checkpoint-500/tokenizer_config.json\n",
"[INFO|tokenization_utils_base.py:2167] 2023-02-12 13:56:45,382 >> Special tokens file saved in out/emotion/t5_v1_1/checkpoint-500/special_tokens_map.json\n",
"[INFO|tokenization_t5_fast.py:186] 2023-02-12 13:56:45,445 >> Copy vocab file to out/emotion/t5_v1_1/checkpoint-500/spiece.model\n",
"{'loss': 1.4701, 'learning_rate': 2.5e-05, 'epoch': 0.5}\n",
" 50% 1000/2000 [01:58<01:41, 9.83it/s][INFO|trainer.py:2709] 2023-02-12 13:57:44,710 >> Saving model checkpoint to out/emotion/t5_v1_1/checkpoint-1000\n",
"[INFO|configuration_utils.py:453] 2023-02-12 13:57:44,711 >> Configuration saved in out/emotion/t5_v1_1/checkpoint-1000/config.json\n",
"[INFO|configuration_utils.py:336] 2023-02-12 13:57:44,713 >> Configuration saved in out/emotion/t5_v1_1/checkpoint-1000/generation_config.json\n",
"[INFO|modeling_utils.py:1704] 2023-02-12 13:57:45,458 >> Model weights saved in out/emotion/t5_v1_1/checkpoint-1000/pytorch_model.bin\n",
"[INFO|tokenization_utils_base.py:2160] 2023-02-12 13:57:45,459 >> tokenizer config file saved in out/emotion/t5_v1_1/checkpoint-1000/tokenizer_config.json\n",
"[INFO|tokenization_utils_base.py:2167] 2023-02-12 13:57:45,459 >> Special tokens file saved in out/emotion/t5_v1_1/checkpoint-1000/special_tokens_map.json\n",
"[INFO|tokenization_t5_fast.py:186] 2023-02-12 13:57:45,499 >> Copy vocab file to out/emotion/t5_v1_1/checkpoint-1000/spiece.model\n",
"{'loss': 1.042, 'learning_rate': 1.25e-05, 'epoch': 0.75}\n",
" 75% 1500/2000 [02:59<00:49, 10.12it/s][INFO|trainer.py:2709] 2023-02-12 13:58:45,603 >> Saving model checkpoint to out/emotion/t5_v1_1/checkpoint-1500\n",
"[INFO|configuration_utils.py:453] 2023-02-12 13:58:45,604 >> Configuration saved in out/emotion/t5_v1_1/checkpoint-1500/config.json\n",
"[INFO|configuration_utils.py:336] 2023-02-12 13:58:45,606 >> Configuration saved in out/emotion/t5_v1_1/checkpoint-1500/generation_config.json\n",
"[INFO|modeling_utils.py:1704] 2023-02-12 13:58:46,382 >> Model weights saved in out/emotion/t5_v1_1/checkpoint-1500/pytorch_model.bin\n",
"[INFO|tokenization_utils_base.py:2160] 2023-02-12 13:58:46,383 >> tokenizer config file saved in out/emotion/t5_v1_1/checkpoint-1500/tokenizer_config.json\n",
"[INFO|tokenization_utils_base.py:2167] 2023-02-12 13:58:46,383 >> Special tokens file saved in out/emotion/t5_v1_1/checkpoint-1500/special_tokens_map.json\n",
"[INFO|tokenization_t5_fast.py:186] 2023-02-12 13:58:46,429 >> Copy vocab file to out/emotion/t5_v1_1/checkpoint-1500/spiece.model\n",
"{'loss': 0.9641, 'learning_rate': 0.0, 'epoch': 1.0}\n",
"100% 2000/2000 [03:59<00:00, 10.28it/s][INFO|trainer.py:2709] 2023-02-12 13:59:46,291 >> Saving model checkpoint to out/emotion/t5_v1_1/checkpoint-2000\n",
"[INFO|configuration_utils.py:453] 2023-02-12 13:59:46,292 >> Configuration saved in out/emotion/t5_v1_1/checkpoint-2000/config.json\n",
"[INFO|configuration_utils.py:336] 2023-02-12 13:59:46,294 >> Configuration saved in out/emotion/t5_v1_1/checkpoint-2000/generation_config.json\n",
"[INFO|modeling_utils.py:1704] 2023-02-12 13:59:47,067 >> Model weights saved in out/emotion/t5_v1_1/checkpoint-2000/pytorch_model.bin\n",
"[INFO|tokenization_utils_base.py:2160] 2023-02-12 13:59:47,068 >> tokenizer config file saved in out/emotion/t5_v1_1/checkpoint-2000/tokenizer_config.json\n",
"[INFO|tokenization_utils_base.py:2167] 2023-02-12 13:59:47,068 >> Special tokens file saved in out/emotion/t5_v1_1/checkpoint-2000/special_tokens_map.json\n",
"[INFO|tokenization_t5_fast.py:186] 2023-02-12 13:59:47,112 >> Copy vocab file to out/emotion/t5_v1_1/checkpoint-2000/spiece.model\n",
"[INFO|trainer.py:1901] 2023-02-12 13:59:49,360 >> \n",
"\n",
"Training completed. Do not forget to share your model on huggingface.co/models =)\n",
"\n",
"\n",
"{'train_runtime': 242.9807, 'train_samples_per_second': 65.849, 'train_steps_per_second': 8.231, 'train_loss': 3.4401537475585937, 'epoch': 1.0}\n",
"100% 2000/2000 [04:02<00:00, 8.23it/s]\n",
"[INFO|trainer.py:2709] 2023-02-12 13:59:49,362 >> Saving model checkpoint to out/emotion/t5_v1_1\n",
"[INFO|configuration_utils.py:453] 2023-02-12 13:59:49,363 >> Configuration saved in out/emotion/t5_v1_1/config.json\n",
"[INFO|configuration_utils.py:336] 2023-02-12 13:59:49,366 >> Configuration saved in out/emotion/t5_v1_1/generation_config.json\n",
"[INFO|modeling_utils.py:1704] 2023-02-12 13:59:50,576 >> Model weights saved in out/emotion/t5_v1_1/pytorch_model.bin\n",
"[INFO|tokenization_utils_base.py:2160] 2023-02-12 13:59:50,577 >> tokenizer config file saved in out/emotion/t5_v1_1/tokenizer_config.json\n",
"[INFO|tokenization_utils_base.py:2167] 2023-02-12 13:59:50,577 >> Special tokens file saved in out/emotion/t5_v1_1/special_tokens_map.json\n",
"[INFO|tokenization_t5_fast.py:186] 2023-02-12 13:59:50,620 >> Copy vocab file to out/emotion/t5_v1_1/spiece.model\n",
"***** train metrics *****\n",
" epoch = 1.0\n",
" train_loss = 3.4402\n",
" train_runtime = 0:04:02.98\n",
" train_samples = 16000\n",
" train_samples_per_second = 65.849\n",
" train_steps_per_second = 8.231\n",
"INFO:__main__:*** Evaluate ***\n",
"[INFO|trainer.py:2964] 2023-02-12 13:59:50,792 >> ***** Running Evaluation *****\n",
"[INFO|trainer.py:2966] 2023-02-12 13:59:50,792 >> Num examples = 2000\n",
"[INFO|trainer.py:2969] 2023-02-12 13:59:50,792 >> Batch size = 8\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:50,797 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 0% 0/250 [00:00<?, ?it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:50,893 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:50,972 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 1% 3/250 [00:00<00:12, 19.97it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:51,043 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:51,121 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 2% 5/250 [00:00<00:15, 16.17it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:51,191 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:51,256 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 3% 7/250 [00:00<00:15, 15.92it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:51,320 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:51,384 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 4% 9/250 [00:00<00:15, 15.25it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:51,462 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:51,569 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 4% 11/250 [00:00<00:18, 12.66it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:51,671 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:51,767 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 5% 13/250 [00:00<00:20, 11.82it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:51,864 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:51,957 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 6% 15/250 [00:01<00:20, 11.50it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:52,047 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:52,135 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 7% 17/250 [00:01<00:20, 11.49it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:52,221 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:52,308 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 8% 19/250 [00:01<00:20, 11.44it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:52,398 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:52,493 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 8% 21/250 [00:01<00:20, 11.22it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:52,584 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:52,671 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 9% 23/250 [00:01<00:20, 11.28it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:52,759 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:52,851 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 10% 25/250 [00:02<00:20, 11.17it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:52,943 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:53,030 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 11% 27/250 [00:02<00:20, 11.11it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:53,124 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:53,217 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 12% 29/250 [00:02<00:20, 10.81it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:53,321 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:53,412 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 12% 31/250 [00:02<00:20, 10.94it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:53,499 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:53,596 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 13% 33/250 [00:02<00:19, 10.90it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:53,684 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:53,772 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 14% 35/250 [00:02<00:19, 10.79it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:53,873 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:53,973 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 15% 37/250 [00:03<00:20, 10.57it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:54,072 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:54,179 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 16% 39/250 [00:03<00:20, 10.43it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:54,269 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:54,358 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 16% 41/250 [00:03<00:19, 10.53it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:54,454 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:54,549 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 17% 43/250 [00:03<00:19, 10.63it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:54,639 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:54,728 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 18% 45/250 [00:03<00:19, 10.53it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:54,834 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:54,930 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 19% 47/250 [00:04<00:19, 10.40it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:55,031 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:55,131 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 20% 49/250 [00:04<00:19, 10.35it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:55,226 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:55,325 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 20% 51/250 [00:04<00:19, 10.01it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:55,442 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:55,551 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 21% 53/250 [00:04<00:20, 9.76it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:55,658 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 22% 54/250 [00:04<00:20, 9.71it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:55,764 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 22% 55/250 [00:04<00:20, 9.48it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:55,879 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 22% 56/250 [00:05<00:20, 9.36it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:55,992 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 23% 57/250 [00:05<00:20, 9.30it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:56,101 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 23% 58/250 [00:05<00:20, 9.36it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:56,205 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 24% 59/250 [00:05<00:20, 9.44it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:56,309 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 24% 60/250 [00:05<00:20, 9.22it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:56,424 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 24% 61/250 [00:05<00:21, 8.89it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:56,547 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 25% 62/250 [00:05<00:21, 8.60it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:56,672 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 25% 63/250 [00:05<00:21, 8.82it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:56,778 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 26% 64/250 [00:05<00:20, 8.92it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:56,887 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 26% 65/250 [00:06<00:20, 9.05it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:56,994 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 26% 66/250 [00:06<00:20, 9.04it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:57,105 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 27% 67/250 [00:06<00:20, 9.02it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:57,217 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:57,294 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 28% 69/250 [00:06<00:16, 10.65it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:57,362 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:57,427 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 28% 71/250 [00:06<00:14, 12.11it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:57,493 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:57,567 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 29% 73/250 [00:06<00:14, 12.64it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:57,639 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:57,707 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 30% 75/250 [00:06<00:13, 13.28it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:57,775 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:57,844 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 31% 77/250 [00:07<00:12, 13.56it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:57,916 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:57,989 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 32% 79/250 [00:07<00:12, 13.46it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:58,067 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:58,134 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 32% 81/250 [00:07<00:12, 13.65it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:58,209 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:58,281 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 33% 83/250 [00:07<00:12, 13.88it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:58,348 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:58,412 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 34% 85/250 [00:07<00:11, 13.95it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:58,490 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:58,558 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 35% 87/250 [00:07<00:11, 14.01it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:58,631 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:58,698 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 36% 89/250 [00:07<00:11, 14.03it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:58,773 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:58,844 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 36% 91/250 [00:08<00:11, 14.04it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:58,917 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:58,987 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 37% 93/250 [00:08<00:11, 14.07it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:59,058 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:59,137 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 38% 95/250 [00:08<00:11, 13.94it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:59,203 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:59,269 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 39% 97/250 [00:08<00:10, 14.18it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:59,339 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:59,413 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 40% 99/250 [00:08<00:10, 14.27it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:59,476 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:59,542 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 40% 101/250 [00:08<00:10, 14.50it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:59,609 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:59,682 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 41% 103/250 [00:08<00:10, 14.18it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:59,758 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:59,831 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 42% 105/250 [00:09<00:10, 14.06it/s][INFO|configuration_utils.py:543] 2023-02-12 13:59:59,903 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 13:59:59,980 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 43% 107/250 [00:09<00:10, 14.11it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:00,043 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:00,121 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 44% 109/250 [00:09<00:10, 13.93it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:00,191 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:00,255 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 44% 111/250 [00:09<00:09, 14.13it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:00,328 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:00,400 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 45% 113/250 [00:09<00:09, 14.07it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:00,472 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:00,537 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 46% 115/250 [00:09<00:09, 14.11it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:00,612 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:00,676 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 47% 117/250 [00:09<00:09, 14.51it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:00,741 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:00,808 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 48% 119/250 [00:09<00:08, 14.59it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:00,878 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:00,945 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 48% 121/250 [00:10<00:08, 14.41it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:01,019 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:01,089 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 49% 123/250 [00:10<00:08, 14.22it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:01,164 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:01,243 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 50% 125/250 [00:10<00:08, 14.02it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:01,311 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:01,380 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 51% 127/250 [00:10<00:08, 14.08it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:01,452 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:01,534 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 52% 129/250 [00:10<00:08, 13.78it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:01,605 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:01,674 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 52% 131/250 [00:10<00:08, 13.96it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:01,744 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:01,827 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 53% 133/250 [00:11<00:08, 13.51it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:01,907 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:01,986 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 54% 135/250 [00:11<00:08, 13.36it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:02,056 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:02,135 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 55% 137/250 [00:11<00:08, 13.15it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:02,214 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:02,280 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 56% 139/250 [00:11<00:08, 13.61it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:02,348 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:02,422 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 56% 141/250 [00:11<00:07, 13.80it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:02,489 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:02,558 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 57% 143/250 [00:11<00:07, 13.98it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:02,627 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:02,704 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 58% 145/250 [00:11<00:07, 13.91it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:02,773 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:02,842 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 59% 147/250 [00:12<00:07, 14.03it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:02,914 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:02,990 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 60% 149/250 [00:12<00:07, 13.87it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:03,061 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:03,132 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 60% 151/250 [00:12<00:07, 13.80it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:03,207 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:03,274 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 61% 153/250 [00:12<00:06, 14.01it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:03,345 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:03,410 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 62% 155/250 [00:12<00:06, 14.30it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:03,478 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:03,542 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 63% 157/250 [00:12<00:06, 14.27it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:03,619 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:03,689 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 64% 159/250 [00:12<00:06, 14.25it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:03,760 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:03,829 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 64% 161/250 [00:13<00:06, 14.15it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:03,905 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:03,971 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 65% 163/250 [00:13<00:06, 14.45it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:04,035 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:04,103 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 66% 165/250 [00:13<00:05, 14.62it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:04,168 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:04,251 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 67% 167/250 [00:13<00:05, 14.29it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:04,315 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:04,378 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 68% 169/250 [00:13<00:05, 14.70it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:04,442 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:04,514 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 68% 171/250 [00:13<00:05, 14.58it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:04,582 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:04,649 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 69% 173/250 [00:13<00:05, 14.41it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:04,725 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:04,791 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 70% 175/250 [00:13<00:05, 14.39it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:04,864 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:04,931 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 71% 177/250 [00:14<00:05, 14.25it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:05,008 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:05,081 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 72% 179/250 [00:14<00:04, 14.31it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:05,146 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:05,216 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 72% 181/250 [00:14<00:04, 14.12it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:05,295 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:05,372 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 73% 183/250 [00:14<00:04, 14.06it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:05,436 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:05,503 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 74% 185/250 [00:14<00:04, 14.22it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:05,573 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:05,659 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 75% 187/250 [00:14<00:04, 13.69it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:05,731 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:05,801 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 76% 189/250 [00:14<00:04, 13.79it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:05,874 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:05,953 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 76% 191/250 [00:15<00:04, 13.66it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:06,023 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:06,097 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 77% 193/250 [00:15<00:04, 13.48it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:06,177 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:06,254 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 78% 195/250 [00:15<00:04, 13.37it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:06,329 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:06,394 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 79% 197/250 [00:15<00:03, 13.83it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:06,462 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:06,542 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 80% 199/250 [00:15<00:03, 13.75it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:06,610 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:06,678 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 80% 201/250 [00:15<00:03, 13.96it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:06,747 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:06,821 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 81% 203/250 [00:15<00:03, 13.95it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:06,891 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:06,955 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 82% 205/250 [00:16<00:03, 14.40it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:07,019 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:07,085 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 83% 207/250 [00:16<00:02, 14.41it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:07,158 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:07,227 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 84% 209/250 [00:16<00:03, 13.53it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:07,330 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:07,429 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 84% 211/250 [00:16<00:03, 12.27it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:07,527 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:07,621 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 85% 213/250 [00:16<00:03, 11.42it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:07,735 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:07,838 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 86% 215/250 [00:17<00:03, 10.97it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:07,930 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:08,036 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 87% 217/250 [00:17<00:03, 10.59it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:08,134 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:08,227 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 88% 219/250 [00:17<00:02, 10.43it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:08,333 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:08,432 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 88% 221/250 [00:17<00:02, 10.52it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:08,519 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:08,619 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 89% 223/250 [00:17<00:02, 10.30it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:08,722 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:08,815 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 90% 225/250 [00:18<00:02, 10.29it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:08,918 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:09,025 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 91% 227/250 [00:18<00:02, 10.05it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:09,127 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:09,230 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 92% 229/250 [00:18<00:02, 9.87it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:09,338 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 92% 230/250 [00:18<00:02, 9.82it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:09,443 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 92% 231/250 [00:18<00:01, 9.73it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:09,550 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:09,645 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 93% 233/250 [00:18<00:01, 10.05it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:09,736 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:09,835 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 94% 235/250 [00:19<00:01, 10.13it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:09,931 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:10,033 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 95% 237/250 [00:19<00:01, 10.08it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:10,134 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:10,231 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 96% 239/250 [00:19<00:01, 10.24it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:10,320 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:10,410 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 96% 241/250 [00:19<00:00, 10.06it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:10,527 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:10,622 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 97% 243/250 [00:19<00:00, 10.24it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:10,715 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:10,825 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 98% 245/250 [00:20<00:00, 10.13it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:10,917 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:11,014 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 99% 247/250 [00:20<00:00, 10.22it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:11,108 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:11,208 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"100% 249/250 [00:20<00:00, 10.06it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:11,314 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"100% 250/250 [00:21<00:00, 11.88it/s]\n",
"***** eval metrics *****\n",
" epoch = 1.0\n",
" eval_accuracy = 1.0\n",
" eval_bleu = 0.0\n",
" eval_gen_len = 2.0\n",
" eval_loss = 0.5495\n",
" eval_runtime = 0:00:21.13\n",
" eval_samples = 2000\n",
" eval_samples_per_second = 94.642\n",
" eval_steps_per_second = 11.83\n",
"INFO:__main__:*** Predict ***\n",
"[INFO|trainer.py:2964] 2023-02-12 14:00:11,930 >> ***** Running Prediction *****\n",
"[INFO|trainer.py:2966] 2023-02-12 14:00:11,930 >> Num examples = 2000\n",
"[INFO|trainer.py:2969] 2023-02-12 14:00:11,930 >> Batch size = 8\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:11,940 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 0% 0/250 [00:00<?, ?it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:12,048 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 1% 2/250 [00:00<00:13, 18.68it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:12,155 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:12,259 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 2% 4/250 [00:00<00:20, 11.93it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:12,365 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:12,470 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 2% 6/250 [00:00<00:23, 10.52it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:12,584 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:12,694 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 3% 8/250 [00:00<00:25, 9.66it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:12,816 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:12,926 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 4% 10/250 [00:00<00:25, 9.42it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:13,035 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:13,103 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 5% 12/250 [00:01<00:22, 10.71it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:13,172 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:13,245 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 6% 14/250 [00:01<00:20, 11.56it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:13,317 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:13,381 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 6% 16/250 [00:01<00:18, 12.55it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:13,447 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:13,513 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 7% 18/250 [00:01<00:18, 12.71it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:13,600 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:13,670 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 8% 20/250 [00:01<00:17, 13.21it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:13,738 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:13,806 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 9% 22/250 [00:01<00:17, 13.28it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:13,887 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:13,955 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 10% 24/250 [00:01<00:16, 13.68it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:14,023 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:14,090 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 10% 26/250 [00:02<00:16, 13.82it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:14,164 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:14,234 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 11% 28/250 [00:02<00:15, 14.00it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:14,302 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:14,372 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 12% 30/250 [00:02<00:15, 14.03it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:14,446 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:14,515 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 13% 32/250 [00:02<00:15, 14.18it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:14,582 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:14,655 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 14% 34/250 [00:02<00:15, 14.21it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:14,722 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:14,802 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 14% 36/250 [00:02<00:15, 13.86it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:14,875 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:14,944 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 15% 38/250 [00:02<00:15, 14.05it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:15,013 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:15,088 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 16% 40/250 [00:03<00:14, 14.09it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:15,153 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:15,217 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 17% 42/250 [00:03<00:14, 14.49it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:15,282 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:15,358 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 18% 44/250 [00:03<00:14, 14.25it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:15,428 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:15,490 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 18% 46/250 [00:03<00:13, 14.68it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:15,554 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:15,622 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 19% 48/250 [00:03<00:14, 14.39it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:15,700 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:15,773 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 20% 50/250 [00:03<00:14, 14.28it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:15,842 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:15,905 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 21% 52/250 [00:03<00:13, 14.33it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:15,981 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:16,048 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 22% 54/250 [00:04<00:13, 14.39it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:16,119 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:16,186 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 22% 56/250 [00:04<00:13, 14.46it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:16,257 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:16,328 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 23% 58/250 [00:04<00:13, 14.12it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:16,404 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:16,471 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 24% 60/250 [00:04<00:13, 14.39it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:16,537 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:16,609 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 25% 62/250 [00:04<00:13, 14.11it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:16,686 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:16,753 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 26% 64/250 [00:04<00:13, 13.96it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:16,833 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:16,909 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 26% 66/250 [00:04<00:13, 14.05it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:16,972 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:17,037 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 27% 68/250 [00:05<00:12, 14.44it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:17,102 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:17,178 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 28% 70/250 [00:05<00:12, 14.31it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:17,245 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:17,309 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 29% 72/250 [00:05<00:12, 14.64it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:17,374 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:17,440 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 30% 74/250 [00:05<00:12, 14.52it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:17,515 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:17,583 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 30% 76/250 [00:05<00:11, 14.60it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:17,650 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:17,721 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 31% 78/250 [00:05<00:12, 14.28it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:17,798 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:17,865 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 32% 80/250 [00:05<00:11, 14.46it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:17,931 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:17,997 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 33% 82/250 [00:06<00:11, 14.56it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:18,068 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:18,142 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 34% 84/250 [00:06<00:11, 14.39it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:18,209 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:18,274 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 34% 86/250 [00:06<00:11, 14.60it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:18,341 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:18,418 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 35% 88/250 [00:06<00:11, 14.21it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:18,491 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:18,559 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 36% 90/250 [00:06<00:11, 14.40it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:18,626 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:18,704 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 37% 92/250 [00:06<00:11, 14.01it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:18,778 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:18,853 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 38% 94/250 [00:06<00:11, 14.01it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:18,920 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:18,998 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 38% 96/250 [00:07<00:11, 13.93it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:19,066 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:19,134 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 39% 98/250 [00:07<00:10, 14.12it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:19,203 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:19,277 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 40% 100/250 [00:07<00:10, 14.03it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:19,348 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:19,413 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 41% 102/250 [00:07<00:10, 14.17it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:19,487 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:19,562 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 42% 104/250 [00:07<00:10, 14.10it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:19,629 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:19,698 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 42% 106/250 [00:07<00:10, 13.92it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:19,778 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:19,855 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 43% 108/250 [00:07<00:10, 13.88it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:19,922 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:19,988 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 44% 110/250 [00:08<00:09, 14.15it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:20,056 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:20,128 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 45% 112/250 [00:08<00:09, 14.24it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:20,195 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:20,263 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 46% 114/250 [00:08<00:09, 14.27it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:20,334 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:20,411 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 46% 116/250 [00:08<00:09, 14.15it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:20,478 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:20,550 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 47% 118/250 [00:08<00:09, 14.08it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:20,623 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:20,709 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 48% 120/250 [00:08<00:09, 13.31it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:20,792 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:20,864 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 49% 122/250 [00:08<00:09, 13.47it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:20,936 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:21,025 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 50% 124/250 [00:09<00:09, 13.24it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:21,093 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:21,161 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 50% 126/250 [00:09<00:09, 13.59it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:21,231 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:21,314 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 51% 128/250 [00:09<00:08, 13.62it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:21,377 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:21,439 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 52% 130/250 [00:09<00:08, 14.27it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:21,502 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:21,564 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 53% 132/250 [00:09<00:08, 14.45it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:21,636 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:21,701 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 54% 134/250 [00:09<00:07, 14.59it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:21,772 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:21,843 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 54% 136/250 [00:09<00:07, 14.29it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:21,917 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:21,981 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 55% 138/250 [00:09<00:07, 14.71it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:22,043 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:22,110 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 56% 140/250 [00:10<00:07, 14.87it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:22,176 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:22,245 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 57% 142/250 [00:10<00:07, 14.95it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:22,307 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:22,373 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 58% 144/250 [00:10<00:07, 14.83it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:22,444 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:22,517 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 58% 146/250 [00:10<00:07, 14.69it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:22,583 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:22,651 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 59% 148/250 [00:10<00:06, 14.79it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:22,716 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:22,795 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 60% 150/250 [00:10<00:07, 13.82it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:22,883 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:22,946 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 61% 152/250 [00:10<00:06, 14.26it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:23,013 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:23,117 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 62% 154/250 [00:11<00:07, 12.66it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:23,216 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:23,315 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 62% 156/250 [00:11<00:08, 11.45it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:23,430 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:23,539 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 63% 158/250 [00:11<00:08, 10.91it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:23,632 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:23,742 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 64% 160/250 [00:11<00:08, 10.49it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:23,842 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:23,944 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 65% 162/250 [00:12<00:08, 10.09it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:24,055 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:24,145 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 66% 164/250 [00:12<00:08, 10.13it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:24,251 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:24,342 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 66% 166/250 [00:12<00:08, 10.44it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:24,429 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:24,548 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 67% 168/250 [00:12<00:08, 9.86it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:24,658 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:24,758 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 68% 170/250 [00:12<00:08, 9.64it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:24,877 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 68% 171/250 [00:12<00:08, 9.64it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:24,981 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:25,074 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 69% 173/250 [00:13<00:07, 9.90it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:25,172 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:25,265 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 70% 175/250 [00:13<00:07, 10.17it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:25,357 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:25,452 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 71% 177/250 [00:13<00:07, 10.37it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:25,542 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:25,632 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 72% 179/250 [00:13<00:06, 10.46it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:25,729 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:25,835 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 72% 181/250 [00:13<00:06, 10.34it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:25,928 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:26,044 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 73% 183/250 [00:14<00:06, 10.03it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:26,141 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:26,232 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 74% 185/250 [00:14<00:06, 10.26it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:26,325 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:26,419 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 75% 187/250 [00:14<00:06, 10.35it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:26,515 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:26,606 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 76% 189/250 [00:14<00:05, 10.38it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:26,706 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:26,806 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 76% 191/250 [00:14<00:05, 10.04it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:26,921 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:27,015 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 77% 193/250 [00:15<00:05, 10.00it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:27,129 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:27,235 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 78% 195/250 [00:15<00:05, 9.70it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:27,342 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 78% 196/250 [00:15<00:05, 9.63it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:27,451 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 79% 197/250 [00:15<00:05, 9.53it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:27,560 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 79% 198/250 [00:15<00:05, 9.43it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:27,672 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 80% 199/250 [00:15<00:05, 9.42it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:27,776 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 80% 200/250 [00:15<00:05, 9.31it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:27,887 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 80% 201/250 [00:15<00:05, 9.15it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:28,005 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 81% 202/250 [00:16<00:05, 9.03it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:28,116 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 81% 203/250 [00:16<00:05, 9.13it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:28,222 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 82% 204/250 [00:16<00:05, 8.75it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:28,352 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 82% 205/250 [00:16<00:05, 8.54it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:28,472 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 82% 206/250 [00:16<00:05, 8.77it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:28,579 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 83% 207/250 [00:16<00:05, 8.45it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:28,708 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 83% 208/250 [00:16<00:04, 8.66it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:28,816 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 84% 209/250 [00:16<00:04, 8.63it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:28,934 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 84% 210/250 [00:16<00:04, 8.71it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:29,045 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:29,138 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 85% 212/250 [00:17<00:03, 10.00it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:29,211 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:29,286 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 86% 214/250 [00:17<00:03, 11.22it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:29,353 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:29,428 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 86% 216/250 [00:17<00:02, 12.02it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:29,499 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:29,566 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 87% 218/250 [00:17<00:02, 12.72it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:29,639 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:29,705 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 88% 220/250 [00:17<00:02, 13.10it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:29,782 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:29,854 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 89% 222/250 [00:17<00:02, 13.39it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:29,924 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:29,992 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 90% 224/250 [00:18<00:01, 13.49it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:30,070 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:30,137 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 90% 226/250 [00:18<00:01, 13.71it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:30,211 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:30,277 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 91% 228/250 [00:18<00:01, 13.85it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:30,353 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:30,420 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 92% 230/250 [00:18<00:01, 14.02it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:30,491 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:30,558 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 93% 232/250 [00:18<00:01, 14.23it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:30,628 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:30,696 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 94% 234/250 [00:18<00:01, 14.41it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:30,761 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:30,834 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 94% 236/250 [00:18<00:00, 14.29it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:30,904 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:30,977 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 95% 238/250 [00:18<00:00, 14.30it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:31,043 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:31,107 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 96% 240/250 [00:19<00:00, 14.62it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:31,175 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:31,257 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 97% 242/250 [00:19<00:00, 14.09it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:31,327 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:31,394 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 98% 244/250 [00:19<00:00, 14.49it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:31,455 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:31,523 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 98% 246/250 [00:19<00:00, 14.32it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:31,599 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:31,666 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
" 99% 248/250 [00:19<00:00, 14.58it/s][INFO|configuration_utils.py:543] 2023-02-12 14:00:31,730 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"[INFO|configuration_utils.py:543] 2023-02-12 14:00:31,796 >> Generate config GenerationConfig {\n",
" \"decoder_start_token_id\": 0,\n",
" \"eos_token_id\": 1,\n",
" \"pad_token_id\": 0,\n",
" \"transformers_version\": \"4.26.1\"\n",
"}\n",
"\n",
"100% 250/250 [00:19<00:00, 12.52it/s]\n",
"***** predict metrics *****\n",
" predict_accuracy = 1.0\n",
" predict_bleu = 0.0\n",
" predict_gen_len = 2.0\n",
" predict_loss = 0.5152\n",
" predict_runtime = 0:00:20.07\n",
" predict_samples = 2000\n",
" predict_samples_per_second = 99.604\n",
" predict_steps_per_second = 12.451\n",
"[INFO|modelcard.py:449] 2023-02-12 14:00:32,421 >> Dropping the following result as it does not have all the necessary fields:\n",
"{'task': {'name': 'Translation', 'type': 'translation'}, 'metrics': [{'name': 'Bleu', 'type': 'bleu', 'value': 0.0}, {'name': 'Accuracy', 'type': 'accuracy', 'value': 1.0}]}\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# Save model"
],
"metadata": {
"id": "f5MgGoTB4F43"
}
},
{
"cell_type": "code",
"source": [
"drive.mount('/content/drive')\n",
"!cp -r /content/out /content/drive/MyDrive/models"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "M1Wj7R0k38fL",
"outputId": "12944458-db4c-46ef-dc41-86257c10747e"
},
"execution_count": 69,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount(\"/content/drive\", force_remount=True).\n"
]
}
]
}
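    ,
    {
      "cell_type": "markdown",
      "source": [
        "A minimal sketch of reloading the saved checkpoint from Drive for later inference. The directory `/content/drive/MyDrive/models/out` is an assumption based on the `cp -r /content/out ...` command above; adjust the path to wherever the checkpoint actually ends up."
      ],
      "metadata": {}
    },
    {
      "cell_type": "code",
      "source": [
        "# Hedged sketch: reload the fine-tuned T5 checkpoint that was copied to Drive.\n",
        "# The model_dir path is an assumption derived from the copy command above.\n",
        "from transformers import AutoTokenizer, AutoModelForSeq2SeqLM\n",
        "\n",
        "model_dir = '/content/drive/MyDrive/models/out'  # hypothetical location of the saved model\n",
        "tokenizer = AutoTokenizer.from_pretrained(model_dir)\n",
        "model = AutoModelForSeq2SeqLM.from_pretrained(model_dir)\n",
        "\n",
        "# Quick smoke test: generate a prediction for a single example input.\n",
        "inputs = tokenizer('example input text', return_tensors='pt')\n",
        "output_ids = model.generate(**inputs, max_new_tokens=16)\n",
        "print(tokenizer.decode(output_ids[0], skip_special_tokens=True))"
      ],
      "metadata": {},
      "execution_count": null,
      "outputs": []
    }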
]
}