chinese-couplets-matching/modele_1a_1b.ipynb


%pip install --upgrade requests
%pip install jieba
%pip install pypinyin
%pip install -v torch torchtext --index-url https://download.pytorch.org/whl/cu118
%pip install chardet
%pip install transformers
%pip install ipywidgets
%pip install --upgrade jupyter_core jupyter_client
%pip install pandas
# re is part of the Python standard library; no pip install needed
%pip install scikit-learn
%pip install scipy==1.10.1
%pip install gensim
Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (2.32.3)
Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests) (3.3.2)
Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests) (3.7)
Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests) (2.0.7)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests) (2024.6.2)
Requirement already satisfied: jieba in /usr/local/lib/python3.10/dist-packages (0.42.1)
Requirement already satisfied: pypinyin in /usr/local/lib/python3.10/dist-packages (0.51.0)
Using pip 23.1.2 from /usr/local/lib/python3.10/dist-packages/pip (python 3.10)
Looking in indexes: https://download.pytorch.org/whl/cu118
Requirement already satisfied: torch in /usr/local/lib/python3.10/dist-packages (2.3.1+cu118)
Requirement already satisfied: torchtext in /usr/local/lib/python3.10/dist-packages (0.18.0)
Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch) (3.14.0)
Requirement already satisfied: typing-extensions>=4.8.0 in /usr/local/lib/python3.10/dist-packages (from torch) (4.12.1)
Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch) (1.12.1)
Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch) (3.3)
Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch) (3.1.4)
Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch) (2023.6.0)
Requirement already satisfied: nvidia-cuda-nvrtc-cu11==11.8.89 in /usr/local/lib/python3.10/dist-packages (from torch) (11.8.89)
Requirement already satisfied: nvidia-cuda-runtime-cu11==11.8.89 in /usr/local/lib/python3.10/dist-packages (from torch) (11.8.89)
Requirement already satisfied: nvidia-cuda-cupti-cu11==11.8.87 in /usr/local/lib/python3.10/dist-packages (from torch) (11.8.87)
Requirement already satisfied: nvidia-cudnn-cu11==8.7.0.84 in /usr/local/lib/python3.10/dist-packages (from torch) (8.7.0.84)
Requirement already satisfied: nvidia-cublas-cu11==11.11.3.6 in /usr/local/lib/python3.10/dist-packages (from torch) (11.11.3.6)
Requirement already satisfied: nvidia-cufft-cu11==10.9.0.58 in /usr/local/lib/python3.10/dist-packages (from torch) (10.9.0.58)
Requirement already satisfied: nvidia-curand-cu11==10.3.0.86 in /usr/local/lib/python3.10/dist-packages (from torch) (10.3.0.86)
Requirement already satisfied: nvidia-cusolver-cu11==11.4.1.48 in /usr/local/lib/python3.10/dist-packages (from torch) (11.4.1.48)
Requirement already satisfied: nvidia-cusparse-cu11==11.7.5.86 in /usr/local/lib/python3.10/dist-packages (from torch) (11.7.5.86)
Requirement already satisfied: nvidia-nccl-cu11==2.20.5 in /usr/local/lib/python3.10/dist-packages (from torch) (2.20.5)
Requirement already satisfied: nvidia-nvtx-cu11==11.8.86 in /usr/local/lib/python3.10/dist-packages (from torch) (11.8.86)
Requirement already satisfied: triton==2.3.1 in /usr/local/lib/python3.10/dist-packages (from torch) (2.3.1)
Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from torchtext) (4.66.4)
Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from torchtext) (2.32.3)
Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from torchtext) (1.25.2)
Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch) (2.1.5)
Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->torchtext) (3.3.2)
Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->torchtext) (3.7)
Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->torchtext) (2.0.7)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->torchtext) (2024.6.2)
Requirement already satisfied: mpmath<1.4.0,>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from sympy->torch) (1.3.0)
Requirement already satisfied: chardet in /usr/local/lib/python3.10/dist-packages (5.2.0)
Requirement already satisfied: transformers in /usr/local/lib/python3.10/dist-packages (4.41.2)
Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from transformers) (3.14.0)
Requirement already satisfied: huggingface-hub<1.0,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.23.2)
Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from transformers) (1.25.2)
Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from transformers) (24.0)
Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (6.0.1)
Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.10/dist-packages (from transformers) (2024.5.15)
Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from transformers) (2.32.3)
Requirement already satisfied: tokenizers<0.20,>=0.19 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.19.1)
Requirement already satisfied: safetensors>=0.4.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.4.3)
Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.10/dist-packages (from transformers) (4.66.4)
Requirement already satisfied: fsspec>=2023.5.0 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub<1.0,>=0.23.0->transformers) (2023.6.0)
Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub<1.0,>=0.23.0->transformers) (4.12.1)
Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (3.3.2)
Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (3.7)
Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (2.0.7)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (2024.6.2)
Requirement already satisfied: ipywidgets in /usr/local/lib/python3.10/dist-packages (7.7.1)
Requirement already satisfied: ipykernel>=4.5.1 in /usr/local/lib/python3.10/dist-packages (from ipywidgets) (5.5.6)
Requirement already satisfied: ipython-genutils~=0.2.0 in /usr/local/lib/python3.10/dist-packages (from ipywidgets) (0.2.0)
Requirement already satisfied: traitlets>=4.3.1 in /usr/local/lib/python3.10/dist-packages (from ipywidgets) (5.7.1)
Requirement already satisfied: widgetsnbextension~=3.6.0 in /usr/local/lib/python3.10/dist-packages (from ipywidgets) (3.6.6)
Requirement already satisfied: ipython>=4.0.0 in /usr/local/lib/python3.10/dist-packages (from ipywidgets) (7.34.0)
Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from ipywidgets) (3.0.11)
Requirement already satisfied: jupyter-client in /usr/local/lib/python3.10/dist-packages (from ipykernel>=4.5.1->ipywidgets) (8.6.2)
Requirement already satisfied: tornado>=4.2 in /usr/local/lib/python3.10/dist-packages (from ipykernel>=4.5.1->ipywidgets) (6.3.3)
Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.10/dist-packages (from ipython>=4.0.0->ipywidgets) (67.7.2)
Requirement already satisfied: jedi>=0.16 in /usr/local/lib/python3.10/dist-packages (from ipython>=4.0.0->ipywidgets) (0.19.1)
Requirement already satisfied: decorator in /usr/local/lib/python3.10/dist-packages (from ipython>=4.0.0->ipywidgets) (4.4.2)
Requirement already satisfied: pickleshare in /usr/local/lib/python3.10/dist-packages (from ipython>=4.0.0->ipywidgets) (0.7.5)
Requirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from ipython>=4.0.0->ipywidgets) (3.0.45)
Requirement already satisfied: pygments in /usr/local/lib/python3.10/dist-packages (from ipython>=4.0.0->ipywidgets) (2.16.1)
Requirement already satisfied: backcall in /usr/local/lib/python3.10/dist-packages (from ipython>=4.0.0->ipywidgets) (0.2.0)
Requirement already satisfied: matplotlib-inline in /usr/local/lib/python3.10/dist-packages (from ipython>=4.0.0->ipywidgets) (0.1.7)
Requirement already satisfied: pexpect>4.3 in /usr/local/lib/python3.10/dist-packages (from ipython>=4.0.0->ipywidgets) (4.9.0)
Requirement already satisfied: notebook>=4.4.1 in /usr/local/lib/python3.10/dist-packages (from widgetsnbextension~=3.6.0->ipywidgets) (6.5.5)
Requirement already satisfied: parso<0.9.0,>=0.8.3 in /usr/local/lib/python3.10/dist-packages (from jedi>=0.16->ipython>=4.0.0->ipywidgets) (0.8.4)
Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (3.1.4)
Requirement already satisfied: pyzmq<25,>=17 in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (24.0.1)
Requirement already satisfied: argon2-cffi in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (23.1.0)
Requirement already satisfied: jupyter-core>=4.6.1 in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (5.7.2)
Collecting jupyter-client (from ipykernel>=4.5.1->ipywidgets)
  Using cached jupyter_client-7.4.9-py3-none-any.whl (133 kB)
Requirement already satisfied: nbformat in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (5.10.4)
Requirement already satisfied: nbconvert>=5 in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (6.5.4)
Requirement already satisfied: nest-asyncio>=1.5 in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.6.0)
Requirement already satisfied: Send2Trash>=1.8.0 in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.8.3)
Requirement already satisfied: terminado>=0.8.3 in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.18.1)
Requirement already satisfied: prometheus-client in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.20.0)
Requirement already satisfied: nbclassic>=0.4.7 in /usr/local/lib/python3.10/dist-packages (from notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.1.0)
Requirement already satisfied: entrypoints in /usr/local/lib/python3.10/dist-packages (from jupyter-client->ipykernel>=4.5.1->ipywidgets) (0.4)
Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from jupyter-client->ipykernel>=4.5.1->ipywidgets) (2.8.2)
Requirement already satisfied: ptyprocess>=0.5 in /usr/local/lib/python3.10/dist-packages (from pexpect>4.3->ipython>=4.0.0->ipywidgets) (0.7.0)
Requirement already satisfied: wcwidth in /usr/local/lib/python3.10/dist-packages (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->ipython>=4.0.0->ipywidgets) (0.2.13)
Requirement already satisfied: platformdirs>=2.5 in /usr/local/lib/python3.10/dist-packages (from jupyter-core>=4.6.1->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (4.2.2)
Requirement already satisfied: notebook-shim>=0.2.3 in /usr/local/lib/python3.10/dist-packages (from nbclassic>=0.4.7->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.2.4)
Requirement already satisfied: lxml in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (4.9.4)
Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (4.12.3)
Requirement already satisfied: bleach in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (6.1.0)
Requirement already satisfied: defusedxml in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.7.1)
Requirement already satisfied: jupyterlab-pygments in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.3.0)
Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (2.1.5)
Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.8.4)
Requirement already satisfied: nbclient>=0.5.0 in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.10.0)
Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (24.0)
Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.5.1)
Requirement already satisfied: tinycss2 in /usr/local/lib/python3.10/dist-packages (from nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.3.0)
Requirement already satisfied: fastjsonschema>=2.15 in /usr/local/lib/python3.10/dist-packages (from nbformat->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (2.19.1)
Requirement already satisfied: jsonschema>=2.6 in /usr/local/lib/python3.10/dist-packages (from nbformat->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (4.19.2)
Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->jupyter-client->ipykernel>=4.5.1->ipywidgets) (1.16.0)
Requirement already satisfied: argon2-cffi-bindings in /usr/local/lib/python3.10/dist-packages (from argon2-cffi->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (21.2.0)
Requirement already satisfied: attrs>=22.2.0 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=2.6->nbformat->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (23.2.0)
Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=2.6->nbformat->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (2023.12.1)
Requirement already satisfied: referencing>=0.28.4 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=2.6->nbformat->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.35.1)
Requirement already satisfied: rpds-py>=0.7.1 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=2.6->nbformat->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.18.1)
Requirement already satisfied: jupyter-server<3,>=1.8 in /usr/local/lib/python3.10/dist-packages (from notebook-shim>=0.2.3->nbclassic>=0.4.7->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.24.0)
Requirement already satisfied: cffi>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from argon2-cffi-bindings->argon2-cffi->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.16.0)
Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.10/dist-packages (from beautifulsoup4->nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (2.5)
Requirement already satisfied: webencodings in /usr/local/lib/python3.10/dist-packages (from bleach->nbconvert>=5->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (0.5.1)
Requirement already satisfied: pycparser in /usr/local/lib/python3.10/dist-packages (from cffi>=1.0.1->argon2-cffi-bindings->argon2-cffi->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (2.22)
Requirement already satisfied: anyio<4,>=3.1.0 in /usr/local/lib/python3.10/dist-packages (from jupyter-server<3,>=1.8->notebook-shim>=0.2.3->nbclassic>=0.4.7->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (3.7.1)
Requirement already satisfied: websocket-client in /usr/local/lib/python3.10/dist-packages (from jupyter-server<3,>=1.8->notebook-shim>=0.2.3->nbclassic>=0.4.7->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.8.0)
Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.10/dist-packages (from anyio<4,>=3.1.0->jupyter-server<3,>=1.8->notebook-shim>=0.2.3->nbclassic>=0.4.7->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (3.7)
Requirement already satisfied: sniffio>=1.1 in /usr/local/lib/python3.10/dist-packages (from anyio<4,>=3.1.0->jupyter-server<3,>=1.8->notebook-shim>=0.2.3->nbclassic>=0.4.7->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.3.1)
Requirement already satisfied: exceptiongroup in /usr/local/lib/python3.10/dist-packages (from anyio<4,>=3.1.0->jupyter-server<3,>=1.8->notebook-shim>=0.2.3->nbclassic>=0.4.7->notebook>=4.4.1->widgetsnbextension~=3.6.0->ipywidgets) (1.2.1)
Installing collected packages: jupyter-client
  Attempting uninstall: jupyter-client
    Found existing installation: jupyter_client 8.6.2
    Uninstalling jupyter_client-8.6.2:
      Successfully uninstalled jupyter_client-8.6.2
Successfully installed jupyter-client-7.4.9
Requirement already satisfied: jupyter_core in /usr/local/lib/python3.10/dist-packages (5.7.2)
Requirement already satisfied: jupyter_client in /usr/local/lib/python3.10/dist-packages (7.4.9)
Collecting jupyter_client
  Using cached jupyter_client-8.6.2-py3-none-any.whl (105 kB)
Requirement already satisfied: platformdirs>=2.5 in /usr/local/lib/python3.10/dist-packages (from jupyter_core) (4.2.2)
Requirement already satisfied: traitlets>=5.3 in /usr/local/lib/python3.10/dist-packages (from jupyter_core) (5.7.1)
Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from jupyter_client) (2.8.2)
Requirement already satisfied: pyzmq>=23.0 in /usr/local/lib/python3.10/dist-packages (from jupyter_client) (24.0.1)
Requirement already satisfied: tornado>=6.2 in /usr/local/lib/python3.10/dist-packages (from jupyter_client) (6.3.3)
Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->jupyter_client) (1.16.0)
Installing collected packages: jupyter_client
  Attempting uninstall: jupyter_client
    Found existing installation: jupyter_client 7.4.9
    Uninstalling jupyter_client-7.4.9:
      Successfully uninstalled jupyter_client-7.4.9
ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.
notebook 6.5.5 requires jupyter-client<8,>=5.3.4, but you have jupyter-client 8.6.2 which is incompatible.
Successfully installed jupyter_client-8.6.2
Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.0.3)
Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2)
Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2023.4)
Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1)
Requirement already satisfied: numpy>=1.21.0 in /usr/local/lib/python3.10/dist-packages (from pandas) (1.25.2)
Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas) (1.16.0)
Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (1.2.2)
Requirement already satisfied: numpy>=1.17.3 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.25.2)
Requirement already satisfied: scipy>=1.3.2 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.10.1)
Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.4.2)
Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (3.5.0)
Requirement already satisfied: scipy==1.10.1 in /usr/local/lib/python3.10/dist-packages (1.10.1)
Requirement already satisfied: numpy<1.27.0,>=1.19.5 in /usr/local/lib/python3.10/dist-packages (from scipy==1.10.1) (1.25.2)
Requirement already satisfied: gensim in /usr/local/lib/python3.10/dist-packages (4.3.2)
Requirement already satisfied: numpy>=1.18.5 in /usr/local/lib/python3.10/dist-packages (from gensim) (1.25.2)
Requirement already satisfied: scipy>=1.7.0 in /usr/local/lib/python3.10/dist-packages (from gensim) (1.10.1)
Requirement already satisfied: smart-open>=1.8.1 in /usr/local/lib/python3.10/dist-packages (from gensim) (6.4.0)
!jupyter nbextension enable --py widgetsnbextension
import jieba
import pypinyin
import torch
from transformers import AutoTokenizer, AutoModel
import pandas
import re
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_iris
import numpy
Enabling notebook extension jupyter-js-widgets/extension...
Paths used for configuration of notebook: 
    	/root/.jupyter/nbconfig/notebook.json
Paths used for configuration of notebook: 
    	
      - Validating: OK
Paths used for configuration of notebook: 
    	/root/.jupyter/nbconfig/notebook.json
print(torch.cuda.is_available())
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
True

Input normalization: removing spaces and non-Chinese characters (punctuation).

TODO: rewrite this using a dictionary of Chinese characters?

# lower() - lowercase letters
# strip() - remove leading/trailing whitespace and non-printable characters
# remove punctuation marks
def normalizeString(s):
    s = s.lower().strip()
    s = re.sub(r"([.!?])", r"", s)
    s = re.sub(r"([,;:-])", r"", s)
    s = re.sub(r"([。,?”“《》·、!:;π…ㄚ])", r"", s)
    s = re.sub(r"([/])", r"", s)
    s = re.sub(r"(['\"])", r" ", s)
    return s.strip()

def normalizeChinese(s):
    s = normalizeString(s)
    pom = ""
    for c in s:
        if c != " ":
            pom+=c
        #pom+=" "
    return pom.strip()
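
A quick check of the two normalization helpers on a made-up example string (illustrative only); the expected result matches the cleaned form of the dataset rows printed below:

print(normalizeChinese("风弦未拨, 心先乱!"))  # expected: 风弦未拨心先乱
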
fixed_couplets_in = pandas.read_csv("fixed_couplets_in.txt", sep="\t", names=["in"], header=None)
fixed_couplets_out = pandas.read_csv("fixed_couplets_out.txt", sep="\t", names=["out"], header=None)

normalized_fixed_couplets_in=[]
for _ in fixed_couplets_in["in"]:
    normalized_fixed_couplets_in.append(normalizeChinese(_))
normalized_fixed_couplets_out=[]
for _ in fixed_couplets_out["out"]:
    normalized_fixed_couplets_out.append(normalizeChinese(_))

print(normalized_fixed_couplets_in[0])
print(normalized_fixed_couplets_out[0])
腾飞上铁锐意改革谋发展勇当千里马
和谐南供安全送电保畅通争做领头羊
fixed_couplets = pandas.DataFrame(
    {"in": normalized_fixed_couplets_in,
     "out": normalized_fixed_couplets_out
    }
    )
print(fixed_couplets)
                      in               out
0       腾飞上铁锐意改革谋发展勇当千里马  和谐南供安全送电保畅通争做领头羊
1                风弦未拨心先乱           夜幕已沉梦更闲
2                花梦粘于春袖口           莺声溅落柳枝头
3                晋世文章昌二陆           魏家词赋重三曹
4                一句相思吟岁月           千杯美酒醉风情
...                  ...               ...
744910           半榻诗书盈陋室           一墙字画靓寒庐
744911           借角青山埋姓字           掬壶明月洗尘心
744912      苑内尽天姿锦窠仙髻无双艳      亭前多国色金粉紫檀第一香
744913         浩淼洞庭极目天为界         安闲钓叟静心孰羡鱼
744914           志踏云梯能揽月           坚磨铁棒可成针

[744915 rows x 2 columns]

Discarding 99% of the data: keeping more than 5% fills all available memory and crashes the program.

Splitting the data into a training set and a test set.

male, duze = train_test_split(fixed_couplets,test_size=0.99,random_state=42)
treningowe, testowe = train_test_split(male,test_size=0.2,random_state=42)
print(treningowe)
print(testowe)
                       in                out
567354              宇高炎暑净              秋爽飒风来
118920              忧乐关天下              安危系一身
738591            一盏相思量寂寞            三分惆怅兑凄凉
509346  孝驻锦绣城喜吕梁歌飞春融三晋千秋画  义圆和谐梦看汾河景瑞水起九州万卷诗
75388             春临八桂海豚舞            福满九州彩凤飞
...                   ...                ...
116492  创中华古老文明当同日月齐辉功垂万代  启黎庶鸿蒙草昧是与山河并寿德颂千秋
91658         纠缠海角指相思何时作罢        浪迹天涯心倦怠哪处归依
101376       特地显英灵化被逢人歌泽渥       配天昭厚德恩深无处不波恬
262048        温暖鹅城展翅奋飞中国梦        祥和蛇岁铺春欢庆小康年
415192       百业一支歌歌伴和风谐雨唱       九江千古梦梦同朗月艳阳圆

[5959 rows x 2 columns]
                in         out
274864     林霭渐浓迷古寺     尘烟已远隐青山
222320     自古青天匡正义     而今华夏振雄风
100260  真心请客就该一五一五  假意为情何必我开我开
435928     爱本有心今不见     人如无欲意何求
446991     欲抹闲愁实不易     谁将片语问何求
...            ...         ...
213030     万象随缘观自在     一心发愿待君归
299155     春联妙句动心魄     小院雅风入彩光
643294     梅亭吹雪横霜笛     松麓邀云放月筝
628861     红似桃花白似雪     绿如李叶亮如霜
566605     数字双音分两用     联文对句限孤平

[1490 rows x 2 columns]

Resetting the indices to start from 0.

treningowe = treningowe.reset_index(drop=True)
testowe = testowe.reset_index(drop=True)
print(treningowe)
print(testowe)
                     in                out
0                 宇高炎暑净              秋爽飒风来
1                 忧乐关天下              安危系一身
2               一盏相思量寂寞            三分惆怅兑凄凉
3     孝驻锦绣城喜吕梁歌飞春融三晋千秋画  义圆和谐梦看汾河景瑞水起九州万卷诗
4               春临八桂海豚舞            福满九州彩凤飞
...                 ...                ...
5954  创中华古老文明当同日月齐辉功垂万代  启黎庶鸿蒙草昧是与山河并寿德颂千秋
5955        纠缠海角指相思何时作罢        浪迹天涯心倦怠哪处归依
5956       特地显英灵化被逢人歌泽渥       配天昭厚德恩深无处不波恬
5957        温暖鹅城展翅奋飞中国梦        祥和蛇岁铺春欢庆小康年
5958       百业一支歌歌伴和风谐雨唱       九江千古梦梦同朗月艳阳圆

[5959 rows x 2 columns]
              in         out
0        林霭渐浓迷古寺     尘烟已远隐青山
1        自古青天匡正义     而今华夏振雄风
2     真心请客就该一五一五  假意为情何必我开我开
3        爱本有心今不见     人如无欲意何求
4        欲抹闲愁实不易     谁将片语问何求
...          ...         ...
1485     万象随缘观自在     一心发愿待君归
1486     春联妙句动心魄     小院雅风入彩光
1487     梅亭吹雪横霜笛     松麓邀云放月筝
1488     红似桃花白似雪     绿如李叶亮如霜
1489     数字双音分两用     联文对句限孤平

[1490 rows x 2 columns]

The pypinyin package predicts the pinyin pronunciation well without needing the jieba package.

from pypinyin import pinyin, lazy_pinyin, Style

zdanie = treningowe["in"][4]
print(zdanie)
print(pinyin(zdanie, style=Style.TONE3, neutral_tone_with_five=True))
print(lazy_pinyin(zdanie, style=Style.TONE3, neutral_tone_with_five=True))

slowa = list(jieba.cut(zdanie))
print(slowa)
print(pinyin(slowa, style=Style.TONE3, neutral_tone_with_five=True))
print(lazy_pinyin(slowa, style=Style.TONE3, neutral_tone_with_five=True))
春临八桂海豚舞
[['chun1'], ['lin2'], ['ba1'], ['gui4'], ['hai3'], ['tun2'], ['wu3']]
['chun1', 'lin2', 'ba1', 'gui4', 'hai3', 'tun2', 'wu3']
['春临', '八桂', '海豚', '舞']
[['chun1'], ['lin2'], ['ba1'], ['gui4'], ['hai3'], ['tun2'], ['wu3']]
['chun1', 'lin2', 'ba1', 'gui4', 'hai3', 'tun2', 'wu3']

Splitting the pinyin pronunciation into initials, finals, and tones.

Converting them to numbers with a dictionary.

from pypinyin.contrib.tone_convert import to_finals, to_initials
# initials table (声母表)
_INITIALS=['b','p','m','f','d','t','n','l','g','k','h','j','q','x','zh','ch','sh','r','z','c','s',]
# initials table that also treats y and w as initials
_INITIALS_NOT_STRICT=_INITIALS+['y','w']
# finals table (韵母表)
_FINALS=['i','u','ü','a','ia','ua','o','uo','e','ie','üe','ai','uai','ei','uei','ao','iao','ou','iou','an','ian','uan','üan','en','in','uen','ün','ang','iang','uang','eng','ing','ueng','ong','iong','er','ê',]

slownik_initials = {}
licznik = 1
for symbol in _INITIALS+[""]:
    slownik_initials[symbol] = licznik
    licznik+=1

slownik_finals = {}
licznik = 1
for symbol in _FINALS+[""]:
    slownik_finals[symbol] = licznik
    licznik+=1

def poczatek_koniec_ton(zapis_pinyin_3):
    poczatek = slownik_initials[to_initials(zapis_pinyin_3)]
    koniec = slownik_finals[to_finals(zapis_pinyin_3).replace('v', 'ü')]
    ton = int(zapis_pinyin_3[-1])
    return poczatek, koniec, ton

def wymowy_i_tony_zdania(zdanie):
    zapis_pinyin_3_zdania = lazy_pinyin(zdanie, style=Style.TONE3, neutral_tone_with_five=True)
    poczatki = []
    konce =[]
    tony = []
    # print(zdanie, zapis_pinyin_3_zdania)
    for zp3 in zapis_pinyin_3_zdania:
        p,k,t = poczatek_koniec_ton(zp3)
        poczatki.append(p)
        konce.append(k)
        tony.append(t)
    return poczatki, konce, tony

def dopasuj_dlugosc_wektora(wektor, dlugosc_wektora):
    # truncate or zero-pad to exactly dlugosc_wektora elements; always return a
    # numpy array so torch.from_numpy works on the result in both branches
    if len(wektor)>dlugosc_wektora:
        wynik = numpy.array(wektor[:dlugosc_wektora])
    else:
        wynik = numpy.pad(wektor,(0,dlugosc_wektora-len(wektor)), mode='constant', constant_values=0)
    return wynik
def poczatki_konce_tony_dla_zdan(zdania, liczba_wejscia):
    poczatki_wyn = []
    konce_wyn = []
    tony_wyn = []

    for zdanie in zdania:
        poczatki, konce, tony = wymowy_i_tony_zdania(zdanie)

        poczatki = dopasuj_dlugosc_wektora(poczatki, liczba_wejscia)
        konce = dopasuj_dlugosc_wektora(konce, liczba_wejscia)
        tony = dopasuj_dlugosc_wektora(tony, liczba_wejscia)

        poczatki_wyn.append(poczatki)
        konce_wyn.append(konce)
        tony_wyn.append(tony)

    return poczatki_wyn, konce_wyn, tony_wyn
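
As a quick sanity check (illustrative only), single syllables and a whole line can be encoded directly; the expected values follow from the dictionaries above and agree with the encoded training example printed further below:

print(poczatek_koniec_ton("gao1"))  # expected (9, 16, 1): initial 'g', final 'ao', tone 1
print(poczatek_koniec_ton("yu3"))   # expected (22, 3, 3): empty initial, final 'ü', tone 3
print(wymowy_i_tony_zdania("宇高炎暑净"))  # expected ([22, 9, 22, 17, 12], [3, 16, 21, 2, 32], [3, 1, 2, 3, 4])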

Initials, finals, and tones of the training couplets.

print(len(treningowe["in"]))
print(treningowe["in"][0])
print(treningowe["out"][0])

liczba_wejscia = 35

poczatki_treningowe_in, konce_treningowe_in, tony_treningowe_in = poczatki_konce_tony_dla_zdan(treningowe["in"], liczba_wejscia)
poczatki_treningowe_out, konce_treningowe_out, tony_treningowe_out = poczatki_konce_tony_dla_zdan(treningowe["out"], liczba_wejscia)

print(len(poczatki_treningowe_in))
print(poczatki_treningowe_in[0])
print(konce_treningowe_in[0])
print(tony_treningowe_in[0])
print(poczatki_treningowe_out[0])
print(konce_treningowe_out[0])
print(tony_treningowe_out[0])
5959
宇高炎暑净
秋爽飒风来
5959
[22  9 22 17 12  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0
  0  0  0  0  0  0  0  0  0  0  0]
[ 3 16 21  2 32  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0
  0  0  0  0  0  0  0  0  0  0  0]
[3 1 2 3 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
[13 17 21  4  8  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0
  0  0  0  0  0  0  0  0  0  0  0]
[19 30  4 31 12  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0
  0  0  0  0  0  0  0  0  0  0  0]
[1 3 4 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
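
Since liczba_wejscia = 35 is where padding and truncation kick in, a quick (illustrative) check that no training line actually exceeds this length:

print(max(len(s) for s in treningowe["in"]), max(len(s) for s in treningowe["out"]))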

BAAI embeddings of the training couplets. https://huggingface.co/BAAI/bge-small-zh-v1.5

# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained("BAAI/bge-small-zh-v1.5")
model = AutoModel.from_pretrained("BAAI/bge-small-zh-v1.5")
model.eval()

def zanurzenia_zdan(lista_zdan):
    # Sentences we want sentence embeddings for
    #sentences = ["样例数据-1样例数据", "样例数据-2样例数据"]
    sentences = lista_zdan

    # Tokenize sentences
    encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
    # for s2p(short query to long passage) retrieval task, add an instruction to query (not add instruction for passages)
    # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt')

    # Compute token embeddings
    with torch.no_grad():
        model_output = model(**encoded_input)
        # Perform pooling. In this case, cls pooling.
        sentence_embeddings = model_output[0][:, 0]
    # normalize embeddings
    sentence_embeddings = torch.nn.functional.normalize(sentence_embeddings, p=2, dim=1)
    # print("Sentence embeddings shape:", sentence_embeddings.shape)
    # print("Sentence embeddings:", sentence_embeddings)

    return sentence_embeddings

def zanurzenie_zdania(zdanie):
    # Tokenize sentences
    encoded_input = tokenizer(zdanie, padding=True, truncation=True, return_tensors='pt')
    # for s2p(short query to long passage) retrieval task, add an instruction to query (not add instruction for passages)
    # encoded_input = tokenizer([instruction + q for q in queries], padding=True, truncation=True, return_tensors='pt')

    # Compute token embeddings
    with torch.no_grad():
        model_output = model(**encoded_input)
        # Perform pooling. In this case, cls pooling.
        sentence_embedding = model_output[0][:, 0]
    # normalize embeddings
    sentence_embedding = torch.nn.functional.normalize(sentence_embedding, p=2, dim=1)

    return sentence_embedding
treningowe_in_lista = treningowe["in"].tolist()
treningowe_out_lista = treningowe["out"].tolist()

print(len(treningowe_in_lista))
print(treningowe_in_lista[0])
print(treningowe_out_lista[0])

zanurzenia_treningowe_in = zanurzenia_zdan(treningowe_in_lista)
zanurzenia_treningowe_out = zanurzenia_zdan(treningowe_out_lista)

print(zanurzenia_treningowe_in.shape)
print(zanurzenia_treningowe_in[0])
print(zanurzenia_treningowe_out[0])
5959
宇高炎暑净
秋爽飒风来
torch.Size([5959, 512])
tensor([-3.1409e-02,  4.2919e-02, -1.4236e-02,  6.2288e-02, -3.2497e-02,
        -5.3290e-02,  4.7686e-02,  7.7745e-02, -2.0447e-02,  2.8347e-02,
         1.3510e-02, -2.3332e-01,  8.9271e-04,  4.0544e-02,  9.2784e-05,
        -1.1740e-02, -1.6238e-02, -4.7785e-02, -8.7547e-02,  6.0501e-02,
        -1.7588e-02, -4.3948e-03, -3.1034e-02, -8.8176e-03,  3.8507e-02,
        -5.6918e-02,  6.8194e-02,  7.5235e-03,  6.6778e-03,  2.0831e-02,
         9.5349e-04,  4.5033e-02, -9.4392e-03, -3.2470e-02, -3.5810e-02,
        -2.1215e-02, -2.9089e-02,  3.8043e-02, -2.6267e-02,  5.2310e-02,
        -6.3633e-02,  4.2117e-02,  1.7834e-02, -5.5019e-02, -7.9315e-02,
         8.2654e-04, -1.0802e-02,  1.8213e-02,  6.5130e-02, -7.6177e-03,
        -3.8167e-02,  5.3484e-02,  3.5490e-02,  2.7366e-02, -3.0560e-02,
        -7.8364e-02, -3.3920e-02,  7.5826e-03, -1.6268e-02,  1.9344e-02,
         3.4404e-02,  1.1855e-02, -3.5319e-02, -1.1730e-02,  6.7642e-02,
         3.7650e-04,  1.9837e-02,  8.3773e-03, -1.1069e-02,  3.2787e-02,
         4.2034e-03,  3.5917e-03,  5.5923e-02, -5.6078e-02, -3.9402e-02,
        -5.3479e-02,  1.2802e-03, -3.0281e-02, -5.1191e-02, -1.7256e-02,
         1.4092e-02, -2.8433e-02, -5.4772e-02, -4.3614e-02, -1.3096e-02,
         1.8400e-02, -1.1333e-01, -4.3593e-02,  2.8702e-02,  5.6857e-04,
        -9.5228e-03, -9.2662e-03,  1.4085e-02, -1.0477e-02, -7.0193e-02,
         6.9635e-02, -2.4111e-02,  1.2565e-02, -6.6401e-02, -4.1899e-02,
        -2.5085e-02, -6.3970e-02,  5.7718e-02,  6.8888e-02, -6.1210e-02,
         6.5007e-02, -8.2084e-02, -5.9957e-02,  6.8816e-03, -1.0067e-03,
         3.7481e-02,  2.3379e-02,  2.7860e-02,  4.7394e-02, -3.3720e-02,
        -6.1802e-02,  9.0069e-02,  1.4320e-02, -6.6455e-02, -8.1411e-02,
        -4.2551e-02, -4.0180e-02,  7.1318e-02,  4.7259e-02, -6.1807e-03,
         1.6717e-02, -2.7057e-02, -1.9109e-02,  5.8335e-02,  4.2307e-02,
         3.6037e-03,  8.2558e-03, -2.4797e-03, -1.4135e-02, -2.2754e-02,
        -2.2781e-03, -5.8836e-03, -1.1159e-01,  2.3304e-03, -1.3209e-02,
        -1.2044e-03, -2.6060e-02, -1.6546e-02, -1.3189e-02,  7.0787e-02,
        -2.1846e-02, -3.3586e-02, -2.8873e-02,  3.0573e-02, -5.1569e-02,
         7.2466e-03,  5.6971e-02,  3.2711e-02, -1.2560e-02, -5.2461e-02,
        -5.3417e-03, -4.4110e-03, -1.7080e-02, -1.5891e-02,  5.0826e-02,
         5.6342e-02, -1.2345e-03,  2.6094e-02,  3.0109e-02, -1.6446e-02,
        -2.2257e-02,  3.8077e-03,  8.1443e-02, -4.4790e-02,  7.7875e-02,
         5.6147e-02, -1.2718e-02, -4.7217e-02,  3.8158e-02, -4.8242e-03,
        -3.3682e-02, -3.7652e-02,  5.8250e-02,  1.5820e-02,  3.1382e-02,
         1.3865e-02,  7.9274e-02, -2.9852e-02,  5.9575e-02, -9.7192e-03,
        -1.3883e-02,  1.3156e-02, -2.2232e-02, -2.1665e-02, -3.9232e-02,
        -9.3653e-03, -6.2756e-03,  3.1561e-02,  3.8607e-02,  8.6990e-03,
         6.4413e-02, -7.7960e-02, -4.7945e-02, -1.4091e-01,  5.0533e-02,
         2.0320e-03, -5.1708e-02, -1.9163e-03,  2.4024e-02, -2.0240e-02,
         2.1377e-02,  3.5398e-03,  3.7191e-02, -3.6258e-02, -5.4974e-02,
        -1.6857e-02,  6.0301e-02,  1.4563e-02, -4.0892e-02,  1.4841e-01,
         2.6193e-02, -9.0119e-04,  8.5365e-03,  1.2007e-02, -1.0382e-01,
         3.3631e-02,  7.6653e-02,  2.4468e-02, -7.0872e-03, -1.8002e-02,
        -8.5119e-03, -1.4168e-02,  1.5942e-02, -2.9196e-02, -6.4297e-04,
         1.6337e-02, -1.9513e-02,  1.3898e-02,  3.9867e-03,  2.6298e-02,
         4.6379e-02, -7.1418e-02,  1.0134e-02,  5.1168e-02, -4.9732e-02,
        -5.5967e-02, -7.8217e-03,  2.8585e-02, -8.7352e-03, -9.9658e-05,
         1.2468e-02, -7.3671e-02, -2.2079e-03,  5.4546e-03,  3.5459e-02,
        -3.2250e-02,  9.3758e-02, -1.1456e-02, -3.1892e-02, -4.1353e-02,
         3.3040e-02, -3.7227e-03,  2.6740e-02, -6.4840e-02,  4.1143e-02,
        -1.8554e-02,  1.5613e-02,  5.0357e-03,  1.1793e-02,  1.3087e-02,
        -4.2158e-02, -1.5489e-02,  2.7196e-02, -3.9413e-04,  1.1546e-02,
        -3.9742e-02,  7.9554e-03,  5.7563e-02, -5.5298e-02,  1.0457e-02,
         5.1986e-02, -3.2875e-03,  2.1230e-02,  6.3298e-03,  7.0061e-03,
         1.7268e-02,  7.4763e-02, -8.9870e-02,  1.9039e-02, -5.3741e-03,
        -4.5542e-02, -1.4080e-01,  3.6304e-02, -1.4179e-02, -2.1746e-02,
         1.3878e-02, -8.1540e-02,  4.5647e-02,  2.8653e-02, -1.1617e-02,
         2.7410e-02, -3.9985e-02, -6.1613e-03,  6.7774e-02,  1.1290e-02,
         4.6115e-02,  2.3358e-02, -2.1498e-02, -4.1548e-02,  1.3849e-02,
         1.2356e-02,  4.1165e-03,  4.5328e-02, -3.5151e-02, -2.3484e-03,
         1.2952e-03, -2.0535e-02, -3.0788e-02, -4.7044e-02,  1.5876e-02,
        -1.1296e-03, -1.8713e-02,  1.8543e-02,  5.3209e-02,  2.7803e-02,
         1.1028e-03,  2.9207e-02, -3.6119e-02, -1.6165e-02,  1.7555e-02,
        -8.2125e-03,  6.3445e-03, -4.5027e-02, -4.0817e-02,  4.5773e-02,
        -2.2641e-02,  5.2889e-02,  1.4512e-02, -1.9522e-02, -5.6481e-02,
        -1.1060e-02, -4.3722e-02,  1.4095e-02,  2.4259e-02,  6.0377e-02,
        -7.2628e-02,  3.9760e-02, -8.1585e-02, -9.1420e-03,  1.8809e-03,
         1.0487e-01, -4.9327e-02, -5.5549e-03,  4.6258e-02,  1.6701e-02,
        -1.6163e-02,  2.6286e-02, -2.7700e-02,  8.0984e-03,  2.4454e-02,
         4.4797e-02,  3.4455e-02, -6.0768e-02, -2.5864e-02,  1.0166e-03,
         5.3068e-03,  4.2425e-03, -5.4753e-02, -3.1478e-02,  3.7924e-02,
         1.0266e-03,  8.4248e-03,  4.5199e-02,  1.5580e-02, -5.6708e-03,
         3.6769e-02, -5.3641e-02,  4.1779e-02, -3.2060e-02, -2.6757e-02,
        -4.7505e-02, -8.2457e-02,  7.2944e-02,  1.5763e-02,  1.6309e-02,
         7.5724e-04,  6.4329e-02,  1.9464e-02, -2.7392e-03,  4.2363e-02,
        -1.0416e-01, -1.6209e-02,  4.5399e-02,  1.0589e-01, -2.4638e-02,
         2.6731e-02,  2.6622e-02,  4.0844e-02,  1.0323e-01,  3.3835e-02,
        -4.2006e-02,  4.2951e-02,  3.1068e-02, -2.4564e-03, -1.2811e-02,
        -8.4661e-03,  3.9647e-02, -1.1733e-01,  2.6631e-02, -3.1336e-02,
        -1.0026e-01, -6.3246e-03, -1.8747e-02, -8.8051e-03, -6.3902e-02,
         2.0967e-01,  3.4409e-02, -1.6454e-02,  3.0606e-02, -2.1813e-02,
         9.1961e-02,  4.4120e-02, -2.1517e-02, -3.4456e-02, -5.8409e-02,
         2.7488e-02,  1.9422e-02, -1.2918e-02, -7.2962e-03,  2.8859e-02,
        -4.2516e-02, -4.2966e-02, -1.9645e-02, -6.4296e-02, -4.0894e-02,
        -2.8706e-02, -5.6346e-02,  3.4201e-02, -3.9250e-03,  7.5307e-02,
         6.5123e-03, -4.7450e-02, -3.1443e-02, -5.0485e-02,  6.2536e-02,
        -2.6723e-02,  3.9097e-02,  2.5871e-03,  4.8988e-02, -1.2248e-05,
         1.8120e-02, -1.4111e-02, -2.9327e-02,  7.4617e-02,  8.2369e-03,
        -3.3414e-02, -1.0466e-02, -4.4706e-03, -1.3613e-02, -5.4163e-02,
        -4.4011e-02, -7.4851e-02, -5.5124e-02,  8.7570e-03,  3.8449e-02,
        -5.1844e-02,  5.9674e-03,  7.5129e-03,  1.0718e-02,  2.1981e-02,
         4.4945e-02, -3.4382e-02, -5.1930e-02,  1.5666e-02, -3.3479e-02,
        -2.9640e-03, -1.2958e-02, -3.5843e-02, -2.9896e-02,  7.1761e-02,
        -3.2109e-02,  1.1761e-01,  1.1047e-02, -4.7208e-02, -3.3970e-02,
         7.1073e-02, -9.1408e-02,  6.3568e-03, -7.5566e-03, -8.2016e-03,
        -9.3746e-03,  1.5221e-02,  7.5551e-03, -4.2618e-02,  2.9687e-02,
         4.7213e-02, -5.6087e-02, -3.5213e-02, -6.5220e-02,  1.8469e-02,
         6.4949e-02, -1.9809e-02, -8.2783e-02, -3.2709e-03, -5.1782e-02,
        -6.3309e-02,  3.6822e-02, -8.6364e-04, -1.7256e-02,  6.0698e-03,
        -2.0665e-02,  1.7764e-02,  8.8567e-02, -5.4184e-02, -1.4816e-02,
         6.1665e-02,  2.7374e-02])
tensor([ 2.4096e-02,  7.3348e-02, -8.4988e-03,  2.1168e-02, -5.1689e-02,
        -3.5376e-04,  4.7075e-02,  2.5451e-02,  3.5129e-02,  4.6819e-02,
         6.8763e-02, -2.2109e-01, -2.9449e-02,  5.0597e-02, -2.5865e-02,
        -9.3008e-03,  2.8629e-02, -6.2801e-02, -6.8237e-02, -5.9068e-02,
         2.1109e-02, -3.3667e-02, -3.0538e-02,  1.0535e-01,  3.0778e-02,
        -2.6921e-02, -4.7817e-03,  2.0352e-02, -6.7792e-02,  6.7208e-02,
         1.7218e-02,  1.9034e-02, -5.1180e-02, -1.4875e-02, -1.5020e-02,
        -1.2319e-02, -6.5349e-02,  4.0683e-02,  5.6421e-02, -2.2507e-02,
        -2.5330e-02,  4.9632e-02,  5.9727e-02,  9.1537e-03, -2.7953e-02,
        -4.3726e-02, -3.3593e-02,  1.8592e-02,  1.5352e-03, -9.2273e-03,
        -6.5650e-02,  3.0612e-02, -6.9992e-02, -2.7435e-02,  2.9220e-02,
        -3.2722e-02, -3.1333e-02,  1.2232e-02, -6.3038e-02,  6.2572e-04,
         2.0118e-02, -4.7327e-02, -4.6759e-02,  1.6298e-02,  2.4694e-02,
        -1.5708e-02, -1.7262e-02, -1.1750e-02, -3.4596e-03, -5.3582e-02,
        -8.0472e-02,  5.7651e-02,  3.8062e-02, -7.1649e-02,  4.5374e-02,
        -7.1557e-02,  1.8123e-02,  3.5019e-02, -8.7280e-02, -5.9952e-03,
         1.3746e-02,  1.6378e-02, -4.3599e-02,  1.0333e-02, -1.3245e-02,
        -3.2981e-02, -6.4206e-02, -2.4593e-02,  3.1208e-02, -9.5114e-03,
        -5.2171e-02, -4.6604e-02,  5.0359e-02,  4.7381e-02, -7.6541e-03,
         1.7540e-02, -7.5362e-03, -1.0370e-03, -2.0973e-02, -5.8539e-02,
        -4.2109e-03, -7.7784e-02,  4.7974e-02,  1.5605e-02,  1.1676e-02,
         5.4789e-02,  2.6982e-02,  2.8896e-02,  1.4084e-02,  3.6774e-02,
        -5.7120e-02,  8.5216e-02, -1.8359e-02,  1.8367e-02, -5.9878e-02,
         2.1155e-02,  3.2800e-03,  1.5960e-02, -1.3590e-01,  6.6871e-02,
         5.0083e-03,  4.7189e-03,  9.8846e-02, -4.0727e-02, -1.1970e-01,
         4.3001e-03, -3.3519e-02, -1.2028e-02,  5.3046e-02,  6.3472e-02,
         8.0517e-03, -1.6034e-02,  1.1180e-02, -2.7315e-02, -1.9381e-02,
        -2.0683e-02,  3.7952e-03, -7.2708e-02, -3.0257e-02,  7.5861e-03,
        -3.0704e-02, -7.9766e-03,  9.0976e-03, -6.8949e-02,  9.3395e-02,
        -5.1396e-02,  4.6734e-02, -1.2085e-03,  2.2747e-02,  4.4702e-02,
        -1.9269e-02, -3.2044e-02,  4.6390e-02,  5.6546e-02, -3.7156e-02,
         3.9877e-02,  1.0895e-02, -1.6061e-02, -6.7260e-02,  1.6562e-02,
         1.2008e-03,  3.7859e-02,  3.9005e-02,  3.4202e-02, -1.4327e-02,
        -8.2659e-02,  1.9792e-02,  1.5776e-03, -6.7330e-02,  4.3296e-02,
        -4.3103e-02, -8.2537e-03,  3.0699e-02, -1.7245e-02,  5.5340e-02,
        -7.3155e-03,  2.0148e-02, -2.6217e-02, -1.6741e-03,  7.1648e-02,
         2.5549e-02,  2.2865e-02, -2.0414e-03, -1.6362e-02,  4.6387e-03,
         2.8256e-02,  2.3293e-02, -2.2062e-02, -9.2340e-03, -1.1985e-02,
         8.0533e-04, -2.3884e-02,  5.9400e-02, -1.1038e-02,  4.8180e-03,
         3.5944e-02, -6.4729e-02, -1.1301e-02, -5.6865e-02,  1.8658e-02,
        -1.4537e-02, -2.3870e-02,  1.8639e-02,  6.1247e-02,  1.8494e-03,
         3.9511e-03, -1.1623e-02,  2.7783e-02, -9.0809e-02, -4.3361e-02,
        -4.4524e-02,  9.5100e-03,  8.1598e-03, -5.9092e-02,  2.2854e-02,
         1.0801e-02,  5.5640e-02, -7.4158e-03, -3.0120e-02, -4.7106e-02,
        -2.8703e-02,  6.2336e-02, -8.6966e-02, -8.8282e-02, -2.9747e-02,
        -2.8669e-02,  2.8053e-02, -3.0225e-02, -2.4561e-02, -1.2942e-02,
        -4.3129e-02, -5.1436e-02,  3.2625e-02, -4.6949e-02, -1.2704e-02,
         2.7554e-02,  1.4629e-02,  3.8203e-02, -8.7354e-02, -2.7942e-02,
        -4.2217e-02,  4.5440e-02, -1.1199e-02,  1.5859e-02, -5.7629e-02,
        -3.4809e-02, -5.4919e-02,  1.9037e-02,  1.0293e-02,  6.9702e-03,
        -3.0121e-02,  7.6800e-02, -1.9755e-02, -1.2176e-01, -4.2284e-02,
        -5.6440e-02, -3.4314e-02, -3.0538e-02, -5.3078e-02, -2.0438e-02,
        -2.7687e-03,  1.5685e-02,  8.3713e-03,  1.4941e-02,  2.8835e-02,
        -1.5773e-02, -2.2957e-02,  3.4821e-02,  8.3100e-03, -3.6987e-02,
         1.0159e-03,  3.6687e-02,  1.5403e-02, -7.7245e-02,  1.1903e-02,
         3.9656e-02,  5.8933e-02,  1.1769e-03, -7.7724e-03,  1.0608e-01,
        -1.3163e-02, -6.9340e-03, -2.9777e-02,  3.8290e-02,  2.5452e-02,
        -4.4490e-02, -1.2190e-01, -9.1041e-03,  8.4519e-03, -1.0265e-03,
         3.0511e-02, -4.8933e-02,  3.1984e-03,  1.9107e-02, -1.9031e-02,
        -2.7986e-02,  2.8155e-02, -3.2111e-02,  5.3439e-02, -6.6016e-02,
         2.2510e-02, -2.5893e-02,  2.5647e-02,  6.2114e-02,  3.6392e-03,
         2.1521e-02,  1.0638e-03,  4.0820e-02, -2.1784e-02,  2.3471e-02,
         6.5689e-03,  4.1211e-02,  2.2548e-02, -6.9367e-02,  7.2007e-02,
        -2.3249e-02,  9.7457e-03,  5.0128e-03,  1.9682e-03,  1.1460e-02,
        -1.1636e-03,  1.2196e-02, -8.2566e-03, -1.2993e-02,  4.0637e-02,
        -1.2862e-02, -9.3435e-03,  3.5840e-02, -1.3115e-02,  6.7564e-02,
        -1.3449e-02,  8.3304e-02,  1.3780e-02, -6.5205e-03,  2.1614e-02,
        -4.6509e-02, -2.3400e-02, -1.1252e-02, -2.1349e-03,  9.9767e-02,
         5.9413e-02, -6.5736e-03, -4.4302e-02,  1.0448e-02, -1.8580e-02,
         6.8594e-02, -1.4184e-02, -7.0092e-02, -3.2865e-02,  1.1723e-02,
         9.1901e-03, -1.5251e-02, -1.4926e-02, -3.3470e-02, -3.6489e-03,
        -3.8432e-02,  1.9594e-02,  2.5313e-02, -4.9300e-02,  6.5015e-02,
        -3.0438e-02, -9.3662e-03,  3.4233e-02, -7.8762e-02, -6.7159e-03,
         3.1354e-02, -2.0526e-02,  5.4133e-03,  1.1246e-02,  2.1658e-02,
        -1.0054e-02,  2.1285e-02,  1.1186e-01, -3.7673e-02,  2.4505e-02,
        -2.0750e-02, -3.7844e-02, -2.8911e-02,  9.4496e-03,  1.4896e-02,
        -3.0971e-02, -1.8133e-02, -4.7326e-02, -2.8264e-02,  4.9661e-02,
        -1.8136e-02, -2.1942e-02, -2.6936e-02,  2.0541e-02,  4.2219e-03,
         6.6803e-02, -6.7906e-02, -3.7795e-02, -2.2262e-02,  3.3751e-02,
         1.1071e-02,  4.1053e-02, -6.2190e-02,  4.3035e-03, -3.6697e-02,
        -2.4697e-03,  3.2390e-02, -6.7590e-02,  3.7872e-02,  3.5083e-02,
        -4.1133e-02,  1.5301e-02, -9.9107e-03, -5.2390e-02,  6.0837e-02,
         2.2806e-01, -7.3393e-02,  2.9662e-02, -6.6508e-02, -1.7553e-02,
         7.5196e-02, -3.6798e-02,  4.5125e-03,  3.4317e-02, -5.6979e-02,
         5.9627e-02,  7.8637e-02, -6.2195e-02,  4.5452e-02, -2.8716e-03,
         8.0530e-02, -1.8484e-02,  2.2444e-02, -2.6805e-02, -2.2107e-02,
        -2.1742e-02, -3.0206e-02,  7.3662e-02,  4.2316e-02,  4.5892e-02,
        -2.8320e-02,  6.5208e-02, -4.3190e-02, -5.5195e-02, -7.3266e-02,
        -1.6800e-03,  4.9327e-02,  3.7236e-02,  1.3723e-02,  2.8840e-02,
         9.9783e-03, -4.3477e-02,  2.6408e-02, -5.9908e-03,  3.1495e-02,
        -1.3816e-03,  1.8268e-02, -2.0290e-02, -7.3615e-02, -4.2263e-02,
         3.5367e-02, -4.4292e-02, -7.9611e-02,  7.9907e-02,  4.5494e-02,
        -3.2248e-02,  1.6629e-02, -7.5351e-03,  2.1802e-02, -3.3684e-02,
        -1.4436e-02,  2.1520e-02, -6.3879e-02,  1.0100e-02, -2.5601e-05,
        -1.9271e-02,  4.7454e-02, -2.4488e-02,  6.5203e-03,  5.9140e-02,
         3.7843e-02,  3.8729e-02,  3.5719e-02,  6.4549e-02,  3.9975e-02,
        -7.7090e-03, -3.8202e-02, -4.2739e-02,  6.9333e-02, -3.2327e-02,
         1.3822e-01,  7.5231e-03,  1.8590e-02, -2.8336e-02,  7.5397e-02,
        -8.1537e-03, -7.2928e-02, -6.6228e-02,  1.4838e-02, -2.3286e-02,
         4.9019e-02,  1.8467e-02, -6.7986e-02, -4.8970e-02, -2.9831e-02,
         4.9185e-02,  3.9403e-03,  6.8458e-02,  4.9250e-02, -9.2371e-02,
        -1.7414e-02,  3.7454e-02,  4.5524e-02, -4.9280e-02,  5.0603e-02,
         5.4588e-03, -5.6567e-02])
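
The memory limitation mentioned earlier (only 1% of the corpus is kept) comes largely from embedding every sentence in a single tokenizer/model call. A minimal sketch of a batched variant, assuming the same tokenizer and model objects are reused; batch_size is an illustrative parameter to tune to the available memory:

def zanurzenia_zdan_w_partiach(lista_zdan, batch_size=256):
    # sketch: compute CLS embeddings batch by batch to keep peak memory low
    czesci = []
    for i in range(0, len(lista_zdan), batch_size):
        partia = lista_zdan[i:i + batch_size]
        encoded = tokenizer(partia, padding=True, truncation=True, return_tensors='pt')
        with torch.no_grad():
            wyjscie = model(**encoded)
            emb = wyjscie[0][:, 0]  # cls pooling, as above
        czesci.append(torch.nn.functional.normalize(emb, p=2, dim=1))
    return torch.cat(czesci, dim=0)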

Tensors: representations of the first lines of the training couplets.

x = []
for indeks_wersu_pierwszego in range(len(zanurzenia_treningowe_in)):
    x.append(torch.cat(
        (
            zanurzenia_treningowe_in[indeks_wersu_pierwszego],
            torch.from_numpy(poczatki_treningowe_in[indeks_wersu_pierwszego]),
            torch.from_numpy(konce_treningowe_in[indeks_wersu_pierwszego]),
            torch.from_numpy(tony_treningowe_in[indeks_wersu_pierwszego])
        )
    ))
print(len(x))
print(x[0].shape)
print(x[0])
5959
torch.Size([617])
tensor([-3.1409e-02,  4.2919e-02, -1.4236e-02,  6.2288e-02, -3.2497e-02,
        -5.3290e-02,  4.7686e-02,  7.7745e-02, -2.0447e-02,  2.8347e-02,
         1.3510e-02, -2.3332e-01,  8.9271e-04,  4.0544e-02,  9.2784e-05,
        -1.1740e-02, -1.6238e-02, -4.7785e-02, -8.7547e-02,  6.0501e-02,
        -1.7588e-02, -4.3948e-03, -3.1034e-02, -8.8176e-03,  3.8507e-02,
        -5.6918e-02,  6.8194e-02,  7.5235e-03,  6.6778e-03,  2.0831e-02,
         9.5349e-04,  4.5033e-02, -9.4392e-03, -3.2470e-02, -3.5810e-02,
        -2.1215e-02, -2.9089e-02,  3.8043e-02, -2.6267e-02,  5.2310e-02,
        -6.3633e-02,  4.2117e-02,  1.7834e-02, -5.5019e-02, -7.9315e-02,
         8.2654e-04, -1.0802e-02,  1.8213e-02,  6.5130e-02, -7.6177e-03,
        -3.8167e-02,  5.3484e-02,  3.5490e-02,  2.7366e-02, -3.0560e-02,
        -7.8364e-02, -3.3920e-02,  7.5826e-03, -1.6268e-02,  1.9344e-02,
         3.4404e-02,  1.1855e-02, -3.5319e-02, -1.1730e-02,  6.7642e-02,
         3.7650e-04,  1.9837e-02,  8.3773e-03, -1.1069e-02,  3.2787e-02,
         4.2034e-03,  3.5917e-03,  5.5923e-02, -5.6078e-02, -3.9402e-02,
        -5.3479e-02,  1.2802e-03, -3.0281e-02, -5.1191e-02, -1.7256e-02,
         1.4092e-02, -2.8433e-02, -5.4772e-02, -4.3614e-02, -1.3096e-02,
         1.8400e-02, -1.1333e-01, -4.3593e-02,  2.8702e-02,  5.6857e-04,
        -9.5228e-03, -9.2662e-03,  1.4085e-02, -1.0477e-02, -7.0193e-02,
         6.9635e-02, -2.4111e-02,  1.2565e-02, -6.6401e-02, -4.1899e-02,
        -2.5085e-02, -6.3970e-02,  5.7718e-02,  6.8888e-02, -6.1210e-02,
         6.5007e-02, -8.2084e-02, -5.9957e-02,  6.8816e-03, -1.0067e-03,
         3.7481e-02,  2.3379e-02,  2.7860e-02,  4.7394e-02, -3.3720e-02,
        -6.1802e-02,  9.0069e-02,  1.4320e-02, -6.6455e-02, -8.1411e-02,
        -4.2551e-02, -4.0180e-02,  7.1318e-02,  4.7259e-02, -6.1807e-03,
         1.6717e-02, -2.7057e-02, -1.9109e-02,  5.8335e-02,  4.2307e-02,
         3.6037e-03,  8.2558e-03, -2.4797e-03, -1.4135e-02, -2.2754e-02,
        -2.2781e-03, -5.8836e-03, -1.1159e-01,  2.3304e-03, -1.3209e-02,
        -1.2044e-03, -2.6060e-02, -1.6546e-02, -1.3189e-02,  7.0787e-02,
        -2.1846e-02, -3.3586e-02, -2.8873e-02,  3.0573e-02, -5.1569e-02,
         7.2466e-03,  5.6971e-02,  3.2711e-02, -1.2560e-02, -5.2461e-02,
        -5.3417e-03, -4.4110e-03, -1.7080e-02, -1.5891e-02,  5.0826e-02,
         5.6342e-02, -1.2345e-03,  2.6094e-02,  3.0109e-02, -1.6446e-02,
        -2.2257e-02,  3.8077e-03,  8.1443e-02, -4.4790e-02,  7.7875e-02,
         5.6147e-02, -1.2718e-02, -4.7217e-02,  3.8158e-02, -4.8242e-03,
        -3.3682e-02, -3.7652e-02,  5.8250e-02,  1.5820e-02,  3.1382e-02,
         1.3865e-02,  7.9274e-02, -2.9852e-02,  5.9575e-02, -9.7192e-03,
        -1.3883e-02,  1.3156e-02, -2.2232e-02, -2.1665e-02, -3.9232e-02,
        -9.3653e-03, -6.2756e-03,  3.1561e-02,  3.8607e-02,  8.6990e-03,
         6.4413e-02, -7.7960e-02, -4.7945e-02, -1.4091e-01,  5.0533e-02,
         2.0320e-03, -5.1708e-02, -1.9163e-03,  2.4024e-02, -2.0240e-02,
         2.1377e-02,  3.5398e-03,  3.7191e-02, -3.6258e-02, -5.4974e-02,
        -1.6857e-02,  6.0301e-02,  1.4563e-02, -4.0892e-02,  1.4841e-01,
         2.6193e-02, -9.0119e-04,  8.5365e-03,  1.2007e-02, -1.0382e-01,
         3.3631e-02,  7.6653e-02,  2.4468e-02, -7.0872e-03, -1.8002e-02,
        -8.5119e-03, -1.4168e-02,  1.5942e-02, -2.9196e-02, -6.4297e-04,
         1.6337e-02, -1.9513e-02,  1.3898e-02,  3.9867e-03,  2.6298e-02,
         4.6379e-02, -7.1418e-02,  1.0134e-02,  5.1168e-02, -4.9732e-02,
        -5.5967e-02, -7.8217e-03,  2.8585e-02, -8.7352e-03, -9.9658e-05,
         1.2468e-02, -7.3671e-02, -2.2079e-03,  5.4546e-03,  3.5459e-02,
        -3.2250e-02,  9.3758e-02, -1.1456e-02, -3.1892e-02, -4.1353e-02,
         3.3040e-02, -3.7227e-03,  2.6740e-02, -6.4840e-02,  4.1143e-02,
        -1.8554e-02,  1.5613e-02,  5.0357e-03,  1.1793e-02,  1.3087e-02,
        -4.2158e-02, -1.5489e-02,  2.7196e-02, -3.9413e-04,  1.1546e-02,
        -3.9742e-02,  7.9554e-03,  5.7563e-02, -5.5298e-02,  1.0457e-02,
         5.1986e-02, -3.2875e-03,  2.1230e-02,  6.3298e-03,  7.0061e-03,
         1.7268e-02,  7.4763e-02, -8.9870e-02,  1.9039e-02, -5.3741e-03,
        -4.5542e-02, -1.4080e-01,  3.6304e-02, -1.4179e-02, -2.1746e-02,
         1.3878e-02, -8.1540e-02,  4.5647e-02,  2.8653e-02, -1.1617e-02,
         2.7410e-02, -3.9985e-02, -6.1613e-03,  6.7774e-02,  1.1290e-02,
         4.6115e-02,  2.3358e-02, -2.1498e-02, -4.1548e-02,  1.3849e-02,
         1.2356e-02,  4.1165e-03,  4.5328e-02, -3.5151e-02, -2.3484e-03,
         1.2952e-03, -2.0535e-02, -3.0788e-02, -4.7044e-02,  1.5876e-02,
        -1.1296e-03, -1.8713e-02,  1.8543e-02,  5.3209e-02,  2.7803e-02,
         1.1028e-03,  2.9207e-02, -3.6119e-02, -1.6165e-02,  1.7555e-02,
        -8.2125e-03,  6.3445e-03, -4.5027e-02, -4.0817e-02,  4.5773e-02,
        -2.2641e-02,  5.2889e-02,  1.4512e-02, -1.9522e-02, -5.6481e-02,
        -1.1060e-02, -4.3722e-02,  1.4095e-02,  2.4259e-02,  6.0377e-02,
        -7.2628e-02,  3.9760e-02, -8.1585e-02, -9.1420e-03,  1.8809e-03,
         1.0487e-01, -4.9327e-02, -5.5549e-03,  4.6258e-02,  1.6701e-02,
        -1.6163e-02,  2.6286e-02, -2.7700e-02,  8.0984e-03,  2.4454e-02,
         4.4797e-02,  3.4455e-02, -6.0768e-02, -2.5864e-02,  1.0166e-03,
         5.3068e-03,  4.2425e-03, -5.4753e-02, -3.1478e-02,  3.7924e-02,
         1.0266e-03,  8.4248e-03,  4.5199e-02,  1.5580e-02, -5.6708e-03,
         3.6769e-02, -5.3641e-02,  4.1779e-02, -3.2060e-02, -2.6757e-02,
        -4.7505e-02, -8.2457e-02,  7.2944e-02,  1.5763e-02,  1.6309e-02,
         7.5724e-04,  6.4329e-02,  1.9464e-02, -2.7392e-03,  4.2363e-02,
        -1.0416e-01, -1.6209e-02,  4.5399e-02,  1.0589e-01, -2.4638e-02,
         2.6731e-02,  2.6622e-02,  4.0844e-02,  1.0323e-01,  3.3835e-02,
        -4.2006e-02,  4.2951e-02,  3.1068e-02, -2.4564e-03, -1.2811e-02,
        -8.4661e-03,  3.9647e-02, -1.1733e-01,  2.6631e-02, -3.1336e-02,
        -1.0026e-01, -6.3246e-03, -1.8747e-02, -8.8051e-03, -6.3902e-02,
         2.0967e-01,  3.4409e-02, -1.6454e-02,  3.0606e-02, -2.1813e-02,
         9.1961e-02,  4.4120e-02, -2.1517e-02, -3.4456e-02, -5.8409e-02,
         2.7488e-02,  1.9422e-02, -1.2918e-02, -7.2962e-03,  2.8859e-02,
        -4.2516e-02, -4.2966e-02, -1.9645e-02, -6.4296e-02, -4.0894e-02,
        -2.8706e-02, -5.6346e-02,  3.4201e-02, -3.9250e-03,  7.5307e-02,
         6.5123e-03, -4.7450e-02, -3.1443e-02, -5.0485e-02,  6.2536e-02,
        -2.6723e-02,  3.9097e-02,  2.5871e-03,  4.8988e-02, -1.2248e-05,
         1.8120e-02, -1.4111e-02, -2.9327e-02,  7.4617e-02,  8.2369e-03,
        -3.3414e-02, -1.0466e-02, -4.4706e-03, -1.3613e-02, -5.4163e-02,
        -4.4011e-02, -7.4851e-02, -5.5124e-02,  8.7570e-03,  3.8449e-02,
        -5.1844e-02,  5.9674e-03,  7.5129e-03,  1.0718e-02,  2.1981e-02,
         4.4945e-02, -3.4382e-02, -5.1930e-02,  1.5666e-02, -3.3479e-02,
        -2.9640e-03, -1.2958e-02, -3.5843e-02, -2.9896e-02,  7.1761e-02,
        -3.2109e-02,  1.1761e-01,  1.1047e-02, -4.7208e-02, -3.3970e-02,
         7.1073e-02, -9.1408e-02,  6.3568e-03, -7.5566e-03, -8.2016e-03,
        -9.3746e-03,  1.5221e-02,  7.5551e-03, -4.2618e-02,  2.9687e-02,
         4.7213e-02, -5.6087e-02, -3.5213e-02, -6.5220e-02,  1.8469e-02,
         6.4949e-02, -1.9809e-02, -8.2783e-02, -3.2709e-03, -5.1782e-02,
        -6.3309e-02,  3.6822e-02, -8.6364e-04, -1.7256e-02,  6.0698e-03,
        -2.0665e-02,  1.7764e-02,  8.8567e-02, -5.4184e-02, -1.4816e-02,
         6.1665e-02,  2.7374e-02,  2.2000e+01,  9.0000e+00,  2.2000e+01,
         1.7000e+01,  1.2000e+01,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  3.0000e+00,  1.6000e+01,  2.1000e+01,
         2.0000e+00,  3.2000e+01,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  3.0000e+00,  1.0000e+00,  2.0000e+00,
         3.0000e+00,  4.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00])

Tensors: representations of the second verses of the training couplets.

y = []
for indeks_wersu_pierwszego in range(len(zanurzenia_treningowe_out)):
    y.append(
        torch.cat(
            (
                zanurzenia_treningowe_out[indeks_wersu_pierwszego],
                torch.from_numpy(poczatki_treningowe_out[indeks_wersu_pierwszego]),
                torch.from_numpy(konce_treningowe_out[indeks_wersu_pierwszego]),
                torch.from_numpy(tony_treningowe_out[indeks_wersu_pierwszego])
            )
        )
    )
print(len(y))
print(y[0].shape)
print(y[0])
5959
torch.Size([617])
tensor([ 2.4096e-02,  7.3348e-02, -8.4988e-03,  2.1168e-02, -5.1689e-02,
        -3.5376e-04,  4.7075e-02,  2.5451e-02,  3.5129e-02,  4.6819e-02,
         6.8763e-02, -2.2109e-01, -2.9449e-02,  5.0597e-02, -2.5865e-02,
        -9.3008e-03,  2.8629e-02, -6.2801e-02, -6.8237e-02, -5.9068e-02,
         2.1109e-02, -3.3667e-02, -3.0538e-02,  1.0535e-01,  3.0778e-02,
        -2.6921e-02, -4.7817e-03,  2.0352e-02, -6.7792e-02,  6.7208e-02,
         1.7218e-02,  1.9034e-02, -5.1180e-02, -1.4875e-02, -1.5020e-02,
        -1.2319e-02, -6.5349e-02,  4.0683e-02,  5.6421e-02, -2.2507e-02,
        -2.5330e-02,  4.9632e-02,  5.9727e-02,  9.1537e-03, -2.7953e-02,
        -4.3726e-02, -3.3593e-02,  1.8592e-02,  1.5352e-03, -9.2273e-03,
        -6.5650e-02,  3.0612e-02, -6.9992e-02, -2.7435e-02,  2.9220e-02,
        -3.2722e-02, -3.1333e-02,  1.2232e-02, -6.3038e-02,  6.2572e-04,
         2.0118e-02, -4.7327e-02, -4.6759e-02,  1.6298e-02,  2.4694e-02,
        -1.5708e-02, -1.7262e-02, -1.1750e-02, -3.4596e-03, -5.3582e-02,
        -8.0472e-02,  5.7651e-02,  3.8062e-02, -7.1649e-02,  4.5374e-02,
        -7.1557e-02,  1.8123e-02,  3.5019e-02, -8.7280e-02, -5.9952e-03,
         1.3746e-02,  1.6378e-02, -4.3599e-02,  1.0333e-02, -1.3245e-02,
        -3.2981e-02, -6.4206e-02, -2.4593e-02,  3.1208e-02, -9.5114e-03,
        -5.2171e-02, -4.6604e-02,  5.0359e-02,  4.7381e-02, -7.6541e-03,
         1.7540e-02, -7.5362e-03, -1.0370e-03, -2.0973e-02, -5.8539e-02,
        -4.2109e-03, -7.7784e-02,  4.7974e-02,  1.5605e-02,  1.1676e-02,
         5.4789e-02,  2.6982e-02,  2.8896e-02,  1.4084e-02,  3.6774e-02,
        -5.7120e-02,  8.5216e-02, -1.8359e-02,  1.8367e-02, -5.9878e-02,
         2.1155e-02,  3.2800e-03,  1.5960e-02, -1.3590e-01,  6.6871e-02,
         5.0083e-03,  4.7189e-03,  9.8846e-02, -4.0727e-02, -1.1970e-01,
         4.3001e-03, -3.3519e-02, -1.2028e-02,  5.3046e-02,  6.3472e-02,
         8.0517e-03, -1.6034e-02,  1.1180e-02, -2.7315e-02, -1.9381e-02,
        -2.0683e-02,  3.7952e-03, -7.2708e-02, -3.0257e-02,  7.5861e-03,
        -3.0704e-02, -7.9766e-03,  9.0976e-03, -6.8949e-02,  9.3395e-02,
        -5.1396e-02,  4.6734e-02, -1.2085e-03,  2.2747e-02,  4.4702e-02,
        -1.9269e-02, -3.2044e-02,  4.6390e-02,  5.6546e-02, -3.7156e-02,
         3.9877e-02,  1.0895e-02, -1.6061e-02, -6.7260e-02,  1.6562e-02,
         1.2008e-03,  3.7859e-02,  3.9005e-02,  3.4202e-02, -1.4327e-02,
        -8.2659e-02,  1.9792e-02,  1.5776e-03, -6.7330e-02,  4.3296e-02,
        -4.3103e-02, -8.2537e-03,  3.0699e-02, -1.7245e-02,  5.5340e-02,
        -7.3155e-03,  2.0148e-02, -2.6217e-02, -1.6741e-03,  7.1648e-02,
         2.5549e-02,  2.2865e-02, -2.0414e-03, -1.6362e-02,  4.6387e-03,
         2.8256e-02,  2.3293e-02, -2.2062e-02, -9.2340e-03, -1.1985e-02,
         8.0533e-04, -2.3884e-02,  5.9400e-02, -1.1038e-02,  4.8180e-03,
         3.5944e-02, -6.4729e-02, -1.1301e-02, -5.6865e-02,  1.8658e-02,
        -1.4537e-02, -2.3870e-02,  1.8639e-02,  6.1247e-02,  1.8494e-03,
         3.9511e-03, -1.1623e-02,  2.7783e-02, -9.0809e-02, -4.3361e-02,
        -4.4524e-02,  9.5100e-03,  8.1598e-03, -5.9092e-02,  2.2854e-02,
         1.0801e-02,  5.5640e-02, -7.4158e-03, -3.0120e-02, -4.7106e-02,
        -2.8703e-02,  6.2336e-02, -8.6966e-02, -8.8282e-02, -2.9747e-02,
        -2.8669e-02,  2.8053e-02, -3.0225e-02, -2.4561e-02, -1.2942e-02,
        -4.3129e-02, -5.1436e-02,  3.2625e-02, -4.6949e-02, -1.2704e-02,
         2.7554e-02,  1.4629e-02,  3.8203e-02, -8.7354e-02, -2.7942e-02,
        -4.2217e-02,  4.5440e-02, -1.1199e-02,  1.5859e-02, -5.7629e-02,
        -3.4809e-02, -5.4919e-02,  1.9037e-02,  1.0293e-02,  6.9702e-03,
        -3.0121e-02,  7.6800e-02, -1.9755e-02, -1.2176e-01, -4.2284e-02,
        -5.6440e-02, -3.4314e-02, -3.0538e-02, -5.3078e-02, -2.0438e-02,
        -2.7687e-03,  1.5685e-02,  8.3713e-03,  1.4941e-02,  2.8835e-02,
        -1.5773e-02, -2.2957e-02,  3.4821e-02,  8.3100e-03, -3.6987e-02,
         1.0159e-03,  3.6687e-02,  1.5403e-02, -7.7245e-02,  1.1903e-02,
         3.9656e-02,  5.8933e-02,  1.1769e-03, -7.7724e-03,  1.0608e-01,
        -1.3163e-02, -6.9340e-03, -2.9777e-02,  3.8290e-02,  2.5452e-02,
        -4.4490e-02, -1.2190e-01, -9.1041e-03,  8.4519e-03, -1.0265e-03,
         3.0511e-02, -4.8933e-02,  3.1984e-03,  1.9107e-02, -1.9031e-02,
        -2.7986e-02,  2.8155e-02, -3.2111e-02,  5.3439e-02, -6.6016e-02,
         2.2510e-02, -2.5893e-02,  2.5647e-02,  6.2114e-02,  3.6392e-03,
         2.1521e-02,  1.0638e-03,  4.0820e-02, -2.1784e-02,  2.3471e-02,
         6.5689e-03,  4.1211e-02,  2.2548e-02, -6.9367e-02,  7.2007e-02,
        -2.3249e-02,  9.7457e-03,  5.0128e-03,  1.9682e-03,  1.1460e-02,
        -1.1636e-03,  1.2196e-02, -8.2566e-03, -1.2993e-02,  4.0637e-02,
        -1.2862e-02, -9.3435e-03,  3.5840e-02, -1.3115e-02,  6.7564e-02,
        -1.3449e-02,  8.3304e-02,  1.3780e-02, -6.5205e-03,  2.1614e-02,
        -4.6509e-02, -2.3400e-02, -1.1252e-02, -2.1349e-03,  9.9767e-02,
         5.9413e-02, -6.5736e-03, -4.4302e-02,  1.0448e-02, -1.8580e-02,
         6.8594e-02, -1.4184e-02, -7.0092e-02, -3.2865e-02,  1.1723e-02,
         9.1901e-03, -1.5251e-02, -1.4926e-02, -3.3470e-02, -3.6489e-03,
        -3.8432e-02,  1.9594e-02,  2.5313e-02, -4.9300e-02,  6.5015e-02,
        -3.0438e-02, -9.3662e-03,  3.4233e-02, -7.8762e-02, -6.7159e-03,
         3.1354e-02, -2.0526e-02,  5.4133e-03,  1.1246e-02,  2.1658e-02,
        -1.0054e-02,  2.1285e-02,  1.1186e-01, -3.7673e-02,  2.4505e-02,
        -2.0750e-02, -3.7844e-02, -2.8911e-02,  9.4496e-03,  1.4896e-02,
        -3.0971e-02, -1.8133e-02, -4.7326e-02, -2.8264e-02,  4.9661e-02,
        -1.8136e-02, -2.1942e-02, -2.6936e-02,  2.0541e-02,  4.2219e-03,
         6.6803e-02, -6.7906e-02, -3.7795e-02, -2.2262e-02,  3.3751e-02,
         1.1071e-02,  4.1053e-02, -6.2190e-02,  4.3035e-03, -3.6697e-02,
        -2.4697e-03,  3.2390e-02, -6.7590e-02,  3.7872e-02,  3.5083e-02,
        -4.1133e-02,  1.5301e-02, -9.9107e-03, -5.2390e-02,  6.0837e-02,
         2.2806e-01, -7.3393e-02,  2.9662e-02, -6.6508e-02, -1.7553e-02,
         7.5196e-02, -3.6798e-02,  4.5125e-03,  3.4317e-02, -5.6979e-02,
         5.9627e-02,  7.8637e-02, -6.2195e-02,  4.5452e-02, -2.8716e-03,
         8.0530e-02, -1.8484e-02,  2.2444e-02, -2.6805e-02, -2.2107e-02,
        -2.1742e-02, -3.0206e-02,  7.3662e-02,  4.2316e-02,  4.5892e-02,
        -2.8320e-02,  6.5208e-02, -4.3190e-02, -5.5195e-02, -7.3266e-02,
        -1.6800e-03,  4.9327e-02,  3.7236e-02,  1.3723e-02,  2.8840e-02,
         9.9783e-03, -4.3477e-02,  2.6408e-02, -5.9908e-03,  3.1495e-02,
        -1.3816e-03,  1.8268e-02, -2.0290e-02, -7.3615e-02, -4.2263e-02,
         3.5367e-02, -4.4292e-02, -7.9611e-02,  7.9907e-02,  4.5494e-02,
        -3.2248e-02,  1.6629e-02, -7.5351e-03,  2.1802e-02, -3.3684e-02,
        -1.4436e-02,  2.1520e-02, -6.3879e-02,  1.0100e-02, -2.5601e-05,
        -1.9271e-02,  4.7454e-02, -2.4488e-02,  6.5203e-03,  5.9140e-02,
         3.7843e-02,  3.8729e-02,  3.5719e-02,  6.4549e-02,  3.9975e-02,
        -7.7090e-03, -3.8202e-02, -4.2739e-02,  6.9333e-02, -3.2327e-02,
         1.3822e-01,  7.5231e-03,  1.8590e-02, -2.8336e-02,  7.5397e-02,
        -8.1537e-03, -7.2928e-02, -6.6228e-02,  1.4838e-02, -2.3286e-02,
         4.9019e-02,  1.8467e-02, -6.7986e-02, -4.8970e-02, -2.9831e-02,
         4.9185e-02,  3.9403e-03,  6.8458e-02,  4.9250e-02, -9.2371e-02,
        -1.7414e-02,  3.7454e-02,  4.5524e-02, -4.9280e-02,  5.0603e-02,
         5.4588e-03, -5.6567e-02,  1.3000e+01,  1.7000e+01,  2.1000e+01,
         4.0000e+00,  8.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  1.9000e+01,  3.0000e+01,  4.0000e+00,
         3.1000e+01,  1.2000e+01,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  1.0000e+00,  3.0000e+00,  4.0000e+00,
         1.0000e+00,  2.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00])

Input for the neural network.

Matching verse pairs together with a few randomly drawn non-matching verses.

from random import sample
X = []
Y = []
for indeks_wersu_drugiego in range(len(x)):
    indeksy = sample(range(len(y)), 3)
    if indeks_wersu_drugiego not in indeksy:
        indeksy[0] = indeks_wersu_drugiego
    for k in indeksy:
        X.append(
            torch.cat(
                (x[indeks_wersu_drugiego], y[k])
            )
        )
        if indeks_wersu_drugiego==k:
            Y.append(1)
        else:
            Y.append(0)

print(len(X))
print(X[0])
print(len(Y))
print(Y[0])
17877
tensor([-0.0314,  0.0429, -0.0142,  ...,  0.0000,  0.0000,  0.0000])
17877
1

scikit-learn models.

from sklearn.neural_network import MLPClassifier
klasyfikator = MLPClassifier()   # activation="tanh"

klasyfikator.fit(X, Y)
MLPClassifier()
from sklearn.neural_network import MLPRegressor
regresor = MLPRegressor()   # activation="tanh"

regresor.fit(X, Y)
MLPRegressor()
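Both estimators are used with their scikit-learn defaults; the commented-out activation="tanh" hints at one alternative setting. A minimal sketch (not an original notebook cell) spelling out the relevant defaults explicitly, so the architecture being fit is visible:

from sklearn.neural_network import MLPClassifier

# scikit-learn defaults written out: a single hidden layer of 100 units,
# ReLU activation, Adam optimizer (this cell only shows the settings, it is not fit).
klasyfikator_jawny = MLPClassifier(hidden_layer_sizes=(100,), activation="relu", solver="adam")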

Initials, finals, and tones of the test couplets.

print(len(testowe["in"]))
print(testowe["in"][0])
print(testowe["out"][0])

liczba_wejscia = 35

poczatki_testowe_in, konce_testowe_in, tony_testowe_in = poczatki_konce_tony_dla_zdan(testowe["in"], liczba_wejscia)
poczatki_testowe_out, konce_testowe_out, tony_testowe_out = poczatki_konce_tony_dla_zdan(testowe["out"], liczba_wejscia)

print(len(poczatki_testowe_in))
print(poczatki_testowe_in[0])
print(konce_testowe_in[0])
print(tony_testowe_in[0])
print(poczatki_testowe_out[0])
print(konce_testowe_out[0])
print(tony_testowe_out[0])
1490
林霭渐浓迷古寺
尘烟已远隐青山
1490
[ 8 22 12  7  3  9 21  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0
  0  0  0  0  0  0  0  0  0  0  0]
[25 12 21 34  1  2  1  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0
  0  0  0  0  0  0  0  0  0  0  0]
[2 3 4 2 2 3 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
[16 22 22 22 22 13 17  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0
  0  0  0  0  0  0  0  0  0  0  0]
[24 21  1 23 25 32 20  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0  0
  0  0  0  0  0  0  0  0  0  0  0]
[2 1 3 3 3 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]

BAAI embeddings of the test couplets.

testowe_in_lista = testowe["in"].tolist()
testowe_out_lista = testowe["out"].tolist()

print(len(testowe_in_lista))
print(testowe_in_lista[0])
print(testowe_out_lista[0])

zanurzenia_testowe_in = zanurzenia_zdan(testowe_in_lista)
zanurzenia_testowe_out = zanurzenia_zdan(testowe_out_lista)

print(zanurzenia_testowe_in.shape)
print(zanurzenia_testowe_in[0])
print(zanurzenia_testowe_out[0])
1490
林霭渐浓迷古寺
尘烟已远隐青山
torch.Size([1490, 512])
tensor([-0.0803, -0.0044, -0.0786, -0.0128,  0.0160, -0.0353,  0.0014,  0.0223,
         0.0380, -0.0011,  0.0339, -0.2229,  0.0089,  0.0073, -0.0201,  0.0610,
        -0.0445, -0.0449, -0.0055, -0.0014, -0.0261, -0.0536, -0.0592, -0.0063,
         0.0381, -0.0866,  0.0715,  0.0058, -0.0275,  0.0164,  0.0154,  0.0230,
        -0.0277,  0.0550,  0.0030,  0.0233, -0.0007,  0.0052, -0.1081, -0.0225,
         0.0060,  0.0156,  0.0174, -0.0953, -0.0445, -0.0736,  0.0245,  0.0071,
        -0.0047, -0.0154,  0.0251,  0.0371,  0.0372,  0.0557,  0.0354,  0.0049,
        -0.0377,  0.0925, -0.0479,  0.0592, -0.0294, -0.0117, -0.0099, -0.0365,
        -0.0016,  0.0338,  0.0182,  0.0122, -0.0254,  0.0362,  0.0191, -0.0080,
        -0.0086, -0.0128, -0.0514, -0.0405, -0.0103,  0.0150, -0.0543, -0.0259,
         0.0189, -0.0283,  0.0074,  0.0298,  0.0197, -0.0688,  0.0169,  0.0327,
         0.0889, -0.0453,  0.0061, -0.0504,  0.0300,  0.0526,  0.0672,  0.0366,
        -0.0350, -0.0167, -0.0342, -0.0425,  0.0959, -0.0246,  0.0599,  0.0506,
         0.0071, -0.0019, -0.0378, -0.0397, -0.0615, -0.0451,  0.0148,  0.0754,
         0.0448,  0.0079,  0.0079, -0.0323,  0.0420,  0.0261, -0.0189, -0.0006,
        -0.0555, -0.0707,  0.0748, -0.0446,  0.0205,  0.0555, -0.0474, -0.0284,
        -0.0078,  0.0699,  0.0321, -0.0116,  0.0031,  0.0383, -0.0438, -0.0464,
        -0.0280,  0.0207, -0.0657,  0.0622, -0.0342, -0.0725, -0.0604,  0.0033,
         0.0161, -0.0520,  0.0893,  0.0283,  0.0264,  0.0397,  0.0252,  0.0352,
        -0.0568, -0.0103,  0.0428, -0.0079,  0.0340,  0.0678, -0.0089, -0.0216,
         0.0258, -0.0137, -0.0024, -0.0292, -0.0136, -0.0459, -0.0651, -0.0068,
        -0.0241,  0.0677, -0.0161, -0.0421, -0.0768,  0.0280,  0.0130, -0.0030,
        -0.0158,  0.0035, -0.0023,  0.0578, -0.0297,  0.0666, -0.0248,  0.0051,
        -0.0228,  0.0210, -0.0042, -0.0209, -0.0648, -0.0295,  0.0431,  0.0029,
         0.0094, -0.0634,  0.0191,  0.0710, -0.0919, -0.0410, -0.1001,  0.0639,
        -0.0363, -0.0589,  0.0432,  0.0345, -0.0109,  0.0306, -0.0363,  0.0605,
        -0.0013, -0.0236, -0.0073,  0.0215, -0.0072, -0.0054,  0.0342,  0.1021,
        -0.0012, -0.0165, -0.0412, -0.0310,  0.0153,  0.0478,  0.0047, -0.0200,
         0.0466, -0.0296,  0.0525, -0.0086, -0.0481,  0.0070,  0.0184,  0.0016,
        -0.0953,  0.0013,  0.1069,  0.0215, -0.0232,  0.0104, -0.0105, -0.0317,
        -0.0467,  0.0119,  0.0255, -0.0118, -0.0739, -0.0692, -0.0154, -0.0009,
         0.0805,  0.0470, -0.0154, -0.0147,  0.0111, -0.0048,  0.0023, -0.0210,
         0.0198, -0.0203,  0.0076,  0.0339,  0.0109,  0.0072,  0.0375,  0.0244,
         0.0248,  0.0157, -0.0538,  0.1174, -0.0760, -0.0135,  0.0005,  0.0435,
        -0.0583,  0.0124, -0.0299,  0.0655,  0.0473,  0.0527, -0.0647,  0.0033,
        -0.0037,  0.0615, -0.0907,  0.0393, -0.0100, -0.0449, -0.1082, -0.0489,
         0.0798, -0.0139,  0.0306, -0.0693,  0.0855,  0.0304, -0.0006, -0.0617,
         0.0730, -0.0322,  0.0346,  0.0150,  0.0505, -0.0537, -0.0049, -0.0557,
        -0.0587, -0.0152,  0.0275,  0.0546, -0.0402,  0.0414,  0.0082,  0.0187,
         0.0807,  0.0023, -0.0020, -0.0127,  0.0018,  0.0367, -0.0196,  0.0370,
         0.0481,  0.0114, -0.0740, -0.0470,  0.0473, -0.0203,  0.0007, -0.0120,
         0.0184,  0.0408,  0.0107, -0.0040,  0.0381, -0.0439, -0.0488, -0.1110,
        -0.0242, -0.0229,  0.0843,  0.0632,  0.0496,  0.0440, -0.0541,  0.0328,
         0.0049,  0.0339, -0.0236,  0.0681, -0.0083,  0.0119, -0.0179, -0.0340,
         0.0168,  0.0519,  0.0075, -0.0363, -0.0171,  0.0245,  0.0448,  0.0626,
         0.0217,  0.0014, -0.0181, -0.0617,  0.0774, -0.0584, -0.0292,  0.0295,
        -0.0710, -0.0215, -0.0300, -0.0251, -0.0351, -0.0061,  0.0562, -0.0010,
         0.0253,  0.0533,  0.0115,  0.0012, -0.0555,  0.0206,  0.0161, -0.0110,
         0.0324, -0.0452, -0.0211,  0.0295,  0.0695, -0.0363,  0.0241, -0.0955,
         0.0015,  0.0520,  0.0293, -0.0128,  0.0318, -0.0065,  0.0288, -0.0172,
         0.0413, -0.0386, -0.0374, -0.0453, -0.0624, -0.0277,  0.0209,  0.0129,
         0.0102, -0.0380,  0.2030, -0.0521, -0.0468,  0.0020,  0.0141,  0.0326,
        -0.0218, -0.0495, -0.0097, -0.0504,  0.0061,  0.1062, -0.0181, -0.0192,
        -0.0529,  0.0135, -0.0018,  0.0083, -0.0582, -0.0124,  0.0261,  0.0147,
         0.0661,  0.0707,  0.0129,  0.0510,  0.0094,  0.0139, -0.0332,  0.0405,
        -0.0319,  0.0064,  0.0060, -0.0278, -0.0744, -0.0532, -0.0796, -0.0301,
         0.0271, -0.0158, -0.0048, -0.0131, -0.0572,  0.0206,  0.0347,  0.0211,
        -0.0953, -0.0821,  0.0239,  0.0533, -0.0734, -0.0091, -0.0394,  0.0181,
        -0.0606,  0.0127, -0.0173, -0.0278, -0.0144,  0.0172,  0.0281, -0.0363,
        -0.0219,  0.0014,  0.0365,  0.0259,  0.0199, -0.0597, -0.0501, -0.0056,
        -0.0631, -0.0121, -0.0168, -0.0255,  0.0857,  0.0378,  0.0286,  0.0531,
        -0.0618, -0.0443, -0.0697, -0.0020,  0.0079, -0.0031, -0.0016,  0.0083,
         0.0450, -0.0572, -0.0373,  0.0035,  0.0904, -0.0523,  0.0262,  0.0277,
        -0.0096, -0.0129,  0.1094,  0.0445, -0.0495, -0.0920,  0.0300, -0.0253])
tensor([-4.2662e-02, -3.1708e-02, -5.6512e-02,  4.0084e-02, -3.8748e-02,
         9.6097e-03,  4.7008e-02,  7.5668e-02, -1.3552e-02,  1.1450e-02,
         7.6131e-02, -2.4726e-01, -2.2119e-02, -3.7263e-02, -1.6699e-03,
        -2.8011e-03, -7.1475e-03,  1.0944e-02,  1.0504e-02, -1.9638e-02,
         3.1485e-02, -6.9084e-03, -2.0084e-02, -2.8139e-02, -2.1698e-02,
        -4.5725e-02,  6.2876e-02, -2.4744e-02, -5.4077e-02,  1.1777e-02,
         3.2030e-02, -6.9665e-03,  1.5294e-02, -8.0923e-02,  2.8682e-02,
         6.8040e-03, -3.3103e-03,  2.6560e-02, -6.0841e-02,  2.1151e-03,
        -3.2793e-02,  1.4114e-02, -3.0558e-02, -6.7261e-02, -7.8080e-02,
        -5.6005e-02, -2.4466e-02,  3.1131e-02,  1.2249e-02, -2.6226e-02,
         5.6157e-03, -1.4219e-02,  6.1117e-02,  8.7854e-02,  3.2294e-02,
        -7.5786e-02, -3.8687e-03,  5.8290e-02, -1.8115e-02,  9.2157e-03,
        -1.9775e-02, -1.4765e-02, -8.1864e-02, -5.2147e-02, -3.3906e-02,
         4.7297e-02,  1.7286e-02, -3.8963e-02, -7.2971e-03,  4.6875e-02,
        -7.3317e-02,  2.3185e-02, -1.0216e-02, -4.6695e-02, -1.0624e-02,
        -8.6403e-02,  1.0679e-02,  3.8384e-02, -2.0390e-02, -4.8561e-02,
         1.7503e-02, -2.4822e-02, -1.0118e-01, -1.1536e-02,  1.2367e-02,
        -2.8817e-02,  3.8065e-02, -4.8299e-03,  3.6943e-02, -8.7723e-03,
        -3.3175e-02, -2.9473e-02, -1.4506e-02,  4.3669e-02, -1.3243e-02,
         9.1637e-03, -5.2170e-02,  6.5091e-02, -6.0895e-03, -8.3091e-02,
         4.1754e-03, -8.1591e-03,  1.3681e-02,  4.9336e-02,  5.1179e-02,
         3.0331e-02,  8.2378e-03,  3.4116e-03,  8.7714e-03,  2.6082e-02,
        -5.9453e-02,  5.4098e-02,  1.5604e-02,  6.1685e-02, -6.5803e-02,
        -4.3957e-03,  2.8998e-02,  7.4083e-02, -7.5646e-02,  9.9494e-03,
        -3.0923e-02, -4.0035e-03,  2.6690e-02, -1.7936e-02,  5.9101e-02,
        -2.6199e-02, -8.9598e-03, -3.3770e-02,  2.5304e-02, -2.2467e-02,
        -8.5894e-03, -6.5153e-02, -1.8516e-02, -3.2236e-02, -6.9214e-02,
        -6.8273e-02, -6.8416e-03,  3.0168e-02,  2.6452e-02,  2.7235e-02,
        -1.4605e-02,  4.0644e-02, -3.1382e-02, -6.6872e-02,  3.6762e-02,
         3.3090e-02,  4.1689e-02,  6.2443e-02,  1.1023e-01,  4.5009e-02,
        -5.9660e-02, -1.9799e-02, -2.1019e-02, -1.4585e-02, -4.9002e-02,
         6.7098e-03,  4.8637e-02,  2.6957e-02, -1.2555e-01, -4.4153e-02,
        -3.7129e-02, -3.5815e-03,  3.7462e-03, -3.9305e-02,  6.6185e-03,
        -5.7863e-03, -4.5151e-02,  5.8374e-02, -7.9883e-02,  5.8874e-02,
         2.9350e-02, -3.0508e-02, -5.7538e-02,  1.4630e-02,  1.7199e-02,
        -4.2483e-02, -6.1783e-02,  1.8635e-02, -5.8922e-03,  3.2036e-02,
         1.1720e-02,  2.9453e-02, -4.1426e-02, -1.3919e-04,  7.4231e-02,
         4.0965e-02,  5.2020e-02, -8.5824e-03, -5.0865e-02,  3.3741e-02,
         3.1936e-02, -2.8163e-02,  2.5174e-02, -6.6295e-03,  5.4489e-03,
         4.8801e-02, -3.4366e-02,  1.5345e-02, -4.9533e-02,  4.1175e-02,
         1.0052e-02, -8.0770e-03,  1.6990e-03,  2.7498e-02, -2.0195e-02,
         7.3058e-02, -1.3125e-02,  5.5075e-02,  1.0222e-02, -6.1044e-02,
         1.6483e-02,  1.7518e-02, -3.5582e-02,  6.5297e-03,  4.8142e-02,
        -9.1526e-03, -2.5292e-02, -9.2555e-02, -2.7960e-02, -6.7617e-02,
        -2.7919e-02,  2.5797e-02, -3.3792e-02, -1.7559e-02,  5.0696e-03,
         1.9502e-02,  9.8111e-02, -3.5467e-02, -7.8175e-03, -1.3069e-02,
         1.1869e-02, -1.2426e-02, -3.8581e-02,  1.3946e-02,  8.5697e-02,
         5.9456e-03, -7.0115e-03,  3.2226e-03,  3.0390e-03, -5.4671e-03,
        -4.7213e-02,  3.5392e-02, -1.5239e-03,  7.3812e-02, -1.7222e-02,
        -1.2772e-03,  2.1770e-02, -4.6258e-02,  3.5956e-02,  2.6292e-02,
         5.5864e-03,  3.9837e-03,  1.4386e-02, -3.0420e-03,  1.6326e-02,
        -4.3116e-02,  6.0202e-04, -6.6577e-02,  4.4794e-02, -6.0236e-02,
         3.3927e-03,  9.1377e-02,  6.7364e-02, -1.6208e-02, -4.1574e-02,
         3.2682e-02,  7.2001e-03,  2.8854e-02, -2.5179e-02, -4.5137e-02,
         5.7746e-03,  3.8036e-02, -1.8377e-02,  5.4035e-02,  1.5138e-02,
         4.5383e-02,  1.1084e-02, -2.0204e-02, -1.0160e-02,  1.8784e-02,
         9.1744e-03,  6.3865e-02, -6.9441e-02, -3.0443e-02,  1.0225e-02,
         9.9409e-03, -1.1863e-01, -2.9981e-02,  5.5444e-02,  3.4524e-03,
         4.7667e-02, -4.6273e-02,  5.8857e-02,  6.7802e-02, -1.0152e-03,
        -2.3958e-02,  2.7715e-02, -1.5837e-02,  3.0255e-02,  2.7764e-02,
        -8.5772e-03, -5.2482e-03, -3.4041e-02, -1.7598e-02, -2.3632e-02,
        -1.9890e-02,  9.4271e-03,  5.0524e-02, -3.7680e-02,  8.0661e-02,
         3.0959e-02,  2.4361e-02,  2.6617e-02, -1.4361e-02,  1.9438e-02,
        -3.3619e-02,  3.0872e-02,  6.3582e-03, -1.4521e-03, -5.6649e-03,
         6.3105e-03,  4.5886e-02, -9.0654e-02, -3.0558e-02,  1.0905e-01,
        -5.4971e-02,  3.0258e-02, -4.5004e-02,  4.7370e-02,  4.0472e-02,
        -1.0167e-03, -2.9013e-02,  6.6706e-03,  1.3320e-02,  2.7442e-02,
        -1.4620e-02, -3.8887e-02, -2.3700e-02,  6.3660e-02,  5.3285e-02,
         9.2250e-03,  1.3351e-02, -6.2697e-02,  4.8761e-02,  6.6263e-02,
        -5.4074e-03, -7.5083e-02,  3.8809e-03, -3.1598e-02, -2.8409e-03,
        -2.3826e-02,  1.9625e-02,  4.6371e-03, -4.6053e-02, -1.1388e-02,
        -9.7393e-02,  2.5569e-02,  7.0440e-04, -1.6392e-02, -1.7159e-02,
        -1.8155e-02,  1.1288e-02, -6.0118e-02, -5.2117e-02, -2.0518e-03,
         1.2293e-02, -4.8702e-02,  5.0681e-02, -7.8261e-03, -6.0259e-02,
         2.5790e-02, -2.5972e-02,  1.4556e-03,  1.5539e-02,  2.5964e-02,
         4.1747e-02, -4.6959e-02,  4.5961e-02,  1.2592e-02,  1.6024e-02,
        -7.0781e-02,  2.9707e-02, -1.9970e-02, -2.4453e-02,  1.0534e-01,
        -3.5986e-02, -1.1281e-02,  3.6299e-02,  7.7332e-03, -8.4582e-03,
         1.0414e-01, -7.5003e-02,  1.8392e-02,  2.5007e-02,  5.3709e-05,
         1.1661e-02,  5.3652e-02, -4.0520e-03, -2.6330e-02, -1.1297e-01,
         2.0622e-02, -1.1506e-02, -3.9126e-02, -3.2559e-02,  1.6903e-02,
        -7.6179e-02,  1.0538e-01, -3.3585e-02,  2.9592e-02, -9.8638e-02,
         2.3429e-01, -1.2447e-02,  4.3283e-02, -5.9157e-02,  9.1898e-02,
         2.2950e-02, -5.2114e-02, -2.2760e-02, -1.0465e-02, -1.1089e-01,
         2.2758e-02,  3.4435e-02,  2.1491e-02, -5.0505e-02, -1.5447e-02,
        -1.0494e-02,  1.6680e-02,  2.6068e-02, -9.1764e-02, -4.0106e-03,
         4.8605e-02, -2.4436e-02,  1.7709e-02,  1.9984e-02,  4.5895e-02,
         7.1538e-02,  6.0457e-02, -5.3648e-02, -2.2555e-02, -7.4055e-02,
        -3.8501e-02, -5.9608e-03,  2.8740e-02, -2.4337e-02,  1.9937e-02,
         4.6275e-02, -6.1770e-02,  2.4231e-02, -5.1672e-02,  3.2617e-02,
         4.1688e-02, -5.5629e-02,  4.5233e-02,  2.8302e-02,  7.1679e-02,
         3.3194e-02, -3.8711e-02,  2.4011e-02,  7.0962e-02,  2.2012e-02,
        -2.7202e-02,  3.5133e-02,  8.8852e-03,  1.1321e-02,  5.4091e-03,
         4.8605e-04,  9.4150e-03,  1.7323e-02, -2.4181e-02, -3.8130e-02,
        -3.6088e-02,  1.1920e-03, -4.5289e-02, -2.7036e-02,  4.3711e-02,
        -4.3235e-02, -9.8394e-03, -1.6918e-02, -8.2358e-02,  1.1723e-02,
        -2.4647e-02, -1.4637e-02,  2.5583e-02, -6.2279e-02,  5.4639e-02,
         5.6699e-02,  3.7210e-02,  3.1042e-02, -6.2109e-02, -8.3125e-02,
        -5.3028e-02,  2.0527e-02, -2.1677e-02, -7.7717e-02, -3.5835e-02,
         1.5833e-02,  3.2097e-02, -9.4851e-02,  2.1548e-02, -1.4085e-02,
         1.0706e-02,  1.0919e-03, -2.1583e-02,  7.1433e-03, -5.3452e-02,
        -3.6993e-02,  7.3571e-02, -2.1420e-02, -2.2431e-03, -4.2342e-02,
         6.1201e-02,  2.1760e-02])

Tensors: representations of the first verses of the test couplets.

x_test = []
for indeks_wersu_pierwszego in range(len(zanurzenia_testowe_in)):
    x_test.append(torch.cat(
        (
            zanurzenia_testowe_in[indeks_wersu_pierwszego],
            torch.from_numpy(poczatki_testowe_in[indeks_wersu_pierwszego]),
            torch.from_numpy(konce_testowe_in[indeks_wersu_pierwszego]),
            torch.from_numpy(tony_testowe_in[indeks_wersu_pierwszego])
        )
    ))
print(len(x_test))
print(x_test[0].shape)
print(x_test[0])
1490
torch.Size([617])
tensor([-8.0259e-02, -4.3918e-03, -7.8638e-02, -1.2816e-02,  1.5959e-02,
        -3.5314e-02,  1.3728e-03,  2.2271e-02,  3.7977e-02, -1.1226e-03,
         3.3940e-02, -2.2293e-01,  8.9278e-03,  7.3494e-03, -2.0129e-02,
         6.0954e-02, -4.4464e-02, -4.4885e-02, -5.4901e-03, -1.3792e-03,
        -2.6079e-02, -5.3571e-02, -5.9173e-02, -6.3475e-03,  3.8071e-02,
        -8.6587e-02,  7.1489e-02,  5.7802e-03, -2.7479e-02,  1.6377e-02,
         1.5447e-02,  2.3025e-02, -2.7736e-02,  5.4993e-02,  3.0404e-03,
         2.3296e-02, -7.4090e-04,  5.2373e-03, -1.0805e-01, -2.2483e-02,
         5.9799e-03,  1.5623e-02,  1.7414e-02, -9.5345e-02, -4.4511e-02,
        -7.3634e-02,  2.4519e-02,  7.1082e-03, -4.7360e-03, -1.5421e-02,
         2.5125e-02,  3.7092e-02,  3.7218e-02,  5.5716e-02,  3.5407e-02,
         4.8798e-03, -3.7684e-02,  9.2484e-02, -4.7923e-02,  5.9173e-02,
        -2.9434e-02, -1.1721e-02, -9.8530e-03, -3.6479e-02, -1.6005e-03,
         3.3847e-02,  1.8198e-02,  1.2230e-02, -2.5434e-02,  3.6206e-02,
         1.9129e-02, -7.9656e-03, -8.5516e-03, -1.2792e-02, -5.1442e-02,
        -4.0518e-02, -1.0302e-02,  1.5046e-02, -5.4264e-02, -2.5881e-02,
         1.8931e-02, -2.8339e-02,  7.3752e-03,  2.9828e-02,  1.9680e-02,
        -6.8792e-02,  1.6888e-02,  3.2706e-02,  8.8921e-02, -4.5339e-02,
         6.0546e-03, -5.0379e-02,  3.0022e-02,  5.2648e-02,  6.7230e-02,
         3.6637e-02, -3.5036e-02, -1.6691e-02, -3.4201e-02, -4.2494e-02,
         9.5922e-02, -2.4616e-02,  5.9890e-02,  5.0630e-02,  7.1451e-03,
        -1.8633e-03, -3.7804e-02, -3.9666e-02, -6.1533e-02, -4.5068e-02,
         1.4756e-02,  7.5407e-02,  4.4799e-02,  7.8929e-03,  7.9047e-03,
        -3.2292e-02,  4.2006e-02,  2.6062e-02, -1.8948e-02, -6.3121e-04,
        -5.5451e-02, -7.0686e-02,  7.4786e-02, -4.4551e-02,  2.0545e-02,
         5.5521e-02, -4.7396e-02, -2.8430e-02, -7.7508e-03,  6.9895e-02,
         3.2122e-02, -1.1592e-02,  3.1207e-03,  3.8250e-02, -4.3830e-02,
        -4.6368e-02, -2.7975e-02,  2.0730e-02, -6.5732e-02,  6.2236e-02,
        -3.4223e-02, -7.2456e-02, -6.0369e-02,  3.2526e-03,  1.6138e-02,
        -5.2011e-02,  8.9266e-02,  2.8281e-02,  2.6365e-02,  3.9720e-02,
         2.5165e-02,  3.5209e-02, -5.6761e-02, -1.0289e-02,  4.2798e-02,
        -7.8977e-03,  3.4015e-02,  6.7800e-02, -8.9471e-03, -2.1570e-02,
         2.5793e-02, -1.3697e-02, -2.4169e-03, -2.9232e-02, -1.3631e-02,
        -4.5947e-02, -6.5127e-02, -6.7566e-03, -2.4052e-02,  6.7700e-02,
        -1.6108e-02, -4.2147e-02, -7.6850e-02,  2.7986e-02,  1.2986e-02,
        -2.9518e-03, -1.5767e-02,  3.5351e-03, -2.3380e-03,  5.7797e-02,
        -2.9714e-02,  6.6649e-02, -2.4828e-02,  5.1280e-03, -2.2814e-02,
         2.1011e-02, -4.1826e-03, -2.0886e-02, -6.4805e-02, -2.9477e-02,
         4.3137e-02,  2.8640e-03,  9.4253e-03, -6.3438e-02,  1.9139e-02,
         7.1025e-02, -9.1945e-02, -4.1015e-02, -1.0007e-01,  6.3948e-02,
        -3.6265e-02, -5.8861e-02,  4.3239e-02,  3.4465e-02, -1.0871e-02,
         3.0624e-02, -3.6270e-02,  6.0533e-02, -1.2985e-03, -2.3633e-02,
        -7.3279e-03,  2.1527e-02, -7.2312e-03, -5.3978e-03,  3.4182e-02,
         1.0214e-01, -1.1936e-03, -1.6545e-02, -4.1180e-02, -3.1017e-02,
         1.5271e-02,  4.7796e-02,  4.7201e-03, -1.9967e-02,  4.6641e-02,
        -2.9648e-02,  5.2540e-02, -8.6410e-03, -4.8120e-02,  6.9946e-03,
         1.8410e-02,  1.5676e-03, -9.5262e-02,  1.3219e-03,  1.0692e-01,
         2.1499e-02, -2.3225e-02,  1.0442e-02, -1.0519e-02, -3.1683e-02,
        -4.6675e-02,  1.1863e-02,  2.5517e-02, -1.1842e-02, -7.3877e-02,
        -6.9165e-02, -1.5360e-02, -9.3084e-04,  8.0512e-02,  4.7033e-02,
        -1.5356e-02, -1.4746e-02,  1.1060e-02, -4.7805e-03,  2.3480e-03,
        -2.0961e-02,  1.9839e-02, -2.0290e-02,  7.5530e-03,  3.3894e-02,
         1.0894e-02,  7.2060e-03,  3.7515e-02,  2.4427e-02,  2.4834e-02,
         1.5667e-02, -5.3774e-02,  1.1743e-01, -7.5955e-02, -1.3473e-02,
         4.7957e-04,  4.3490e-02, -5.8301e-02,  1.2368e-02, -2.9888e-02,
         6.5488e-02,  4.7251e-02,  5.2683e-02, -6.4732e-02,  3.2558e-03,
        -3.6795e-03,  6.1479e-02, -9.0703e-02,  3.9305e-02, -1.0025e-02,
        -4.4901e-02, -1.0818e-01, -4.8924e-02,  7.9814e-02, -1.3939e-02,
         3.0613e-02, -6.9328e-02,  8.5495e-02,  3.0384e-02, -6.3244e-04,
        -6.1677e-02,  7.2960e-02, -3.2197e-02,  3.4590e-02,  1.5042e-02,
         5.0482e-02, -5.3699e-02, -4.8971e-03, -5.5674e-02, -5.8709e-02,
        -1.5166e-02,  2.7467e-02,  5.4554e-02, -4.0191e-02,  4.1437e-02,
         8.1562e-03,  1.8709e-02,  8.0741e-02,  2.2606e-03, -2.0263e-03,
        -1.2661e-02,  1.7509e-03,  3.6689e-02, -1.9632e-02,  3.6983e-02,
         4.8102e-02,  1.1438e-02, -7.4046e-02, -4.6997e-02,  4.7330e-02,
        -2.0255e-02,  6.5378e-04, -1.1966e-02,  1.8390e-02,  4.0788e-02,
         1.0731e-02, -4.0035e-03,  3.8093e-02, -4.3862e-02, -4.8802e-02,
        -1.1096e-01, -2.4208e-02, -2.2915e-02,  8.4262e-02,  6.3164e-02,
         4.9571e-02,  4.3982e-02, -5.4121e-02,  3.2768e-02,  4.9195e-03,
         3.3878e-02, -2.3583e-02,  6.8083e-02, -8.3218e-03,  1.1866e-02,
        -1.7878e-02, -3.4026e-02,  1.6795e-02,  5.1865e-02,  7.5081e-03,
        -3.6319e-02, -1.7098e-02,  2.4454e-02,  4.4789e-02,  6.2583e-02,
         2.1719e-02,  1.4467e-03, -1.8073e-02, -6.1717e-02,  7.7397e-02,
        -5.8391e-02, -2.9188e-02,  2.9497e-02, -7.1020e-02, -2.1456e-02,
        -2.9981e-02, -2.5142e-02, -3.5100e-02, -6.0593e-03,  5.6244e-02,
        -1.0404e-03,  2.5305e-02,  5.3293e-02,  1.1524e-02,  1.1573e-03,
        -5.5486e-02,  2.0558e-02,  1.6097e-02, -1.1003e-02,  3.2383e-02,
        -4.5159e-02, -2.1123e-02,  2.9454e-02,  6.9495e-02, -3.6259e-02,
         2.4084e-02, -9.5467e-02,  1.4566e-03,  5.1955e-02,  2.9325e-02,
        -1.2771e-02,  3.1845e-02, -6.5127e-03,  2.8762e-02, -1.7184e-02,
         4.1251e-02, -3.8555e-02, -3.7427e-02, -4.5343e-02, -6.2431e-02,
        -2.7663e-02,  2.0913e-02,  1.2883e-02,  1.0214e-02, -3.8013e-02,
         2.0302e-01, -5.2109e-02, -4.6819e-02,  2.0404e-03,  1.4092e-02,
         3.2596e-02, -2.1809e-02, -4.9518e-02, -9.6773e-03, -5.0380e-02,
         6.1177e-03,  1.0617e-01, -1.8102e-02, -1.9206e-02, -5.2882e-02,
         1.3522e-02, -1.7683e-03,  8.2799e-03, -5.8206e-02, -1.2375e-02,
         2.6137e-02,  1.4741e-02,  6.6093e-02,  7.0706e-02,  1.2897e-02,
         5.1019e-02,  9.4399e-03,  1.3937e-02, -3.3179e-02,  4.0461e-02,
        -3.1880e-02,  6.4492e-03,  6.0355e-03, -2.7819e-02, -7.4351e-02,
        -5.3170e-02, -7.9592e-02, -3.0133e-02,  2.7070e-02, -1.5751e-02,
        -4.8386e-03, -1.3120e-02, -5.7183e-02,  2.0625e-02,  3.4714e-02,
         2.1150e-02, -9.5332e-02, -8.2080e-02,  2.3911e-02,  5.3298e-02,
        -7.3405e-02, -9.1094e-03, -3.9370e-02,  1.8139e-02, -6.0592e-02,
         1.2654e-02, -1.7347e-02, -2.7792e-02, -1.4431e-02,  1.7151e-02,
         2.8065e-02, -3.6322e-02, -2.1867e-02,  1.4305e-03,  3.6528e-02,
         2.5914e-02,  1.9917e-02, -5.9702e-02, -5.0098e-02, -5.6331e-03,
        -6.3057e-02, -1.2070e-02, -1.6831e-02, -2.5532e-02,  8.5733e-02,
         3.7801e-02,  2.8635e-02,  5.3134e-02, -6.1810e-02, -4.4343e-02,
        -6.9733e-02, -2.0240e-03,  7.8664e-03, -3.0743e-03, -1.5851e-03,
         8.3178e-03,  4.4983e-02, -5.7173e-02, -3.7285e-02,  3.5458e-03,
         9.0360e-02, -5.2260e-02,  2.6192e-02,  2.7662e-02, -9.6391e-03,
        -1.2950e-02,  1.0944e-01,  4.4497e-02, -4.9483e-02, -9.2012e-02,
         2.9951e-02, -2.5255e-02,  8.0000e+00,  2.2000e+01,  1.2000e+01,
         7.0000e+00,  3.0000e+00,  9.0000e+00,  2.1000e+01,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  2.5000e+01,  1.2000e+01,  2.1000e+01,
         3.4000e+01,  1.0000e+00,  2.0000e+00,  1.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  2.0000e+00,  3.0000e+00,  4.0000e+00,
         2.0000e+00,  2.0000e+00,  3.0000e+00,  4.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00])

Tensors: representations of the second verses of the test couplets.

y_test = []
for indeks_wersu_pierwszego in range(len(zanurzenia_testowe_out)):
    y_test.append(
        torch.cat(
            (
                zanurzenia_testowe_out[indeks_wersu_pierwszego],
                torch.from_numpy(poczatki_testowe_out[indeks_wersu_pierwszego]),
                torch.from_numpy(konce_testowe_out[indeks_wersu_pierwszego]),
                torch.from_numpy(tony_testowe_out[indeks_wersu_pierwszego])
            )
        )
    )
print(len(y_test))
print(y_test[0].shape)
print(y_test[0])
1490
torch.Size([617])
tensor([-4.2662e-02, -3.1708e-02, -5.6512e-02,  4.0084e-02, -3.8748e-02,
         9.6097e-03,  4.7008e-02,  7.5668e-02, -1.3552e-02,  1.1450e-02,
         7.6131e-02, -2.4726e-01, -2.2119e-02, -3.7263e-02, -1.6699e-03,
        -2.8011e-03, -7.1475e-03,  1.0944e-02,  1.0504e-02, -1.9638e-02,
         3.1485e-02, -6.9084e-03, -2.0084e-02, -2.8139e-02, -2.1698e-02,
        -4.5725e-02,  6.2876e-02, -2.4744e-02, -5.4077e-02,  1.1777e-02,
         3.2030e-02, -6.9665e-03,  1.5294e-02, -8.0923e-02,  2.8682e-02,
         6.8040e-03, -3.3103e-03,  2.6560e-02, -6.0841e-02,  2.1151e-03,
        -3.2793e-02,  1.4114e-02, -3.0558e-02, -6.7261e-02, -7.8080e-02,
        -5.6005e-02, -2.4466e-02,  3.1131e-02,  1.2249e-02, -2.6226e-02,
         5.6157e-03, -1.4219e-02,  6.1117e-02,  8.7854e-02,  3.2294e-02,
        -7.5786e-02, -3.8687e-03,  5.8290e-02, -1.8115e-02,  9.2157e-03,
        -1.9775e-02, -1.4765e-02, -8.1864e-02, -5.2147e-02, -3.3906e-02,
         4.7297e-02,  1.7286e-02, -3.8963e-02, -7.2971e-03,  4.6875e-02,
        -7.3317e-02,  2.3185e-02, -1.0216e-02, -4.6695e-02, -1.0624e-02,
        -8.6403e-02,  1.0679e-02,  3.8384e-02, -2.0390e-02, -4.8561e-02,
         1.7503e-02, -2.4822e-02, -1.0118e-01, -1.1536e-02,  1.2367e-02,
        -2.8817e-02,  3.8065e-02, -4.8299e-03,  3.6943e-02, -8.7723e-03,
        -3.3175e-02, -2.9473e-02, -1.4506e-02,  4.3669e-02, -1.3243e-02,
         9.1637e-03, -5.2170e-02,  6.5091e-02, -6.0895e-03, -8.3091e-02,
         4.1754e-03, -8.1591e-03,  1.3681e-02,  4.9336e-02,  5.1179e-02,
         3.0331e-02,  8.2378e-03,  3.4116e-03,  8.7714e-03,  2.6082e-02,
        -5.9453e-02,  5.4098e-02,  1.5604e-02,  6.1685e-02, -6.5803e-02,
        -4.3957e-03,  2.8998e-02,  7.4083e-02, -7.5646e-02,  9.9494e-03,
        -3.0923e-02, -4.0035e-03,  2.6690e-02, -1.7936e-02,  5.9101e-02,
        -2.6199e-02, -8.9598e-03, -3.3770e-02,  2.5304e-02, -2.2467e-02,
        -8.5894e-03, -6.5153e-02, -1.8516e-02, -3.2236e-02, -6.9214e-02,
        -6.8273e-02, -6.8416e-03,  3.0168e-02,  2.6452e-02,  2.7235e-02,
        -1.4605e-02,  4.0644e-02, -3.1382e-02, -6.6872e-02,  3.6762e-02,
         3.3090e-02,  4.1689e-02,  6.2443e-02,  1.1023e-01,  4.5009e-02,
        -5.9660e-02, -1.9799e-02, -2.1019e-02, -1.4585e-02, -4.9002e-02,
         6.7098e-03,  4.8637e-02,  2.6957e-02, -1.2555e-01, -4.4153e-02,
        -3.7129e-02, -3.5815e-03,  3.7462e-03, -3.9305e-02,  6.6185e-03,
        -5.7863e-03, -4.5151e-02,  5.8374e-02, -7.9883e-02,  5.8874e-02,
         2.9350e-02, -3.0508e-02, -5.7538e-02,  1.4630e-02,  1.7199e-02,
        -4.2483e-02, -6.1783e-02,  1.8635e-02, -5.8922e-03,  3.2036e-02,
         1.1720e-02,  2.9453e-02, -4.1426e-02, -1.3919e-04,  7.4231e-02,
         4.0965e-02,  5.2020e-02, -8.5824e-03, -5.0865e-02,  3.3741e-02,
         3.1936e-02, -2.8163e-02,  2.5174e-02, -6.6295e-03,  5.4489e-03,
         4.8801e-02, -3.4366e-02,  1.5345e-02, -4.9533e-02,  4.1175e-02,
         1.0052e-02, -8.0770e-03,  1.6990e-03,  2.7498e-02, -2.0195e-02,
         7.3058e-02, -1.3125e-02,  5.5075e-02,  1.0222e-02, -6.1044e-02,
         1.6483e-02,  1.7518e-02, -3.5582e-02,  6.5297e-03,  4.8142e-02,
        -9.1526e-03, -2.5292e-02, -9.2555e-02, -2.7960e-02, -6.7617e-02,
        -2.7919e-02,  2.5797e-02, -3.3792e-02, -1.7559e-02,  5.0696e-03,
         1.9502e-02,  9.8111e-02, -3.5467e-02, -7.8175e-03, -1.3069e-02,
         1.1869e-02, -1.2426e-02, -3.8581e-02,  1.3946e-02,  8.5697e-02,
         5.9456e-03, -7.0115e-03,  3.2226e-03,  3.0390e-03, -5.4671e-03,
        -4.7213e-02,  3.5392e-02, -1.5239e-03,  7.3812e-02, -1.7222e-02,
        -1.2772e-03,  2.1770e-02, -4.6258e-02,  3.5956e-02,  2.6292e-02,
         5.5864e-03,  3.9837e-03,  1.4386e-02, -3.0420e-03,  1.6326e-02,
        -4.3116e-02,  6.0202e-04, -6.6577e-02,  4.4794e-02, -6.0236e-02,
         3.3927e-03,  9.1377e-02,  6.7364e-02, -1.6208e-02, -4.1574e-02,
         3.2682e-02,  7.2001e-03,  2.8854e-02, -2.5179e-02, -4.5137e-02,
         5.7746e-03,  3.8036e-02, -1.8377e-02,  5.4035e-02,  1.5138e-02,
         4.5383e-02,  1.1084e-02, -2.0204e-02, -1.0160e-02,  1.8784e-02,
         9.1744e-03,  6.3865e-02, -6.9441e-02, -3.0443e-02,  1.0225e-02,
         9.9409e-03, -1.1863e-01, -2.9981e-02,  5.5444e-02,  3.4524e-03,
         4.7667e-02, -4.6273e-02,  5.8857e-02,  6.7802e-02, -1.0152e-03,
        -2.3958e-02,  2.7715e-02, -1.5837e-02,  3.0255e-02,  2.7764e-02,
        -8.5772e-03, -5.2482e-03, -3.4041e-02, -1.7598e-02, -2.3632e-02,
        -1.9890e-02,  9.4271e-03,  5.0524e-02, -3.7680e-02,  8.0661e-02,
         3.0959e-02,  2.4361e-02,  2.6617e-02, -1.4361e-02,  1.9438e-02,
        -3.3619e-02,  3.0872e-02,  6.3582e-03, -1.4521e-03, -5.6649e-03,
         6.3105e-03,  4.5886e-02, -9.0654e-02, -3.0558e-02,  1.0905e-01,
        -5.4971e-02,  3.0258e-02, -4.5004e-02,  4.7370e-02,  4.0472e-02,
        -1.0167e-03, -2.9013e-02,  6.6706e-03,  1.3320e-02,  2.7442e-02,
        -1.4620e-02, -3.8887e-02, -2.3700e-02,  6.3660e-02,  5.3285e-02,
         9.2250e-03,  1.3351e-02, -6.2697e-02,  4.8761e-02,  6.6263e-02,
        -5.4074e-03, -7.5083e-02,  3.8809e-03, -3.1598e-02, -2.8409e-03,
        -2.3826e-02,  1.9625e-02,  4.6371e-03, -4.6053e-02, -1.1388e-02,
        -9.7393e-02,  2.5569e-02,  7.0440e-04, -1.6392e-02, -1.7159e-02,
        -1.8155e-02,  1.1288e-02, -6.0118e-02, -5.2117e-02, -2.0518e-03,
         1.2293e-02, -4.8702e-02,  5.0681e-02, -7.8261e-03, -6.0259e-02,
         2.5790e-02, -2.5972e-02,  1.4556e-03,  1.5539e-02,  2.5964e-02,
         4.1747e-02, -4.6959e-02,  4.5961e-02,  1.2592e-02,  1.6024e-02,
        -7.0781e-02,  2.9707e-02, -1.9970e-02, -2.4453e-02,  1.0534e-01,
        -3.5986e-02, -1.1281e-02,  3.6299e-02,  7.7332e-03, -8.4582e-03,
         1.0414e-01, -7.5003e-02,  1.8392e-02,  2.5007e-02,  5.3709e-05,
         1.1661e-02,  5.3652e-02, -4.0520e-03, -2.6330e-02, -1.1297e-01,
         2.0622e-02, -1.1506e-02, -3.9126e-02, -3.2559e-02,  1.6903e-02,
        -7.6179e-02,  1.0538e-01, -3.3585e-02,  2.9592e-02, -9.8638e-02,
         2.3429e-01, -1.2447e-02,  4.3283e-02, -5.9157e-02,  9.1898e-02,
         2.2950e-02, -5.2114e-02, -2.2760e-02, -1.0465e-02, -1.1089e-01,
         2.2758e-02,  3.4435e-02,  2.1491e-02, -5.0505e-02, -1.5447e-02,
        -1.0494e-02,  1.6680e-02,  2.6068e-02, -9.1764e-02, -4.0106e-03,
         4.8605e-02, -2.4436e-02,  1.7709e-02,  1.9984e-02,  4.5895e-02,
         7.1538e-02,  6.0457e-02, -5.3648e-02, -2.2555e-02, -7.4055e-02,
        -3.8501e-02, -5.9608e-03,  2.8740e-02, -2.4337e-02,  1.9937e-02,
         4.6275e-02, -6.1770e-02,  2.4231e-02, -5.1672e-02,  3.2617e-02,
         4.1688e-02, -5.5629e-02,  4.5233e-02,  2.8302e-02,  7.1679e-02,
         3.3194e-02, -3.8711e-02,  2.4011e-02,  7.0962e-02,  2.2012e-02,
        -2.7202e-02,  3.5133e-02,  8.8852e-03,  1.1321e-02,  5.4091e-03,
         4.8605e-04,  9.4150e-03,  1.7323e-02, -2.4181e-02, -3.8130e-02,
        -3.6088e-02,  1.1920e-03, -4.5289e-02, -2.7036e-02,  4.3711e-02,
        -4.3235e-02, -9.8394e-03, -1.6918e-02, -8.2358e-02,  1.1723e-02,
        -2.4647e-02, -1.4637e-02,  2.5583e-02, -6.2279e-02,  5.4639e-02,
         5.6699e-02,  3.7210e-02,  3.1042e-02, -6.2109e-02, -8.3125e-02,
        -5.3028e-02,  2.0527e-02, -2.1677e-02, -7.7717e-02, -3.5835e-02,
         1.5833e-02,  3.2097e-02, -9.4851e-02,  2.1548e-02, -1.4085e-02,
         1.0706e-02,  1.0919e-03, -2.1583e-02,  7.1433e-03, -5.3452e-02,
        -3.6993e-02,  7.3571e-02, -2.1420e-02, -2.2431e-03, -4.2342e-02,
         6.1201e-02,  2.1760e-02,  1.6000e+01,  2.2000e+01,  2.2000e+01,
         2.2000e+01,  2.2000e+01,  1.3000e+01,  1.7000e+01,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  2.4000e+01,  2.1000e+01,  1.0000e+00,
         2.3000e+01,  2.5000e+01,  3.2000e+01,  2.0000e+01,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  2.0000e+00,  1.0000e+00,  3.0000e+00,
         3.0000e+00,  3.0000e+00,  1.0000e+00,  1.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,  0.0000e+00,
         0.0000e+00,  0.0000e+00])

Input for the neural network.

X_test = []
Y_test = []
for indeks_wersu_drugiego in range(len(x_test)):
    indeksy = sample(range(len(y_test)), 3)
    if indeks_wersu_drugiego not in indeksy:
        indeksy[0] = indeks_wersu_drugiego
    for k in indeksy:
        X_test.append(
            torch.cat(
                (x_test[indeks_wersu_drugiego], y_test[k])
            )
        )
        if indeks_wersu_drugiego==k:
            Y_test.append(1)
        else:
            Y_test.append(0)

print(len(X_test))
print(X_test[0])
print(len(Y_test))
print(Y_test[0])
4470
tensor([-0.0803, -0.0044, -0.0786,  ...,  0.0000,  0.0000,  0.0000])
4470
1

Predictions of the neural networks.

przewidywania_klasyfikatora = klasyfikator.predict(X_test)
przewidywania_regresora = regresor.predict(X_test)
print(numpy.min(przewidywania_regresora), numpy.max(przewidywania_regresora),
      numpy.mean(przewidywania_regresora), numpy.median(przewidywania_regresora))
-1.250942997983307 2.1043229513866173 0.33757049502639574 0.28136695410859397

Accuracy on the prepared test data.

### MLPClassifier

licznik = 0
mianownik = 0
for indeks_wersu_pierwszego in range(len(przewidywania_klasyfikatora)):
    mianownik+=1
    if przewidywania_klasyfikatora[indeks_wersu_pierwszego]==Y_test[indeks_wersu_pierwszego]:
        licznik+=1

print(licznik/mianownik*1.0)
0.8402684563758389
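For reference, the same accuracy can be obtained with scikit-learn's built-in helper. A minimal sketch (not an original notebook cell), assuming przewidywania_klasyfikatora and Y_test from the cells above:

from sklearn.metrics import accuracy_score

# Equivalent to the manual counting loop above.
print(accuracy_score(Y_test, przewidywania_klasyfikatora))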
### MLPRegressor

# Count a match when the regressor output clears a 0.5 threshold
licznik = 0
mianownik = 0
for indeks_wersu_pierwszego in range(len(przewidywania_regresora)):
    mianownik+=1
    if Y_test[indeks_wersu_pierwszego]==1 and przewidywania_regresora[indeks_wersu_pierwszego]>0.5:
        licznik+=1
    elif Y_test[indeks_wersu_pierwszego]==0 and przewidywania_regresora[indeks_wersu_pierwszego]<0.5:
        licznik+=1

print(licznik/mianownik*1.0)

# Count a match when the regressor output clears a 0.9 threshold
licznik = 0
mianownik = 0
for indeks_wersu_pierwszego in range(len(przewidywania_regresora)):
    mianownik+=1
    if Y_test[indeks_wersu_pierwszego]==1 and przewidywania_regresora[indeks_wersu_pierwszego]>0.9:
        licznik+=1
    elif Y_test[indeks_wersu_pierwszego]==0 and przewidywania_regresora[indeks_wersu_pierwszego]<0.9:
        licznik+=1

print(licznik/mianownik*1.0)
0.8217002237136465
0.7507829977628635
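The two threshold checks above can also be written in vectorized form. A minimal sketch (not an original notebook cell), assuming przewidywania_regresora and Y_test from the cells above; boundary ties at exactly the threshold aside, it matches the loops:

import numpy

etykiety = numpy.asarray(Y_test)
for prog in (0.5, 0.9):
    # A pair counts as a hit when the regressor output lands on the same side
    # of the threshold as its 0/1 label.
    trafienia = (przewidywania_regresora > prog) == (etykiety == 1)
    print(prog, trafienia.mean())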

A metric for evaluating the model's proposals of the second verse.

If the expected correct verse is not among the proposals, the metric takes its minimum value of 0.0.

The fewer incorrect proposals, the higher the metric score.

If the model proposes exactly one verse and it is correct, the metric takes its maximum value of 1.0.

def jagosz_score(spodziewany_wers,proponowane_wersy):
    if spodziewany_wers in proponowane_wersy:
        licznik = 1
    else:
        licznik = 0
    mianownik = len(proponowane_wersy)
    if mianownik==0:
        mianownik=1
    return licznik/mianownik*1.0
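A quick sanity check of the three cases described above (a minimal sketch, not an original notebook cell; the single-letter strings are made-up placeholders):

print(jagosz_score("B", ["A", "C"]))       # expected verse missing -> 0.0
print(jagosz_score("B", ["A", "B", "C"]))  # present among 3 proposals -> 1/3
print(jagosz_score("B", ["B"]))            # single correct proposal -> 1.0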

A variant of the metric for an entire dataset.

def jagosz_score_dla_zbioru(krotki):
    licznik = 0
    mianownik = 0

    for k in krotki:
        spodziewany_wers = k[0]
        proponowane_wersy = k[1]
        if spodziewany_wers in proponowane_wersy:
            licznik += 1
        mianownik += len(proponowane_wersy)

    if mianownik==0:
        return 0
    else:
        return licznik/mianownik*1.0
wybrane_dane_testowe = sample(range(len(x_test)),10)
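A corresponding sanity check for the dataset-level variant (a minimal sketch, not an original notebook cell; the tuples are made-up placeholders):

krotki_przyklad = [
    ("B", ["A", "B", "C"]),  # correct verse among 3 proposals
    ("D", ["E"]),            # correct verse missing
]
# 1 hit out of 4 proposals in total -> 0.25
print(jagosz_score_dla_zbioru(krotki_przyklad))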

MLPClassifier

It proposes every verse for which the model's prediction is 1.
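The cell below scores every candidate with a separate predict() call, which is simple but slow. For reference, the same decision can be made for all candidates of one first verse in a single batched call; a minimal sketch (not the notebook's original cell), assuming klasyfikator, x_test, y_test and wybrane_dane_testowe from the cells above:

import numpy
import torch

indeks_wersu_pierwszego = wybrane_dane_testowe[0]
# One row per (first verse, candidate second verse) pair.
pary = torch.stack([
    torch.cat((x_test[indeks_wersu_pierwszego], kandydat)) for kandydat in y_test
])
oceny = klasyfikator.predict(pary.numpy())
mozliwe_indeksy_wersu_drugiego = numpy.flatnonzero(oceny == 1).tolist()
print(len(mozliwe_indeksy_wersu_drugiego))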

for indeks_wersu_pierwszego in wybrane_dane_testowe:
    wers_pierwszy = testowe["in"][indeks_wersu_pierwszego]
    print("wers pierwszy:\t\t", wers_pierwszy)
    poprawny_wers_drugi = testowe["out"][indeks_wersu_pierwszego]
    print("poprawny wers drugi:\t", poprawny_wers_drugi)
    print()

    reprezentacja_wersu_pierwszego = x_test[indeks_wersu_pierwszego]
    mozliwe_indeksy_wersu_drugiego = []
    for indeks_wersu_drugiego in range(len(y_test)):
        reprezentacja_wersu_drugiego = y_test[indeks_wersu_drugiego]
        wejscie_do_MLP = torch.cat((reprezentacja_wersu_pierwszego, reprezentacja_wersu_drugiego))
        if klasyfikator.predict([wejscie_do_MLP])[0] == 1:
            mozliwe_indeksy_wersu_drugiego.append(indeks_wersu_drugiego)

    proponowane_wersy = [testowe["out"][i] for i in mozliwe_indeksy_wersu_drugiego]
    print("proponowane drugie wersy:", proponowane_wersy)
    print("czy poprawny wers jest pośród proponowanych wersów?:", poprawny_wers_drugi in proponowane_wersy)
    print("liczba proponowanych wersów:", len(proponowane_wersy))
    print()

    print("wynik przyjętej metryki:", jagosz_score(poprawny_wers_drugi, proponowane_wersy))
    print()
    print("-"*50)
    print()
wers pierwszy:		 花好月圆涵画意
poprawny wers drugi:	 年丰人寿沁诗声

proponowane drugie wersy: ['谁将片语问何求', '他乡月好俺思亲', '三杯白酒乐成仙', '峰平径长难藏景', '海深寻秘展雄才', '青山醉向一樽横', '好同蝉窟映三潭', '万般幻态杳随风', '须先百忍学张公', '珠帘难掩月多情', '莺燕对舞艳阳天', '青山四面纳千流', '一心二用两头空', '闲庭信步哼欢歌', '雾里青山画屏开', '殷殷老叶护花红', '偶观雨燕栖寒檐', '满头霜雪和新梅', '居身常抱玉壶清', '竹林满山景隽幽', '一腔热血死难消', '清风两袖带回家', '日月同辉光景嫣', '三阳开泰颂廉明', '小楼吹砌玉生寒', '锦葵昂面为迎光', '浮沉历尽许由谁', '闹中取静看擂台', '早将秋韵入诗怀', '满身花影倩人扶', '千杯浊酒醉恒长', '心牵雨骤夜归人', '与君同作太平人', '青灯久作故人看', '猴腾广宇绽琼花', '好留明月九千秋', '花狎春云露搅和', '三联书韵醉今生', '风临荷盏窃清香', '花好月圆夜长明', '河清海晏让人迷', '心宽纳海老夫能', '扬鞭跃马马行空', '鹰翔蓝宇戏搏云', '甘棠播爱岁流金', '胸中消尽是非心', '弄潮帆影港城新', '英年奋进惜时光', '堤前柳浪露春光', '故人书自日边来', '文联叶问斩妖魔', '梅香葱岭缀长虹', '万般气象壮龙年', '催开玉蕊艳无边', '清塘浴月鹤逐风', '卷帘烧烛看梅花', '圆缺朗月也浮名', '思亲情愫贯通篇', '梧桐叶上得秋声', '一杯浊酒两篇诗', '动动脑筋动静无', '新朋正续进行时', '一指清凉尽染秋', '日移松影过禅床', '无田有业不为贫', '一溪柳绿到谁家', '风临柳榭露春心', '西湖乡梦约谁寻', '远山终日送余霞', '纵情狂乱毁根基', '笛声浅扣暗推窗', '一川杨柳笼和风', '洗出芙蓉九点青', '半空摇晃寻常仁', '追求亮丽美人图', '雪融春到春融雪', '常将劲节负秋霜', '国持德政著宏篇', '千般爱意眼中留', '绿水卧听新月明', '兰心未老梦如初', '英雄力困也求人', '且由明月洗尘心', '夜灯勤礼塔中仙', '赋浓夏盛寓秋实', '翠柳清风伴杏娇', '蛇听燕语颂春光', '四方称霸一魔方', '相思一点老了谁', '花间酌酒赏蝶飞', '磋砣无奈怨摽梅', '清泉有趣自通融', '山长水远恨重重', '游春岂料梦成真', '年丰人寿沁诗声', '浮舟水面尽飞花', '捉刀李白斩斯文', '三令五申还有贪', '霜飞两鬓孔明灯', '半帘秋梦鸟也酥', '得心应手手头宽', '子孙常读未烧书', '官居宰相望王侯', '元兴世盛展宏图', '长将远景引天边', '何堪永夜漏更寒', '春风惠我也惠人', '扁舟轻荡水云长', '常教翰墨作鼓吹', '荷描夏画日钤章', '澄天月隐星今宵', '小桃几树鸟啼红', '皇城玉阙夕阳斜', '花贴幼子悦童心', '轻舟破浪过千山', '这边环境安宁', '修身松竹有高风', '一言九鼎定心神', '文庙弦音奏凯频', '晓霞含愁看早梅', '风吹杨柳翠还柔', '一丛老竹梦于胸', '时临峻岭采浮云', '观光农业载风情', '钻杆穿地唱欢歌', '松风竹韵多抒情', '掷笔从戎壮士名', '吹牛煮海火收兵', '梅花傲雪迓长春', '出门去白面书生', '无边相思似流云', '金声玉振展奇才', '一般落魄一般人', '黄金灿灿冷如冰', '春深似海梦无痕', '文中已现老成心', '何堪心乱雨难读', '石泉流水洗椰瓢', '吕布吕蒙常用兵', '无休往日浩如烟', '七年对友队尤宏', '相思不减病扶墙', '金龙对舞戏中来', '水城画卷展宏图', '三春经纬织民图', '江心美景湛空明', '蔼峰亦寄诗仙情', '江山忧患老英雄', '收篇难阻浪涛狂', '黄粱入梦悟尘心', '火牛曾胜敌千军', '沉年古木韵临风', '风流绝壁写春秋', '山窗月透一痕青', '千篇一律竞同声', '谁到篱前问姓名', '闲聊岁月万年篇', '义常若水润人心', '廉风动地畅山河', '半坡翠竹耸蓝天', '一天飞絮舞春风', '帆连水色接天涯', '须从肝胆识英豪', '无须雪尽水开光', '人生一笑尽良朋', '心情更比落花差', '灯残襟冷感情无', '杏雨又同桃雨飞', '碧峰犹冷寺前春', '千竿节气叠浪花', '莺恋柳色月常依', '空樽对月直哆嗦', '禅房墨案写梅香', '深感人情冷似冰', '千枝红杏闹春光', '草逢蓬室至家中', '廉政为民常山情', '炎黄子孙志超群', '月满楼台鸳梦香', '霞飞洞外洞飞霞', '春歌大地爱国情', '隔村香送稻花肥', '睡梦中难辨东西', '一壶沧海洗微名', '丛林花草总痴心', '浓妆淡抹总相宜', '清风临案窃书魂', '长江十堰显神功', '汉宫春风暮烟中', '银河阻断鹊双飞', '东风无处不扬花', '离合不损月分明', '红梅傲雪笑枝青', '阿咸才俊翠壶冰', '春逢喜气盛迎门', '亭空不碍鸟穿行', '天长日丽艳桃花', '杏风桃韵语花辞', '收来花信燕声中', '闻名色变探花郎', '收聚白露仙人梦', '弥陀含笑放光辉', '几杯醉后月光高', '泠泠若水慧兰馨', '方塘九夏溢荷香', '转身应把泪珠弹', '安心是药更无方', '水畔青田走马牛', '五申三令不成规', '幅巾嘉论有清风', '三生有幸遇知音', '一窗竹影又经风', '风急烟轻水送舟', '无情霜剑毁金枝', '花间蕊貌润如珠', '不甘卖命换虚名', '风回柳苑韵邀风', '烟波浩渺任龙舒', '昙花怎晓夜幽长', '风凉彼岸柳垂帘', '洁烹芬美进嘉宾', '水上石头泵可行', '名城泉水润京腔', '搏风远去水云间', '两行悠然静临风', '悲秋远去一孤鸿', '夏种稻田要维家', '花好焉无惬意时', '两行白鹭上青天', '无缘去日怎追风', '笑谈成败慎出兵', '岗连高鼎天为峰', '春来江水绿如蓝', '新朋草舍对趣联', '愁恨两分杨柳风', '一心发愿待君归']
czy poprawny wers jest pośród proponowanych wersów?: True
liczba proponowanych wersów: 236

wynik przyjętej metryki: 0.00423728813559322

--------------------------------------------------

wers pierwszy:		 人家不必论贫富
poprawny wers drugi:	 唯有读书声最佳

proponowane drugie wersy: ['人如无欲意何求', '举帜遵章共展才', '岩上青藤攀壁升', '济助家乡晃美名', '草内多藏五步蛇', '把酒依然大丈夫', '病入膏肓有治疗', '火山光灿地遭殃', '毅力凝成跨海桥', '后乐先忧范弟昆', '疑是瑶台月下逢', '鼓瑟还从曲里来', '雀跃鱼翔谐乐多', '一心二用两头空', '悦己悦人悦世间', '木讷的人难启迪', '含露芙蓉醉海棠', '酒醉不如伴月眠', '草木逢春年年生', '画就雾云笔墨香', '悔被浮名牵累多', '笔盖古今三千年', '克俭尚勤播誉名', '闹中取静看擂台', '庖丁自有解牛刀', '规矩者应晓方圆', '言少言多尽美谈', '璋玉无瑕耀祥光', '花好月圆夜长明', '国泰民安幸福多', '然乎者一字乾坤', '沸沸扬扬世界杯', '慧智还从实践来', '电力惠民百业兴', '又何必三日闻香', '千树争高有健才', '学问无穷博古今', '富裕安康福万家', '国运弥盛史弥远', '时急方须济世才', '公正不阿辩是非', '国展宏图烈士欣', '笔点涟漪见水平', '弱不禁风是书生', '玉笛金弦赞独生', '怪句洞开谜底联', '不叫俗尘污本真', '喜报频传战士家', '日移松影过禅床', '小小儿郎立路中', '意气风发马晓春', '无田有业不为贫', '涛落沙新畔易留', '听竹尤增几许清', '大丈夫能屈能伸', '武夷阳羡品俱馨', '莫指云山认故乡', '韧节有意杜虚名', '洗出芙蓉九点青', '落日也将暮色描', '潋滟江波扑簌风', '人且清心同步行', '名享三奇显祖公', '国持德政著宏篇', '世间最难得弟兄', '枝上空吹故国风', '笑问书生君是谁', '赋浓夏盛寓秋实', '往事依然笔底新', '小觑浮名对酒歌', '早出晚归皆自然', '却为心肝伤脑筋', '达业欣成万户楼', '重义轻财德道深', '道士身怀童子功', '热血沸腾意若何', '户内美色呈辉', '宝地佛临济世人', '万里山光收画屏', '防不胜防贼近身', '知耻明荣胸臆宽', '映月迎风不肯行', '笔墨书空自忘机', '常想旁通不对头', '一意孤行不回头', '路远始于跬步间', '江水源源发电来', '关羽无停觅长兄', '小可参禅入几分', '九世同居号义门', '处世无方只守诚', '文庙弦音奏凯频', '马舞龙韵续华章', '关外又开一朵奇葩', '大势所趋水如蓝', '白发无情忽上头', '掷笔从戎壮士名', '夫再礼让妻再争', '绝代佳人不入俗', '剌史同游忆月明', '玉液溶溶滴露来', '骨头坚硬好八连', '黄金灿灿冷如冰', '腹有奸谋即兽心', '德作福田三世修', '吕布吕蒙常用兵', '百年盟约好时光', '杜曲幸有桑麻田', '竹下新笋一色鲜', '俯首甘为孺子牛', '火牛曾胜敌千军', '圣子甘心为罪囚', '小弟欢吟蜀语联', '出口频频亿万吨', '一旦出名人气高', '联内音声欠古风', '中散孤高故不凡', '谁到篱前问姓名', '马上蓝天宇拓宽', '山水相依诗易描', '美丽季节万里春', '部长铁男不定还', '心若非良必惹悲', '慷慨悲歌魏晋风', '傲物诗文有劲风', '无主孤魂百姓怜', '死后欣然上八仙', '竹叶入唇醉耋龄', '白面书生尽奶油', '岁变难更意里人', '深感人情冷似冰', '刻炬成诗韵可观', '早上欣逢笔底兄', '炎黄子孙志超群', '古韵古风誉古今', '两制宏谋百代功', '处世何须带伪装', '杉木果林桃李荣', '日照山东处处蓬莱', '菊花从此不须开', '华夏农民开喜镰', '变易何难志士心', '离合不损月分明', '古木生芽不是春', '老虎苍蝇一起除', '暑气无声入隐溪', '落草潜伏十字坡', '御世今惟不动尊', '是大英雄自虚怀', '竹色四时也不移', '美色必将随后衰', '世事浮云感慨多', '万里清风驻洁江', '开卷细同贤者谋', '积德途中永不停', '安心是药更无方', '水畔青田走马牛', '雅意如茶自在闲', '吉日迎亲有贵人', '必须意识玩绵拳', '归家且遂十年心', '诚信经营财路宽', '满腹心言共汝谈', '本祖巍峨八剑堂', '家兴国兴万事兴', '不甘卖命换虚名', '山水襟怀我不如', '百千万亿归于零', '巢就休询燕几时', '误将弟子入迷宫', '河朔膏腴古督亢', '南粤万家景色新']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 172

wynik przyjętej metryki: 0.0

--------------------------------------------------

wers pierwszy:		 岳麓求知精通科技千秋重
poprawny wers drugi:	 湖湘原道感悟人生万世雄

proponowane drugie wersy: ['瑶花雅洁馨香海国铸精神', '双龙闹海茫茫大海我争雄', '龙传人赞华夏名镇迎东风', '慈心抒自在手慈眼慈甘露慈', '贤也圣也慢步详窥玉洞云', '羊羔跪乳乌鸦哺母且思恩', '年年端午擂鼓划舟夺锦标', '情歌依旧仍随秋水染夕阳', '缘善维贤续得源流此一方', '休闲有乐掌中点击地球村', '千师作赋笔下新村韵有余', '织天织地织出人间一个家', '冰天雪地寒鱼破镜钓江翁', '瑶台丽日扬善弘仁一片天', '呼朋赏月月行瀚海碧波间', '高朋满座倾心叙旧诉衷肠', '民生是本欣挥铁笔写风流', '和平共处五州友好共双赢', '满腔诚信长赢福利四时春', '诗联并进渝水巴山起异军', '人生不醉且看百年长恨歌', '穷途哭恸阮籍猖狂独咏怀', '杏林栽福地仁心妙术起沉疴', '台阶通化境佛寺巍巍气韵深', '一泓碧水闪闪翻波四季清', '有彩有华偶得佳联少雅人', '万锋笔健联台宿将舞龙文', '任丝织絍如果连编可纺纱', '慕拜袁黄奇门遁甲演五方', '落单鸿雁不回首疼坏西风', '水接桃源千载衣冠特堪尊', '八方铺锦绣紫燕娇啼羡物华', '塘边揽胜喜看绿水跃红鱼', '春溪赴梦入径带来山外情', '新企新社发展商机面貌新', '不怀医祖表里阴阳怎得通', '净化空间定让蓝天展笑容', '挥洒一身才气令岁月流芳', '寻梅雪岭无畏寒侵自有香', '琼花瑶叶雨浥芙蕖冉冉香', '皖吟风徽歌韵老村美景若诗', '家书有泪一行归雁向佳人', '和风二月燕剪裁红天地春', '读书堪备对好邀莲炬听更声', '羚羊跪乳孝亲报国展鸿遒', '中华娇子红塔山云烟贵烟', '字成一体大戟长枪跌宕书', '人言虽可信但防渭水混泾江', '年年七夕望月观星念恋人', '不带山不带水归途只带仙风', '勤习十载几案当知学子心', '神州筑梦四方创业业峥嵘', '弘扬传统开创未来再立功', '彩虹飞赤县道通八极路生金', '心存群众为民永作护旗人', '众生平等人我都从低处来', '江边楼上商女欢讴玉树歌', '湖面平和清心恬静意犹闲', '灭除瘟疫倾力堵封病染源', '楚山飞楚曲八方唱就楚风淳', '自己装车自负东京愧北京', '玉轮升碧海清辉广照出天然', '壮怀逸兴盛世鸿儒聚鹭园', '终日惟杜门蔬食经卷绳床', '我惭玉润时逢二月吊南州', '只身游燕赵淡泊无定水云舒', '执法文明素质高哪有霸王', '寒山一梦入耳钟声未必真', '如无真意休来假泪再丢人', '且移玉趾街坊人家结福缘', '云天碧水横练陈江七彩颜', '嫩竹舒新绿倚遍春风翠袖寒', '烘云托月水岸迢迢别样途', '司冬黑帝五湖四海欲凝冰', '皈依万象柳叶馥香传妙音', '心想即成创业顺利步步高', '心朝北斗祖国万岁路铺金', '新年缔良缘月圆人寿谱新歌', '平台屹屹出水蛟腾碧浪中', '四十年苦戍曾教瀚海变桑田', '山灵云逸泉流一脉抚瑶琴', '风亦软云亦淡独怜一地月华', '大呼小叫行住衣食快断流', '为环球献瑞沧桑洗礼万年冰', '篇篇墨语字字无非寂寞吟', '门生情切切敢捐大义铸心碑', '河声逐梦虽经九曲一条心', '站多和韵脚步踏欢快节拍', '三村海阔起碇悬帆赶早潮', '帆樯蔽日风送筝声多在船', '春光照大地九州共绘小康图', '心游翰海叹这般风月似醉似痴', '三农仁政乐浪九州动地来', '福临百姓家和业盛梦添香', '湖湘原道感悟人生万世雄', '亭自皇朝建青松擎月可知情', '意兴飞扬行舟更借一帆风', '莺鹂鸣柳恰有南风雁早乘', '风和牵细浪托盘荷畔捧玉珠', '挥扬旗帜复兴梦执政为民', '欢迎学者此道终须启后人', '千军同忾岂容鬼怪再猖狂', '一字桥头布谷偶听四五声', '一刊誉满誉骋楹联艺术家', '城苑真娇育德千秋桃李馨', '蟾光初照银桨徐摇万点星', '汇九霄圣脉犀江溢彩梦园芳', '丁香迎夏四面八方雨亦催', '东坡曾醉人间天上两婵娟', '山留菩境石鼓一悬梦万年', '归程渺渺涕泪常邀笑梦来', '梅影横窗瘦南枝微弄雪精神', '福音云外播心泉涌玉接灵源', '春心激动重温旧梦老地方', '民生有幸嘣出实心得惠仁', '做戏人看戏人戏内戏人看人', '涵秀沐风雨春风化雨润人心', '沐阳光雨露聚来陆海潘江', '飞扬梦想富强震耳宝鸡啼', '大道生辉养性修身万世春', '何须斗气眼下齐心破一曹', '立于天地处事当留一点真', '红光一色欢天喜地满堂春', '远镜微镜透镜反光镜常问伯奇', '胸怀税务戮力耕耘收税献丹心', '民情在抱两袖清风促和谐', '槐花怒放五月槐花醉九龙', '锤镰记取红色党旗血染成', '秦有十八子笔墨抒意论春秋', '魏都凤落凤鸣凤唱瑞名城', '婺星耀瑞彩阶下榴花照眼红', '千帆竞渡追梦宏开万里程', '浙水舞瓯水舞东方舞动神龙', '月月风风叫你顿首献感情', '一生唯德康强到老有余闲']
czy poprawny wers jest pośród proponowanych wersów?: True
liczba proponowanych wersów: 135

wynik przyjętej metryki: 0.007407407407407408

--------------------------------------------------

wers pierwszy:		 一城增富丽壮气宇千秋启数篇锦绣词章麟阁喜添文苑笔
poprawny wers drugi:	 百福祝祥和夸峥嵘万象惊满目辉煌金碧花都沉醉岭南香

proponowane drugie wersy: ['听竹赏梅以咏赊几分诗韵养三分淡泊丹青常寄竹梅清', '柳火煎茶柳翠鸣鹂柳营试马柳条赠别翠影千条咏赞多', '重描千卷蓝图更要靠山惜山靠水惜水当长思百代儿孙', '哭逝者青春忽碎痛矣十分十五六七韶华定格永恒', '填几行翠菊阅几阙丹霞诵几章朱岭吟成红谷春天', '事如云不甚详惟风骨惟馒头彼事情兮一时所啖尝耶', '天上人间于斯占尽更有丹摇翠涌烟霞遍染江山', '亲朋云集两三千看八方美景斑斓椿萱焕彩南岳峰巅竹生华', '皖山隔梦远且自横万里孤舟娥江暂泊更张不敢是乡音', '叹家无所业丧无所殡问为官世上几人廉洁似先生', '忠武祠堂希文谥法正气满寰中下则河岳上则日星', '凭栏问根祖何处是秦关汉阙兴衰以证看斯楼踞坐中州', '戎服读春秋亦英雄亦儒雅试认九霄正气常随奎壁焕光芒', '洵隽阳巨指主余社盟主全校事临危寄诗序开函如读岘山碑', '想必浮生犹有味君莫笑伍员卖唱秦琼卖马杨志卖刀', '禅堂肃穆回旋磬韵梵音弘扬佛法真经雄震乾坤', '盼你归来托雁捎两行书信菊花深处有人立尽斜阳', '邀客聆听胜地谐声诗中境界喜风传彩信鸟唱金歌', '韦驮传法后喜白龙拓境褐石遗痕紫溪流乳宝殿生辉', '右边杉立左视木偶似这样不肖弃材樵夫何妨劈面三刀', '大手笔宏开胜境植出葱茏巴蜀丰草长林两岸诗', '不择沙滩肥沃土可拒风侵何愁雨袭高怀落落谢春晖', '英魂不死倘狼烟再起东南鼓鼙掀海浪今朝卫土效文襄']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 23

wynik przyjętej metryki: 0.0

--------------------------------------------------

wers pierwszy:		 山地畅通用安全铺路
poprawny wers drugi:	 油田崛起为生产护航

proponowane drugie wersy: ['情牵大地春满人间', '珠圆玉润入口皆甜', '德行梦笔开盛世新篇', '柔水月光披野地天穹', '去期颐仅廿载后福无疆', '春来之明灯爆竹贴红联', '诗书启后勤俭传家', '雨打荷叶叶成泪滴滴成珠', '满身花影倩人扶', '任丝织絍如果连编可纺纱', '猴腾广宇绽琼花', '花明柳媚湖上长春', '调一湖春色染绿江淮', '河清海晏让人迷', '一联争首榜元眼花胪', '鸿才立世展鸿图', '莲花亲水意崇廉', '湖山叠韵入我诗囊', '姜维奇术揣摩诸葛计谋', '笔点涟漪见水平', '瑞通阆苑琼楼兴百轩', '笔如磨剑要藏锋', '砖雕雕壁画砖马腾空', '国持德政著宏篇', '劈开天地定人伦', '神驹腾跃吉祥年', '赋浓夏盛寓秋实', '东床配西席不是东西', '中华共颂贤臣', '桨声翻学海海载苦舟', '官居宰相望王侯', '共赏芦溪水高下相倾', '元兴世盛展宏图', '烘云托月水岸迢迢别样途', '五光十色文字之华', '这边环境安宁', '柳垂水面翠溶南北风', '小金龙瑞雪兆丰年', '诗礼之教家人利贞', '攀龙附凤欲攀彩凤缘', '梅乡舟里唱瘦月两回', '地连南北日星恒久晖', '水城画卷展宏图', '爱无边意无限父老堪亲', '度日如年席卷八荒', '热情周到锦上添花', '廉风动地畅山河', '辩雕春囿德莹秋天', '邀月吟诗诗藏梦海碧波里', '微言明义苦谏纠偏', '尘凡皆妄昧贪嗔痴愚', '满园桃李何言北大荒', '龙跃凤鸣南渡江边雅士多', '平野百里高山九重', '当知沦落也从容', '鸣钟食鼎甘田土之出', '平沙戏马雨声干', '手携一集质于通人', '碧柳迎春山河送惠风', '廉风集聚兴廉务必清廉', '弥陀含笑放光辉', '绿荫浓清山美有凤来仪', '金猴贺新岁岁岁平安', '律己循规永葆廉风', '飞腾雅典腾飞环球', '风回柳苑韵邀风', '美水美山美景美未来', '花样年华联若洒可钦']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 68

wynik przyjętej metryki: 0.0

--------------------------------------------------

wers pierwszy:		 随花归故里
poprawny wers drugi:	 伴梦眠老屋

proponowane drugie wersy: ['人懒几生尘', '壮志献江山', '清心长保真', '浪漫怕新闻', '草木已含英', '弦断梦难圆', '作业构三章', '道德五千', '四海奋人心', '功犹可迁', '兰气盈庭', '珍簟展方床', '贤媳举扇陪', '美誉眼前风', '仰百年师', '浓厚简约虚', '莫愁女儿红', '此味几人同', '头彩出中原', '撒豆成兵', '绿野寄仙踪', '倾城倾国', '智者忍违缘', '梅韵贺新年', '绿茵陈', '一街太平歌', '诗带好风吟', '赤水得玄珠', '月分老梅香', '高第煦春风', '禹甸沐春风', '抑抑威仪', '游子自存心', '心悟得真', '道理甚分明', '移山志不忘', '醉后赋离骚', '胡蝶飞南园', '缘去梦依然', '风笔绘春秋', '三江福寿图', '妙理贵躬行', '高处看浮云', '足写乾坤', '牖含遍岭春', '冬夜暖开心', '众号神君', '龙女牧羊', '随地皆春', '秋雨秋风', '党赐深恩', '池浅韵牵波', '宛在岱中行', '天禄谈经', '王府池子深', '寺与山争鲜', '贪后买官', '眉月静横窗', '酒醉好题诗', '冗鱼', '醉酒吐真情', '年年有盼头', '碧浪皱红霞', '淡雅雪边梅', '养性延年', '菡萏静生香', '伴梦眠老屋', '禅味涤心胸', '民以食为天', '初日临春虚', '恨别鸟惊心', '鹤立水中央', '福禄寿喜', '苔湿地刁皮', '鸟语落花山', '百岭见千娇', '才子佳人', '小曲品三春', '人我法皆空', '树葱茏', '艺高大胆人', '花荣上海人', '天地月常圆', '导义延仁', '时泰喜黎民', '而今当宝存', '春入鸟能言', '偏遇有情人', '大宴高轩', '白日奈我何', '无肉也能行', '上寿可期', '夜寂鸟啼空', '塞外朔风寒', '学子话春浓', '巧拙尚相悬', '两手作生涯', '方士信求仙', '中庭松桂姿', '品德讲道德', '出入有声名', '莺歌鹧鸪天']
czy poprawny wers jest pośród proponowanych wersów?: True
liczba proponowanych wersów: 102

wynik przyjętej metryki: 0.00980392156862745

--------------------------------------------------

wers pierwszy:		 游福地赏风光苏岭郴江铺锦绣
poprawny wers drugi:	 款嘉宾谈经贸南湘林邑创辉煌

proponowane drugie wersy: ['身名归泡幻抟风羽翼伤心岂独帝京篇', '陈家颜割落耳朵颜面才是东家', '悠悠矣少小离家潇洒人生梦已开', '坐中都是词杰酒半酣时眼更狂', '一时三刻下大雨免道士多啄狗头', '十年非忘本学子该当底气足', '台阶通化境佛寺巍巍气韵深', '香由心生念嫦娥娥寝不离桂花香', '六万数余银充库奉公素抱藿葵心', '款嘉宾谈经贸南湘林邑创辉煌', '八方铺锦绣紫燕娇啼羡物华', '但见波摇影荡不知何处是仙源', '客属同源客家共脉何处不生故土情', '离别时章柳折残山花静待来春', '强国兴邦关注三农百业展新猷', '赏山川斯方似画云中阆苑璨尧天', '梅花千万点报得人间锦绣春', '鲲鹏翔瀚宇激越高昂自在身', '啜甘须忆苦纵登高位犹纳清风', '南为火北为水自古水火怎相容', '东南窥胜境五峰接壤让他丰骨独高骞', '嫩竹舒新绿倚遍春风翠袖寒', '九州畅百川深深悟重和天下论治平', '河涌万顷碧浪太阳圣曲震神州', '江城子思渔父遥寄巫山一片云', '开怀八大味五味滋身三味养心', '浮萍漂泊水中花花中水水中花中', '更漏子蝶恋花千滴满见泪沙流', '桃源美景诗心自醉但邀竹下七贤', '心游翰海叹这般风月似醉似痴', '电城煤城林城一城发展舞龙头', '亭自皇朝建青松擎月可知情', '传家无别业惟薄田数亩旧书五车', '梅影横窗瘦南枝微弄雪精神', '今成瑰宝家藏一件历世定瓷倍足珍', '卢敖得道浮丘放鹤福地神仙去复来', '胸怀税务戮力耕耘收税献丹心', '德宏章贡修文悦礼敦古铄今', '琉球皇帝诏列屿飘零时时在故国神游', '傅说堪圆一梦得以兴邦果圣人', '动车牵北南绿城煤城双轨接龙', '荷叶一池满铺开澄碧坦荡人心', '盼美丽中国收入倍增成就小康']
czy poprawny wers jest pośród proponowanych wersów?: True
liczba proponowanych wersów: 43

wynik przyjętej metryki: 0.023255813953488372

--------------------------------------------------

wers pierwszy:		 富春垂钓
poprawny wers drugi:	 天禄谈经

proponowane drugie wersy: ['人懒几生尘', '壮志献江山', '清心长保真', '浪漫怕新闻', '草木已含英', '弦断梦难圆', '作业构三章', '兰馨溢神州', '道德五千', '四海奋人心', '功犹可迁', '兰气盈庭', '珍簟展方床', '贤媳举扇陪', '美誉眼前风', '仰百年师', '莫愁女儿红', '此味几人同', '头彩出中原', '撒豆成兵', '绿野寄仙踪', '慢煲绿豆汤', '倾城倾国', '智者忍违缘', '德及乡里', '梅韵贺新年', '绿茵陈', '一街太平歌', '桃花自美容', '诗带好风吟', '大功扫叛臣', '赤水得玄珠', '月分老梅香', '高第煦春风', '禹甸沐春风', '抑抑威仪', '一樽欢暂同', '游子自存心', '雪厚松袅云', '道理甚分明', '庙略久论兵', '重担重担人', '碧柳锁长亭', '醉后赋离骚', '胡蝶飞南园', '缘去梦依然', '风笔绘春秋', '妙理贵躬行', '足写乾坤', '气化三清', '牖含遍岭春', '冬夜暖开心', '豆灯照墨新', '众号神君', '龙女牧羊', '随地皆春', '秋雨秋风', '党赐深恩', '池浅韵牵波', '宛在岱中行', '天禄谈经', '寺与山争鲜', '夕观沧海云', '贪后买官', '眉月静横窗', '酒醉好题诗', '梅迎跃进春', '思量枕席功夫', '家庭祥和', '醉酒吐真情', '年年有盼头', '碧浪皱红霞', '淡雅雪边梅', '养性延年', '民心向党红', '菡萏静生香', '伴梦眠老屋', '禅味涤心胸', '民以食为天', '初日临春虚', '恨别鸟惊心', '朝槿散幽香', '鹤立水中央', '福禄寿喜', '陕州人杰灵', '苔湿地刁皮', '百岭见千娇', '才子佳人', '小曲品三春', '人我法皆空', '艺高大胆人', '花荣上海人', '天地月常圆', '一鳞', '导义延仁', '人品甘没闻', '时泰喜黎民', '而今当宝存', '春入鸟能言', '偏遇有情人', '大宴高轩', '无肉也能行', '上寿可期', '夜寂鸟啼空', '塞外朔风寒', '学子话春浓', '巧拙尚相悬', '两手作生涯', '方士信求仙', '品德讲道德', '秋波我梦吟', '出入有声名']
czy poprawny wers jest pośród proponowanych wersów?: True
liczba proponowanych wersów: 112

wynik przyjętej metryki: 0.008928571428571428

--------------------------------------------------

wers pierwszy:		 闻思修并重
poprawny wers drugi:	 人我法皆空

proponowane drugie wersy: ['人懒几生尘', '壮志献江山', '清心长保真', '浪漫怕新闻', '草木已含英', '松摇古谷风', '弦断梦难圆', '作业构三章', '兰馨溢神州', '四海奋人心', '珍簟展方床', '贤媳举扇陪', '美誉眼前风', '冰封万水寒', '浓厚简约虚', '莫愁女儿红', '春归柳色红', '此味几人同', '头彩出中原', '绿野寄仙踪', '玉律始调阳', '难教白日闲', '智者忍违缘', '真风再发扬', '梅韵贺新年', '转瞬万山遥', '一街太平歌', '桃花自美容', '子亦来见我乎', '诗带好风吟', '长短尽随风', '大功扫叛臣', '赤水得玄珠', '诗兴不无神', '月分老梅香', '高第煦春风', '禹甸沐春风', '徒临洗药泉', '一樽欢暂同', '游子自存心', '雪厚松袅云', '案头月一樽', '道理甚分明', '木栽门内闲', '庙略久论兵', '重担重担人', '移山志不忘', '碧柳锁长亭', '醉后赋离骚', '胡蝶飞南园', '世态笑炎凉', '缘去梦依然', '风笔绘春秋', '三江福寿图', '妙理贵躬行', '脉脉万重心', '高处看浮云', '两乡明月心', '高悬不畏风', '牖含遍岭春', '冬夜暖开心', '少年是网虫', '豆灯照墨新', '水凉难泡茶', '宛在岱中行', '蝉噪涧才幽', '王府池子深', '寺与山争鲜', '夕观沧海云', '眉月静横窗', '酒醉好题诗', '梅迎跃进春', '箫声向远天', '莫向外头看', '思量枕席功夫', '松风如在弦', '这边环境安宁', '醉酒吐真情', '年年有盼头', '碧浪皱红霞', '山深虎迹踪', '民心向党红', '菡萏静生香', '伴梦眠老屋', '禅味涤心胸', '民以食为天', '初日临春虚', '三春经纬织民图', '恨别鸟惊心', '朝槿散幽香', '惩凶儆效尤', '鹤立水中央', '福禄寿喜', '梅花落我肩', '陕州人杰灵', '鸟语落花山', '搴舟破晓风', '百岭见千娇', '衣间不带尘', '小曲品三春', '人我法皆空', '艺高大胆人', '花荣上海人', '天地月常圆', '红雨浸黄云', '敞襟天地宽', '人品甘没闻', '开光佛自由', '时泰喜黎民', '月轮碾古今', '而今当宝存', '春入鸟能言', '偏遇有情人', '落草潜伏十字坡', '心静自然凉', '山转路无穷', '白日奈我何', '春心蝶最知', '千花夹寺门', '无肉也能行', '夜寂鸟啼空', '江涌古今潮', '尝鲜食鱼羊', '塞外朔风寒', '学子话春浓', '巧拙尚相悬', '两手作生涯', '方士信求仙', '中庭松桂姿', '云外一声钟', '秋波我梦吟', '出入有声名', '莺歌鹧鸪天']
czy poprawny wers jest pośród proponowanych wersów?: True
liczba proponowanych wersów: 133

wynik przyjętej metryki: 0.007518796992481203

--------------------------------------------------

wers pierwszy:		 飞桥驾鹊天津阔
poprawny wers drugi:	 平沙戏马雨声干

proponowane drugie wersy: ['人如无欲意何求', '举帜遵章共展才', '岩上青藤攀壁升', '济助家乡晃美名', '草内多藏五步蛇', '病入膏肓有治疗', '毅力凝成跨海桥', '后乐先忧范弟昆', '疑是瑶台月下逢', '锦绣春归百姓家', '鼓瑟还从曲里来', '雀跃鱼翔谐乐多', '泉瀑飞长水练生', '雾里青山画屏开', '暮忆三秋雁字长', '烟柳风丝拂岸斜', '竹林满山景隽幽', '悦己悦人悦世间', '阔水滔滔有酒仙', '燕子三双戏柳烟', '豪赌毁他上进心', '野渡闲游一叶舟', '含露芙蓉醉海棠', '日月同辉光景嫣', '可恨蛮牛不识琴', '酒醉不如伴月眠', '草木逢春年年生', '画就雾云笔墨香', '悔被浮名牵累多', '坑我此生此袋烟', '笔盖古今三千年', '克俭尚勤播誉名', '规矩者应晓方圆', '猴腾广宇绽琼花', '言少言多尽美谈', '花狎春云露搅和', '大漠孤烟古道长', '新颖文章秋水清', '国泰民安幸福多', '南燕离巢北国春', '沸沸扬扬世界杯', '浅草雷门愧下关', '慧智还从实践来', '融月新醅慢慢尝', '绿岛清风拂袖来', '电力惠民百业兴', '千树争高有健才', '学问无穷博古今', '富裕安康福万家', '国运弥盛史弥远', '时急方须济世才', '公正不阿辩是非', '祭酒干杯国子光', '国展宏图烈士欣', '碧野连天满目春', '糊口养家望父滩', '大河滚滚尽淘沙', '淡淡菊香盈袖中', '笔点涟漪见水平', '弱不禁风是书生', '动动脑筋动静无', '玉笛金弦赞独生', '怪句洞开谜底联', '不叫俗尘污本真', '喜报频传战士家', '竹韵梅香总可人', '一指清凉尽染秋', '小小儿郎立路中', '意气风发马晓春', '无田有业不为贫', '涛落沙新畔易留', '德雨润开廉洁花', '月老三分秋水寒', '听竹尤增几许清', '大丈夫能屈能伸', '弹毕雅曲听和声', '人爱人钦人喜欢', '洞口经春长薜萝', '莫指云山认故乡', '韧节有意杜虚名', '洗出芙蓉九点青', '草木蔫枯晒绿洲', '落日也将暮色描', '象郡云烟锁桂梧', '潋滟江波扑簌风', '人且清心同步行', '名享三奇显祖公', '明月来时渚落霜', '阳朔沿水显花荣', '绿水卧听新月明', '北海波清映日黄', '映月二泉人世情', '世间最难得弟兄', '枝上空吹故国风', '笑问书生君是谁', '孔圣有才死后尊', '翠柳清风伴杏娇', '新庆交封暨缅封', '往事依然笔底新', '风过泸州带酒香', '相思一点老了谁', '风雨人生鉴知音', '小觑浮名对酒歌', '早出晚归皆自然', '翠袖拂空一抹烟', '却为心肝伤脑筋', '达业欣成万户楼', '点水蜻蜓赏绿来', '重义轻财德道深', '道士身怀童子功', '步步登高上岳阳', '热血沸腾意若何', '对苑繁华万蕾新', '宝地佛临济世人', '万里山光收画屏', '一捧廉泉岛外春', '防不胜防贼近身', '知耻明荣胸臆宽', '每觉邻山云最多', '虎步春光翼又添', '笔墨书空自忘机', '绿净春深好染衣', '常想旁通不对头', '一意孤行不回头', '烛影摇红步步娇', '路远始于跬步间', '江水源源发电来', '纵览清江高士怀', '关羽无停觅长兄', '落木落红落寂生', '小可参禅入几分', '九世同居号义门', '空海星辰宇宙流', '兄弟同吟夜雨陪', '贞慧何辞驻翠颜', '竹韵真箫彻夜吹', '落日栖霞赏故园', '正在柳洲接柳风', '文庙弦音奏凯频', '却诩心田少欠情', '山影盘龙月钓珠', '崇廉尚德岛尚书', '大势所趋水如蓝', '何处箫声断客肠', '皓月两轮水面逢', '良夜清风月满湖', '仙境田园隐棹声', '白发无情忽上头', '掷笔从戎壮士名', '夫再礼让妻再争', '三爱首推书友茶', '绝代佳人不入俗', '黄叶飘零比较烦', '剌史同游忆月明', '玉液溶溶滴露来', '骨头坚硬好八连', '黄金灿灿冷如冰', '四海龙兴艺术潮', '杨絮舞出风感觉', '玉鼎沉香影寂寥', '德作福田三世修', '小阁亦存明月身', '十里桃花相见欢', '吕布吕蒙常用兵', '他日凌云傲世间', '樱树花开迓客图', '百年盟约好时光', '海上风云浪几何', '小鸟放歌岁月甜', '竹下新笋一色鲜', '俯首甘为孺子牛', '水稻风多不待秋', '圣子甘心为罪囚', '小弟欢吟蜀语联', '一旦出名人气高', '联内音声欠古风', '中散孤高故不凡', '谁到篱前问姓名', '柳叶随风画美图', '柳下相逢思半生', '马上蓝天宇拓宽', '动地惊天事业昌', '山水相依诗易描', '美丽季节万里春', '心若非良必惹悲', '慷慨悲歌魏晋风', '傲物诗文有劲风', '载笔须来阙下游', '无主孤魂百姓怜', '死后欣然上八仙', '灯残襟冷感情无', '兰桂齐芳福乐门', '白面书生尽奶油', '饮酒月前独自愁', '乍响雷鸣下箸惊', '诗卷长流天地间', '岁变难更意里人', '深感人情冷似冰', '烛影摇红夜不眠', '每到云稠方想伞', '廉政为民常山情', '北海泛舟携孔融', '刻炬成诗韵可观', '早上欣逢笔底兄', '月满楼台鸳梦香', '虎年缘接冬奥情', '古韵古风誉古今', '两制宏谋百代功', '梦远还托风导游', '杉木果林桃李荣', '翠扇红衣十里香', '睡梦中难辨东西', '华夏农民开喜镰', '花到极清始觉香', '变易何难志士心', '闹市俗人涮肥羊', '古木生芽不是春', '片段岚光落画屏', '老虎苍蝇一起除', '暑气无声入隐溪', '燕翅劈开两岸闲', '落草潜伏十字坡', '碧简须雕次第仙', '落月桐轩寂手谈', '蛇对赠君东海福', '御世今惟不动尊', '是大英雄自虚怀', '竹色四时也不移', '羊跃人欢艳阳春', '美色必将随后衰', '弥陀含笑放光辉', '月色从来未换新', '世事浮云感慨多', '万里清风驻洁江', '开卷细同贤者谋', '积德途中永不停', '水畔青田走马牛', '雅意如茶自在闲', '吉日迎亲有贵人', '一地纸灰寂寞人', '丹翠含悲珠泪流', '老眼欣观四化图', '诚信经营财路宽', '水墨胡涂浪漫稀', '风急烟轻水送舟', '满腹心言共汝谈', '紫燕翻飞柳泛青', '家兴国兴万事兴', '河畔吟风写去难', '秋叶梧桐扫地僧', '平子归田不为穷', '秋月春风惹梦思', '山水襟怀我不如', '古道雁行倦戏秋', '小院兰亭柳下风', '巢就休询燕几时', '白虎岭中白虎来', '凝注流霜秋九梦', '河朔膏腴古督亢', '花好焉无惬意时', '春来江水绿如蓝', '南粤万家景色新', '新朋草舍对趣联', '愁恨两分杨柳风', '千载长天起大云', '小院雅风入彩光']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 266

wynik przyjętej metryki: 0.0

--------------------------------------------------

MLPRegressor

All verses for which the model's predicted score is at least 0.9 are proposed as second verses.
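
The jagosz_score metric itself is defined earlier in the notebook. Judging only from the values printed below (1/N whenever the correct second verse is among the N proposals, 0 otherwise, e.g. 1/135 ≈ 0.0074), a minimal sketch consistent with those outputs could look like the function below; the name and the handling of an empty proposal list are assumptions, not the notebook's own definition.

def jagosz_score_szkic(poprawny_wers_drugi, proponowane_wersy):
    # Hypothetical reconstruction: reward 1/N when the correct verse is among
    # the N proposed verses, 0 otherwise (matches the printed values).
    if poprawny_wers_drugi in proponowane_wersy:
        return 1 / len(proponowane_wersy)
    return 0.0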

for indeks_wersu_pierwszego in wybrane_dane_testowe:
    wers_pierwszy = testowe["in"][indeks_wersu_pierwszego]
    print("wers pierwszy:\t\t", wers_pierwszy)
    poprawny_wers_drugi = testowe["out"][indeks_wersu_pierwszego]
    print("poprawny wers drugi:\t", poprawny_wers_drugi)
    print()

    # Score every candidate second verse from the test set against this first verse.
    reprezentacja_wersu_pierwszego = x_test[indeks_wersu_pierwszego]
    mozliwe_indeksy_wersu_drugiego = []
    wartosci_przewidywan_wersu_drugiego = []
    for indeks_wersu_drugiego in range(len(y_test)):
        reprezentacja_wersu_drugiego = y_test[indeks_wersu_drugiego]
        # The MLP input is the concatenation of both verse representations.
        wejscie_do_MLP = torch.cat((reprezentacja_wersu_pierwszego, reprezentacja_wersu_drugiego))
        mozliwe_indeksy_wersu_drugiego.append(indeks_wersu_drugiego)
        wartosci_przewidywan_wersu_drugiego.append(regresor.predict([wejscie_do_MLP])[0])

    # Propose every candidate whose predicted score is at least 0.9.
    pom_df = pandas.DataFrame({"indeks":mozliwe_indeksy_wersu_drugiego,"wartosc":wartosci_przewidywan_wersu_drugiego})
    proponowane_wersy = [testowe["out"][i] for i in pom_df["indeks"] if pom_df["wartosc"][i]>=0.9]

    print("proponowane drugie wersy:", proponowane_wersy)
    print("czy poprawny wers jest pośród proponowanych wersów?:", poprawny_wers_drugi in proponowane_wersy)
    print("liczba proponowanych wersów:", len(proponowane_wersy))
    print()

    print("wynik przyjętej metryki:", jagosz_score(poprawny_wers_drugi, proponowane_wersy))
    # For empty or very long proposal lists, print the largest predicted values as context.
    if (len(proponowane_wersy)<1 or len(proponowane_wersy)>5):
        print("wyjaśnienie - największe wartości przewidywań")
        print(pom_df.nlargest(len(proponowane_wersy)+1, "wartosc"))
    print()
    print("-"*50)
    print()
wers pierwszy:		 花好月圆涵画意
poprawny wers drugi:	 年丰人寿沁诗声

proponowane drugie wersy: ['尘烟已远隐青山', '而今华夏振雄风', '他乡月好俺思亲', '火山光灿地遭殃', '三杯白酒乐成仙', '峰平径长难藏景', '海深寻秘展雄才', '青山醉向一樽横', '好同蝉窟映三潭', '铮铮梅蕾半含春', '莺燕对舞艳阳天', '一心二用两头空', '闲庭信步哼欢歌', '偶观雨燕栖寒檐', '居身常抱玉壶清', '清风两袖带回家', '三阳开泰颂廉明', '小楼吹砌玉生寒', '锦葵昂面为迎光', '看三国志欲何为', '闹中取静看擂台', '与君同作太平人', '青灯久作故人看', '猴腾广宇绽琼花', '好留明月九千秋', '风临荷盏窃清香', '古人异代不同时', '弄潮帆影港城新', '堤前柳浪露春光', '春江柳线乱弹琴', '梅香葱岭缀长虹', '仙人指路点迷津', '梧桐叶上得秋声', '无田有业不为贫', '一溪柳绿到谁家', '风临柳榭露春心', '俨然天竺古先生', '武夷阳羡品俱馨', '远山终日送余霞', '笛声浅扣暗推窗', '一川杨柳笼和风', '半空摇晃寻常仁', '追求亮丽美人图', '雪融春到春融雪', '常将劲节负秋霜', '善男信女拜观音', '劈开天地定人伦', '举杯邀月到凡尘', '伤心羁旅断愁肠', '夜灯勤礼塔中仙', '赋浓夏盛寓秋实', '牢盆给费利官民', '飞鸿远浦一时惊', '蛇听燕语颂春光', '清泉有趣自通融', '游春岂料梦成真', '年丰人寿沁诗声', '霜飞两鬓孔明灯', '不言第一海胸襟', '乌啼古树惹乡愁', '难经何必借炎黄', '子孙常读未烧书', '情凝大地重如山', '长将远景引天边', '有悲寒户落新愁', '满腔忧愤铸诗魂', '万般殷切候佳音', '行吟战马啸征尘', '何堪永夜漏更寒', '荷描夏画日钤章', '漫天风语绕苍穹', '花贴幼子悦童心', '红旗漫卷息狼烟', '四行热泪洒苍颜', '人间重义树新风', '一言九鼎定心神', '春风念字到青青', '观光农业载风情', '钻杆穿地唱欢歌', '再将粉黛沁于宣', '秉公执法树廉风', '当惊阁老好风光', '梅花傲雪迓长春', '出门去白面书生', '江郎梦里得犹神', '畅谈国事一腔情', '黄金灿灿冷如冰', '春深似海梦无痕', '文中已现老成心', '迎春老树发新芽', '无休往日浩如烟', '相思不减病扶墙', '挖坑华夏葬儒顽', '江心美景湛空明', '吸烟无益肺摧残', '风吹枫落枫随风', '蔼峰亦寄诗仙情', '江山忧患老英雄', '收篇难阻浪涛狂', '黄粱入梦悟尘心', '火牛曾胜敌千军', '杯中寂寞不曾空', '风流绝壁写春秋', '风流人物看今朝', '廉风动地畅山河', '帆连水色接天涯', '酣摊夏苑恋风情', '人生一笑尽良朋', '灯残襟冷感情无', '满园桃李尽争春', '碧峰犹冷寺前春', '千竿节气叠浪花', '人威毕竟胜天威', '千枝红杏闹春光', '仙风道骨验方肠', '草逢蓬室至家中', '廉政为民常山情', '三园猗顿晋商宗', '春歌大地爱国情', '隔村香送稻花肥', '蛇头就是做中人', '浓妆淡抹总相宜', '长江十堰显神功', '汉宫春风暮烟中', '春逢喜气盛迎门', '丹楹喜庆福临门', '亭空不碍鸟穿行', '慵眠古渡淡千愁', '收来花信燕声中', '文章有道拟施行', '闻名色变探花郎', '弥陀含笑放光辉', '闲生百态网中人', '转身应把泪珠弹', '千秋剑气护忠魂', '五申三令不成规', '幅巾嘉论有清风', '三生有幸遇知音', '峰丘暗许百年情', '一窗竹影又经风', '无情霜剑毁金枝', '不甘卖命换虚名', '风回柳苑韵邀风', '烟波浩渺任龙舒', '昙花怎晓夜幽长', '名城泉水润京腔', '搏风远去水云间', '笑迎世纪浴春光', '两行白鹭上青天', '笑谈成败慎出兵']
czy poprawny wers jest pośród proponowanych wersów?: True
liczba proponowanych wersów: 150

wynik przyjętej metryki: 0.006666666666666667
wyjaśnienie - największe wartości przewidywań
      indeks   wartosc
1077    1077  1.750754
322      322  1.719922
1334    1334  1.606502
1386    1386  1.572082
80        80  1.496093
...      ...       ...
1028    1028  0.909767
81        81  0.908926
151      151  0.907361
949      949  0.904659
129      129  0.897735

[151 rows x 2 columns]

--------------------------------------------------

wers pierwszy:		 人家不必论贫富
poprawny wers drugi:	 唯有读书声最佳

proponowane drugie wersy: ['把酒依然大丈夫', '雀跃鱼翔谐乐多', '千般柳絮游子心', '映日桑榆重晚晴', '悦己悦人悦世间', '豪赌毁他上进心', '木讷的人难启迪', '可恨蛮牛不识琴', '笔盖古今三千年', '克俭尚勤播誉名', '玉律始调阳', '真风再发扬', '地阔难及贪欲长', '璋玉无瑕耀祥光', '沸沸扬扬世界杯', '梦里飞花静闻香', '栽培桃李成林', '学问无穷博古今', '富裕安康福万家', '时急方须济世才', '公正不阿辩是非', '糊口养家望父滩', '笔点涟漪见水平', '怪句洞开谜底联', '不叫俗尘污本真', '喜报频传战士家', '涛落沙新畔易留', '胜算亦防失误时', '听竹尤增几许清', '大丈夫能屈能伸', '弹毕雅曲听和声', '爱慕虚荣唱一出', '春梦几枝与醉痴', '落日也将暮色描', '名享三奇显祖公', '唯有读书声最佳', '孔圣有才死后尊', '牢盆给费利官民', '风过泸州带酒香', '三令五申还有贪', '热血沸腾意若何', '知耻明荣胸臆宽', '常想旁通不对头', '优良业绩绩可观', '关羽无停觅长兄', '贞慧何辞驻翠颜', '马舞龙韵续华章', '大势所趋水如蓝', '白发无情忽上头', '掷笔从戎壮士名', '剌史同游忆月明', '腹有奸谋即兽心', '室壁裂时蟢网缝', '十里桃花相见欢', '百年盟约好时光', '静夜遐思枕月眠', '竹下新笋一色鲜', '俯首甘为孺子牛', '水稻风多不待秋', '圣子甘心为罪囚', '出口频频亿万吨', '谁到篱前问姓名', '部长铁男不定还', '慷慨悲歌魏晋风', '载笔须来阙下游', '无主孤魂百姓怜', '兰桂齐芳福乐门', '饮酒月前独自愁', '何防凿壁偷', '刻炬成诗韵可观', '处世何须带伪装', '古木生芽不是春', '老虎苍蝇一起除', '落草潜伏十字坡', '天乐鸣时简子游', '是大英雄自虚怀', '竹色四时也不移', '美色必将随后衰', '雅意如茶自在闲', '落笔再歌吉祥年', '水墨胡涂浪漫稀', '紫燕翻飞柳泛青', '百千万亿归于零', '河朔膏腴古督亢', '花好焉无惬意时', '何必杀鸡笑野猴', '南粤万家景色新']
czy poprawny wers jest pośród proponowanych wersów?: True
liczba proponowanych wersów: 87

wynik przyjętej metryki: 0.011494252873563218
wyjaśnienie - największe wartości przewidywań
      indeks   wartosc
1248    1248  1.538035
132      132  1.512274
33        33  1.486148
1464    1464  1.430674
259      259  1.351068
...      ...       ...
1270    1270  0.904546
799      799  0.903461
401      401  0.903190
1080    1080  0.903127
648      648  0.899576

[88 rows x 2 columns]

--------------------------------------------------

wers pierwszy:		 岳麓求知精通科技千秋重
poprawny wers drugi:	 湖湘原道感悟人生万世雄

proponowane drugie wersy: ['瑶花雅洁馨香海国铸精神', '风如云清清云清风枕边风', '双龙闹海茫茫大海我争雄', '贤也圣也慢步详窥玉洞云', '情歌依旧仍随秋水染夕阳', '休闲有乐掌中点击地球村', '瑶台丽日扬善弘仁一片天', '民生是本欣挥铁笔写风流', '茗标熠熠全身银泽溢新馨', '一泓碧水闪闪翻波四季清', '落单鸿雁不回首疼坏西风', '塘边揽胜喜看绿水跃红鱼', '春溪赴梦入径带来山外情', '净化空间定让蓝天展笑容', '斑斑旧迹义垂万古一山魂', '寻梅雪岭无畏寒侵自有香', '琼花瑶叶雨浥芙蕖冉冉香', '家书有泪一行归雁向佳人', '和风二月燕剪裁红天地春', '说地谈天妙语千词趣味生', '新梨屯垦耕联苑国粹拓疆', '年年七夕望月观星念恋人', '弘扬传统开创未来再立功', '心存群众为民永作护旗人', '湖面平和清心恬静意犹闲', '旌旗飞舞千桡激起粤精神', '壮怀逸兴盛世鸿儒聚鹭园', '碧血丹心一缕幽香溢九州', '寒山一梦入耳钟声未必真', '地铁迎春西咸大道正龙吟', '且移玉趾街坊人家结福缘', '云天碧水横练陈江七彩颜', '皈依万象柳叶馥香传妙音', '平台屹屹出水蛟腾碧浪中', '亮相走红依卖弄不类不伦', '篇篇墨语字字无非寂寞吟', '门生情切切敢捐大义铸心碑', '河声逐梦虽经九曲一条心', '庭前漫步闲听寂寂落花声', '正本清源革故鼎新事业昌', '三农仁政乐浪九州动地来', '曙开平野露沾萱圃草悬光', '地比巴黎精描山水化诗章', '莺鹂鸣柳恰有南风雁早乘', '欢迎学者此道终须启后人', '一刊誉满誉骋楹联艺术家', '蟾光初照银桨徐摇万点星', '丁香迎夏四面八方雨亦催', '春心激动重温旧梦老地方', '慈容宛在莲邦从此乐逍遥', '大道生辉养性修身万世春', '立于天地处事当留一点真', '壮大联坛一片云霞灿锦城', '红光一色欢天喜地满堂春', '民情在抱两袖清风促和谐', '槐花怒放五月槐花醉九龙', '锤镰记取红色党旗血染成', '千帆竞渡追梦宏开万里程', '月月风风叫你顿首献感情', '啼莺恰恰花盈秀野闹新春', '辉煌禹甸水漾芙蕖万象新']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 61

wynik przyjętej metryki: 0.0
wyjaśnienie - największe wartości przewidywań
      indeks   wartosc
63        63  1.430051
328      328  1.232970
285      285  1.195808
812      812  1.183949
1268    1268  1.162685
...      ...       ...
874      874  0.902322
963      963  0.901936
628      628  0.901397
1267    1267  0.900050
1306    1306  0.897187

[62 rows x 2 columns]

--------------------------------------------------

wers pierwszy:		 一城增富丽壮气宇千秋启数篇锦绣词章麟阁喜添文苑笔
poprawny wers drugi:	 百福祝祥和夸峥嵘万象惊满目辉煌金碧花都沉醉岭南香

proponowane drugie wersy: ['柳火煎茶柳翠鸣鹂柳营试马柳条赠别翠影千条咏赞多', '想必浮生犹有味君莫笑伍员卖唱秦琼卖马杨志卖刀', '邀客聆听胜地谐声诗中境界喜风传彩信鸟唱金歌', '心铭国耻胜利迎来铸辉煌功业耸立千秋纪念碑']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 4

wynik przyjętej metryki: 0.0

--------------------------------------------------

wers pierwszy:		 山地畅通用安全铺路
poprawny wers drugi:	 油田崛起为生产护航

proponowane drugie wersy: ['满身花影倩人扶', '调一湖春色染绿江淮', '梦里飞花静闻香', '瑞通阆苑琼楼兴百轩', '雪融春到春融雪', '桨声翻学海海载苦舟', '柳垂水面翠溶南北风', '梅乡舟里唱瘦月两回', '半坡翠竹耸蓝天', '满园桃李何言北大荒', '鸣钟食鼎甘田土之出', '金猴贺新岁岁岁平安', '花样年华联若洒可钦']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 13

wynik przyjętej metryki: 0.0
wyjaśnienie - największe wartości przewidywań
      indeks   wartosc
1123    1123  1.173290
642      642  1.102260
396      396  1.074446
200      200  1.069938
1409    1409  1.034693
789      789  1.015779
910      910  0.993425
1341    1341  0.989088
1018    1018  0.970223
486      486  0.936643
236      236  0.932841
340      340  0.924875
1181    1181  0.909370
585      585  0.899589

--------------------------------------------------

wers pierwszy:		 随花归故里
poprawny wers drugi:	 伴梦眠老屋

proponowane drugie wersy: ['此地是桃溪深处渊源一派溯龙泓', '浓厚简约虚', '此味几人同', '撒豆成兵', '德及乡里', '绿茵陈', '长短尽随风', '风定水无波', '醉后赋离骚', '气化三清', '众号神君', '党赐深恩', '池浅韵牵波', '宛在岱中行', '碧浪皱红霞', '初日临春虚', '才子佳人', '树葱茏', '出入有声名']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 19

wynik przyjętej metryki: 0.0
wyjaśnienie - największe wartości przewidywań
      indeks   wartosc
1104    1104  1.530875
258      258  1.430391
848      848  1.106179
252      252  1.093159
931      931  1.085685
192      192  1.083769
154      154  1.082355
1445    1445  1.076470
651      651  1.042010
291      291  1.032749
1081    1081  1.011256
601      601  1.008619
692      692  1.003235
439      439  1.001311
682      682  1.000212
432      432  0.959261
540      540  0.940793
92        92  0.916658
175      175  0.900608
87        87  0.890902

--------------------------------------------------

wers pierwszy:		 游福地赏风光苏岭郴江铺锦绣
poprawny wers drugi:	 款嘉宾谈经贸南湘林邑创辉煌

proponowane drugie wersy: ['悠悠矣少小离家潇洒人生梦已开', '此地是桃溪深处渊源一派溯龙泓', '诉久长诉离别相逢于白露欲来时', '其性烈其情柔其节亮留国史万年', '开怀八大味五味滋身三味养心', '浮萍漂泊水中花花中水水中花中', '涵汾水馨香太行厚重三晋同为一部书', '心游翰海叹这般风月似醉似痴', '传家无别业惟薄田数亩旧书五车', '红木映红心商德融器具造福万家']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 10

wynik przyjętej metryki: 0.0
wyjaśnienie - największe wartości przewidywań
      indeks   wartosc
92        92  1.294912
962      962  1.177321
1403    1403  1.120041
888      888  1.088421
1070    1070  1.044999
868      868  0.995907
78        78  0.967438
885      885  0.909440
522      522  0.902096
941      941  0.901670
85        85  0.876454

--------------------------------------------------

wers pierwszy:		 富春垂钓
poprawny wers drugi:	 天禄谈经

proponowane drugie wersy: ['美德重红幸福门', '作业构三章', '道德五千', '四海奋人心', '功犹可迁', '仰百年师', '浓厚简约虚', '柳娜娜', '此味几人同', '撒豆成兵', '绿野寄仙踪', '难教白日闲', '梅韵贺新年', '文盲', '一街太平歌', '梧宫秋吴王愁', '赤水得玄珠', '栽培桃李成林', '心悟得真', '英雄是达人', '道理甚分明', '木栽门内闲', '庙略久论兵', '碧柳锁长亭', '风定水无波', '醉后赋离骚', '妙理贵躬行', '冬夜暖开心', '众号神君', '龙女牧羊', '新桥', '严师', '不言第一海胸襟', '府藏石铫图', '池浅韵牵波', '宛在岱中行', '天禄谈经', '寺与山争鲜', '酒醉好题诗', '冗鱼', '长庚', '碧浪皱红霞', '淡雅雪边梅', '初日临春虚', '福禄寿喜', '陕州人杰灵', '苔湿地刁皮', '鸟语落花山', '人我法皆空', '何防凿壁偷', '艺高大胆人', '一鳞', '起宏图', '时泰喜黎民', '月轮碾古今', '春入鸟能言', '蛇对赠君东海福', '大宴高轩', '白日奈我何', '单恋独予一江秋', '无肉也能行', '上寿可期', '苍苔', '学子话春浓']
czy poprawny wers jest pośród proponowanych wersów?: True
liczba proponowanych wersów: 64

wynik przyjętej metryki: 0.015625
wyjaśnienie - największe wartości przewidywań
      indeks   wartosc
1113    1113  1.373200
747      747  1.311217
192      192  1.304834
1101    1101  1.304543
1188    1188  1.279343
...      ...       ...
303      303  0.921175
1112    1112  0.918424
1191    1191  0.917258
509      509  0.903766
298      298  0.898008

[65 rows x 2 columns]

--------------------------------------------------

wers pierwszy:		 闻思修并重
poprawny wers drugi:	 人我法皆空

proponowane drugie wersy: ['玉律始调阳', '游子自存心']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 2

wynik przyjętej metryki: 0.0

--------------------------------------------------

wers pierwszy:		 飞桥驾鹊天津阔
poprawny wers drugi:	 平沙戏马雨声干

proponowane drugie wersy: ['草内多藏五步蛇', '后乐先忧范弟昆', '锦绣春归百姓家', '雾里青山画屏开', '映日桑榆重晚晴', '野渡闲游一叶舟', '日月同辉光景嫣', '酒醉不如伴月眠', '草木逢春年年生', '看三国志欲何为', '几管笛箫奏响春', '千树落花别样红', '地阔难及贪欲长', '大漠孤烟古道长', '然乎者一字乾坤', '南燕离巢北国春', '浅草雷门愧下关', '又何必三日闻香', '梦里飞花静闻香', '国运弥盛史弥远', '时急方须济世才', '五色龙溪抱江流', '动动脑筋动静无', '弹毕雅曲听和声', '春梦几枝与醉痴', '粉黛淡施十五光', '明月来时渚落霜', '北海波清映日黄', '映月二泉人世情', '风过泸州带酒香', '小觑浮名对酒歌', '不言第一海胸襟', '知耻明荣胸臆宽', '路远始于跬步间', '纵览清江高士怀', '皇城玉阙夕阳斜', '贞慧何辞驻翠颜', '落日栖霞赏故园', '大势所趋水如蓝', '白发无情忽上头', '夫再礼让妻再争', '无边相思似流云', '畅谈国事一腔情', '杨絮舞出风感觉', '室壁裂时蟢网缝', '月转疏枝过女墙', '七年对友队尤宏', '百年盟约好时光', '蔼峰亦寄诗仙情', '中散孤高故不凡', '马上蓝天宇拓宽', '美丽季节万里春', '部长铁男不定还', '慷慨悲歌魏晋风', '五井丰碑今日游', '载笔须来阙下游', '死后欣然上八仙', '兰桂齐芳福乐门', '饮酒月前独自愁', '廉政为民常山情', '北海泛舟携孔融', '四面荷花扑画船', '古韵古风誉古今', '梦远还托风导游', '处世何须带伪装', '杉木果林桃李荣', '春歌大地爱国情', '翠扇红衣十里香', '华夏农民开喜镰', '老虎苍蝇一起除', '蛇对赠君东海福', '红枣绿茶岭上香', '竹色四时也不移', '羊跃人欢艳阳春', '单恋独予一江秋', '水畔青田走马牛', '雅意如茶自在闲', '一地纸灰寂寞人', '归家且遂十年心', '紫燕翻飞柳泛青', '误将弟子入迷宫', '河朔膏腴古督亢', '花好焉无惬意时', '何必杀鸡笑野猴', '南粤万家景色新']
czy poprawny wers jest pośród proponowanych wersów?: False
liczba proponowanych wersów: 85

wynik przyjętej metryki: 0.0
wyjaśnienie - największe wartości przewidywań
      indeks   wartosc
896      896  1.700319
632      632  1.542179
871      871  1.496446
1319    1319  1.451665
1333    1333  1.371606
...      ...       ...
1375    1375  0.905215
751      751  0.904408
91        91  0.903730
1356    1356  0.901601
265      265  0.898689

[86 rows x 2 columns]

--------------------------------------------------
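
Calling regresor.predict once per candidate keeps the loop above simple but means thousands of single-sample predictions per first verse. Assuming regresor is a fitted scikit-learn MLPRegressor and the entries of x_test/y_test are fixed-length 1-D CPU tensors (as the torch.cat calls above suggest), the scoring could instead be batched into a single predict call. The following is only a sketch under those assumptions, not the notebook's own code:

import numpy
import torch

def zaproponuj_wersy_batch(reprezentacja_wersu_pierwszego, y_test, testowe, regresor, prog=0.9):
    # Stack all [first verse ; candidate] concatenations into one matrix ...
    wejscia = torch.stack([torch.cat((reprezentacja_wersu_pierwszego, y)) for y in y_test])
    # ... and score every candidate with a single batched call.
    oceny = regresor.predict(wejscia.numpy())
    indeksy = numpy.flatnonzero(oceny >= prog)
    return [testowe["out"][int(i)] for i in indeksy]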

MLPClassifier

The adopted metric (jagosz_score_dla_zbioru) computed for 1/20 of the test set.

krotki = []
# Only the size of this split is used: the loop below evaluates the first
# len(czesc_zbioru_testowego) first verses of the test set (5% of it, i.e. 74 verses).
czesc_zbioru_testowego, _ = train_test_split(x_test,test_size=0.95,random_state=42)
for indeks_wersu_pierwszego in range(len(czesc_zbioru_testowego)):
    wers_pierwszy = testowe["in"][indeks_wersu_pierwszego]
    poprawny_wers_drugi = testowe["out"][indeks_wersu_pierwszego]

    reprezentacja_wersu_pierwszego = x_test[indeks_wersu_pierwszego]
    mozliwe_indeksy_wersu_drugiego = []
    for indeks_wersu_drugiego in range(len(y_test)):
        reprezentacja_wersu_drugiego = y_test[indeks_wersu_drugiego]
        wejscie_do_MLP = torch.cat((reprezentacja_wersu_pierwszego, reprezentacja_wersu_drugiego))
        # Keep only the candidates the classifier labels as matching (class 1).
        if klasyfikator.predict([wejscie_do_MLP])[0] == 1:
            mozliwe_indeksy_wersu_drugiego.append(indeks_wersu_drugiego)

    proponowane_wersy = [testowe["out"][i] for i in mozliwe_indeksy_wersu_drugiego]

    krotki.append((poprawny_wers_drugi,proponowane_wersy))

print(jagosz_score_dla_zbioru(krotki))
0.0005942200059422001

Mean of the per-verse metric values for 1/20 of the test set.

jagosz_scores=[]
for indeks_wersu_pierwszego in range(len(czesc_zbioru_testowego)):
    wers_pierwszy = testowe["in"][indeks_wersu_pierwszego]
    poprawny_wers_drugi = testowe["out"][indeks_wersu_pierwszego]

    reprezentacja_wersu_pierwszego = x_test[indeks_wersu_pierwszego]
    mozliwe_indeksy_wersu_drugiego = []
    for indeks_wersu_drugiego in range(len(y_test)):
        reprezentacja_wersu_drugiego = y_test[indeks_wersu_drugiego]
        wejscie_do_MLP = torch.cat((reprezentacja_wersu_pierwszego, reprezentacja_wersu_drugiego))
        # Keep only the candidates the classifier labels as matching (class 1).
        if klasyfikator.predict([wejscie_do_MLP])[0] == 1:
            mozliwe_indeksy_wersu_drugiego.append(indeks_wersu_drugiego)

    proponowane_wersy = [testowe["out"][i] for i in mozliwe_indeksy_wersu_drugiego]

    jagosz_scores.append(jagosz_score(poprawny_wers_drugi,proponowane_wersy))

print(numpy.mean(jagosz_scores))
0.0005963998131847716
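
A hard 0/1 decision from the classifier either proposes a candidate or discards it entirely. Since scikit-learn's MLPClassifier also exposes predict_proba, the candidates could instead be filtered (or ranked) by the predicted probability of the positive class, mirroring the 0.9 threshold used with the regressor. This is a sketch under the assumptions that klasyfikator was fitted with 0/1 labels and that the representations are 1-D CPU tensors; the function name and threshold are illustrative only:

import numpy
import torch

def zaproponuj_wersy_proba(reprezentacja_wersu_pierwszego, y_test, testowe, klasyfikator, prog=0.9):
    wejscia = torch.stack([torch.cat((reprezentacja_wersu_pierwszego, y)) for y in y_test]).numpy()
    # Column 1 of predict_proba is P(class 1), i.e. the probability that the pair matches.
    prawdopodobienstwa = klasyfikator.predict_proba(wejscia)[:, 1]
    indeksy = numpy.flatnonzero(prawdopodobienstwa >= prog)
    return [testowe["out"][int(i)] for i in indeksy]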

MLPRegressor

The adopted metric (jagosz_score_dla_zbioru) computed for 1/20 of the test set.

krotki = []
for indeks_wersu_pierwszego in range(len(czesc_zbioru_testowego)):
    wers_pierwszy = testowe["in"][indeks_wersu_pierwszego]
    poprawny_wers_drugi = testowe["out"][indeks_wersu_pierwszego]

    reprezentacja_wersu_pierwszego = x_test[indeks_wersu_pierwszego]
    mozliwe_indeksy_wersu_drugiego = []
    wartosci_przewidywan_wersu_drugiego = []
    for indeks_wersu_drugiego in range(len(y_test)):
        reprezentacja_wersu_drugiego = y_test[indeks_wersu_drugiego]
        wejscie_do_MLP = torch.cat((reprezentacja_wersu_pierwszego, reprezentacja_wersu_drugiego))
        mozliwe_indeksy_wersu_drugiego.append(indeks_wersu_drugiego)
        wartosci_przewidywan_wersu_drugiego.append(regresor.predict([wejscie_do_MLP])[0])

    pom_df = pandas.DataFrame({"indeks":mozliwe_indeksy_wersu_drugiego,"wartosc":wartosci_przewidywan_wersu_drugiego})
    proponowane_wersy = [testowe["out"][i] for i in pom_df["indeks"] if pom_df["wartosc"][i]>=0.9]

    krotki.append((poprawny_wers_drugi,proponowane_wersy))

print(jagosz_score_dla_zbioru(krotki))
0.005119117936601693

Mean of the per-verse metric values for 1/20 of the test set.

jagosz_scores=[]
for indeks_wersu_pierwszego in range(len(czesc_zbioru_testowego)):
    wers_pierwszy = testowe["in"][indeks_wersu_pierwszego]
    poprawny_wers_drugi = testowe["out"][indeks_wersu_pierwszego]

    reprezentacja_wersu_pierwszego = x_test[indeks_wersu_pierwszego]
    mozliwe_indeksy_wersu_drugiego = []
    wartosci_przewidywan_wersu_drugiego = []
    for indeks_wersu_drugiego in range(len(y_test)):
        reprezentacja_wersu_drugiego = y_test[indeks_wersu_drugiego]
        wejscie_do_MLP = torch.cat((reprezentacja_wersu_pierwszego, reprezentacja_wersu_drugiego))
        mozliwe_indeksy_wersu_drugiego.append(indeks_wersu_drugiego)
        wartosci_przewidywan_wersu_drugiego.append(regresor.predict([wejscie_do_MLP])[0])

    pom_df = pandas.DataFrame({"indeks":mozliwe_indeksy_wersu_drugiego,"wartosc":wartosci_przewidywan_wersu_drugiego})
    proponowane_wersy = [testowe["out"][i] for i in pom_df["indeks"] if pom_df["wartosc"][i]>=0.9]

    jagosz_scores.append(jagosz_score(poprawny_wers_drugi,proponowane_wersy))

print(numpy.mean(jagosz_scores))
0.011362779457775134
print(len(czesc_zbioru_testowego))
74