{ "cells": [ { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Requirement already satisfied: tqdm in ./.venv/lib/python3.10/site-packages (4.66.1)\n", "Requirement already satisfied: transformers in ./.venv/lib/python3.10/site-packages (4.37.2)\n", "Requirement already satisfied: pandas in ./.venv/lib/python3.10/site-packages (2.2.0)\n", "Requirement already satisfied: pyarrow in ./.venv/lib/python3.10/site-packages (15.0.0)\n", "Collecting torch\n", " Downloading torch-2.2.0-cp310-cp310-manylinux1_x86_64.whl (755.5 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m755.5/755.5 MB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", "\u001b[?25hRequirement already satisfied: filelock in ./.venv/lib/python3.10/site-packages (from transformers) (3.13.1)\n", "Requirement already satisfied: huggingface-hub<1.0,>=0.19.3 in ./.venv/lib/python3.10/site-packages (from transformers) (0.20.3)\n", "Requirement already satisfied: safetensors>=0.4.1 in ./.venv/lib/python3.10/site-packages (from transformers) (0.4.2)\n", "Requirement already satisfied: numpy>=1.17 in ./.venv/lib/python3.10/site-packages (from transformers) (1.26.3)\n", "Requirement already satisfied: tokenizers<0.19,>=0.14 in ./.venv/lib/python3.10/site-packages (from transformers) (0.15.1)\n", "Requirement already satisfied: packaging>=20.0 in ./.venv/lib/python3.10/site-packages (from transformers) (23.2)\n", "Requirement already satisfied: regex!=2019.12.17 in ./.venv/lib/python3.10/site-packages (from transformers) (2023.12.25)\n", "Requirement already satisfied: pyyaml>=5.1 in ./.venv/lib/python3.10/site-packages (from transformers) (6.0.1)\n", "Requirement already satisfied: requests in ./.venv/lib/python3.10/site-packages (from transformers) (2.31.0)\n", "Requirement already satisfied: tzdata>=2022.7 in ./.venv/lib/python3.10/site-packages (from pandas) (2023.4)\n", "Requirement already satisfied: python-dateutil>=2.8.2 in ./.venv/lib/python3.10/site-packages (from pandas) (2.8.2)\n", "Requirement already satisfied: pytz>=2020.1 in ./.venv/lib/python3.10/site-packages (from pandas) (2023.4)\n", "Collecting nvidia-cusparse-cu12==12.1.0.106\n", " Using cached nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl (196.0 MB)\n", "Collecting nvidia-cuda-runtime-cu12==12.1.105\n", " Using cached nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (823 kB)\n", "Collecting triton==2.2.0\n", " Using cached triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (167.9 MB)\n", "Collecting nvidia-cublas-cu12==12.1.3.1\n", " Using cached nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl (410.6 MB)\n", "Collecting jinja2\n", " Using cached Jinja2-3.1.3-py3-none-any.whl (133 kB)\n", "Collecting nvidia-nvtx-cu12==12.1.105\n", " Using cached nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (99 kB)\n", "Collecting sympy\n", " Using cached sympy-1.12-py3-none-any.whl (5.7 MB)\n", "Requirement already satisfied: typing-extensions>=4.8.0 in ./.venv/lib/python3.10/site-packages (from torch) (4.9.0)\n", "Collecting nvidia-cuda-cupti-cu12==12.1.105\n", " Using cached nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (14.1 MB)\n", "Collecting nvidia-cusolver-cu12==11.4.5.107\n", " Using cached nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl (124.2 MB)\n", "Collecting nvidia-cufft-cu12==11.0.2.54\n", " Using cached nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl (121.6 MB)\n", "Requirement already satisfied: fsspec in ./.venv/lib/python3.10/site-packages (from torch) (2023.12.2)\n", "Collecting nvidia-cuda-nvrtc-cu12==12.1.105\n", " Using cached nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl (23.7 MB)\n", "Collecting networkx\n", " Using cached networkx-3.2.1-py3-none-any.whl (1.6 MB)\n", "Collecting nvidia-nccl-cu12==2.19.3\n", " Downloading nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl (166.0 MB)\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m166.0/166.0 MB\u001b[0m \u001b[31m13.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", "\u001b[?25hCollecting nvidia-cudnn-cu12==8.9.2.26\n", " Using cached nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl (731.7 MB)\n", "Collecting nvidia-curand-cu12==10.3.2.106\n", " Using cached nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl (56.5 MB)\n", "Collecting nvidia-nvjitlink-cu12\n", " Using cached nvidia_nvjitlink_cu12-12.3.101-py3-none-manylinux1_x86_64.whl (20.5 MB)\n", "Requirement already satisfied: six>=1.5 in ./.venv/lib/python3.10/site-packages (from python-dateutil>=2.8.2->pandas) (1.16.0)\n", "Collecting MarkupSafe>=2.0\n", " Using cached MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (25 kB)\n", "Requirement already satisfied: urllib3<3,>=1.21.1 in ./.venv/lib/python3.10/site-packages (from requests->transformers) (2.2.0)\n", "Requirement already satisfied: charset-normalizer<4,>=2 in ./.venv/lib/python3.10/site-packages (from requests->transformers) (3.3.2)\n", "Requirement already satisfied: certifi>=2017.4.17 in ./.venv/lib/python3.10/site-packages (from requests->transformers) (2023.11.17)\n", "Requirement already satisfied: idna<4,>=2.5 in ./.venv/lib/python3.10/site-packages (from requests->transformers) (3.6)\n", "Collecting mpmath>=0.19\n", " Using cached mpmath-1.3.0-py3-none-any.whl (536 kB)\n", "Installing collected packages: mpmath, triton, sympy, nvidia-nvtx-cu12, nvidia-nvjitlink-cu12, nvidia-nccl-cu12, nvidia-curand-cu12, nvidia-cufft-cu12, nvidia-cuda-runtime-cu12, nvidia-cuda-nvrtc-cu12, nvidia-cuda-cupti-cu12, nvidia-cublas-cu12, networkx, MarkupSafe, nvidia-cusparse-cu12, nvidia-cudnn-cu12, jinja2, nvidia-cusolver-cu12, torch\n", "Successfully installed MarkupSafe-2.1.4 jinja2-3.1.3 mpmath-1.3.0 networkx-3.2.1 nvidia-cublas-cu12-12.1.3.1 nvidia-cuda-cupti-cu12-12.1.105 nvidia-cuda-nvrtc-cu12-12.1.105 nvidia-cuda-runtime-cu12-12.1.105 nvidia-cudnn-cu12-8.9.2.26 nvidia-cufft-cu12-11.0.2.54 nvidia-curand-cu12-10.3.2.106 nvidia-cusolver-cu12-11.4.5.107 nvidia-cusparse-cu12-12.1.0.106 nvidia-nccl-cu12-2.19.3 nvidia-nvjitlink-cu12-12.3.101 nvidia-nvtx-cu12-12.1.105 sympy-1.12 torch-2.2.0 triton-2.2.0\n", "\n", "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m23.0.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.3.2\u001b[0m\n", "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", "Note: you may need to restart the kernel to use updated packages.\n" ] } ], "source": [ "%pip install tqdm transformers pandas pyarrow torch" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [], "source": [ "import glob\n", "import torch\n", "import os\n", "import re\n", "import shutil\n", "from tqdm import tqdm\n", "from transformers import AutoTokenizer, AutoModelForCausalLM\n", "import pandas as pd" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [], "source": [ "model_name = \"Dans-DiscountModels/Dans-StructureEvaluator-Small\"" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "ename": "RuntimeError", "evalue": "Failed to import transformers.models.mistral.modeling_mistral because of the following error (look up to see its traceback):\nmodule 'torch._subclasses' has no attribute 'functional_tensor'", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/transformers/utils/import_utils.py:1364\u001b[0m, in \u001b[0;36m_LazyModule._get_module\u001b[0;34m(self, module_name)\u001b[0m\n\u001b[1;32m 1363\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 1364\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mimportlib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mimport_module\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m.\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mmodule_name\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;18;43m__name__\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1365\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n", "File \u001b[0;32m~/.proto/tools/python/3.10.11/install/lib/python3.10/importlib/__init__.py:126\u001b[0m, in \u001b[0;36mimport_module\u001b[0;34m(name, package)\u001b[0m\n\u001b[1;32m 125\u001b[0m level \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m--> 126\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_bootstrap\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_gcd_import\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m[\u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpackage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m:1050\u001b[0m, in \u001b[0;36m_gcd_import\u001b[0;34m(name, package, level)\u001b[0m\n", "File \u001b[0;32m:1027\u001b[0m, in \u001b[0;36m_find_and_load\u001b[0;34m(name, import_)\u001b[0m\n", "File \u001b[0;32m:1006\u001b[0m, in \u001b[0;36m_find_and_load_unlocked\u001b[0;34m(name, import_)\u001b[0m\n", "File \u001b[0;32m:688\u001b[0m, in \u001b[0;36m_load_unlocked\u001b[0;34m(spec)\u001b[0m\n", "File \u001b[0;32m:883\u001b[0m, in \u001b[0;36mexec_module\u001b[0;34m(self, module)\u001b[0m\n", "File \u001b[0;32m:241\u001b[0m, in \u001b[0;36m_call_with_frames_removed\u001b[0;34m(f, *args, **kwds)\u001b[0m\n", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/transformers/models/mistral/modeling_mistral.py:28\u001b[0m\n\u001b[1;32m 27\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtorch\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mnn\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mfunctional\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mF\u001b[39;00m\n\u001b[0;32m---> 28\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtorch\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mutils\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mcheckpoint\u001b[39;00m\n\u001b[1;32m 29\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtorch\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m nn\n", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/torch/utils/checkpoint.py:1169\u001b[0m\n\u001b[1;32m 1163\u001b[0m \u001b[38;5;66;03m# NOTE: torch.utils.checkpoint internal logic will call these two functions unknown number of times\u001b[39;00m\n\u001b[1;32m 1164\u001b[0m \u001b[38;5;66;03m# (i.e. there could be _CachedTorchDispatchMode calls that doesn't map to a _CachingTorchDispatchMode call),\u001b[39;00m\n\u001b[1;32m 1165\u001b[0m \u001b[38;5;66;03m# so we ignore these ops and just always recompute them.\u001b[39;00m\n\u001b[1;32m 1166\u001b[0m _ignored_ops \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 1167\u001b[0m torch\u001b[38;5;241m.\u001b[39mops\u001b[38;5;241m.\u001b[39mprim\u001b[38;5;241m.\u001b[39mdevice\u001b[38;5;241m.\u001b[39mdefault,\n\u001b[1;32m 1168\u001b[0m torch\u001b[38;5;241m.\u001b[39mops\u001b[38;5;241m.\u001b[39maten\u001b[38;5;241m.\u001b[39mdetach\u001b[38;5;241m.\u001b[39mdefault,\n\u001b[0;32m-> 1169\u001b[0m } \u001b[38;5;241m|\u001b[39m \u001b[38;5;28mset\u001b[39m(\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_subclasses\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfunctional_tensor\u001b[49m\u001b[38;5;241m.\u001b[39mFunctionalTensor\u001b[38;5;241m.\u001b[39mmetadata_fns)\n\u001b[1;32m 1172\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01m_CachingTorchDispatchMode\u001b[39;00m(TorchDispatchMode):\n", "\u001b[0;31mAttributeError\u001b[0m: module 'torch._subclasses' has no attribute 'functional_tensor'", "\nThe above exception was the direct cause of the following exception:\n", "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[15], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m model \u001b[38;5;241m=\u001b[39m \u001b[43mAutoModelForCausalLM\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfrom_pretrained\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel_name\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice_map\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcuda\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 2\u001b[0m tokenizer \u001b[38;5;241m=\u001b[39m AutoTokenizer\u001b[38;5;241m.\u001b[39mfrom_pretrained(model_name)\n", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:565\u001b[0m, in \u001b[0;36m_BaseAutoModelClass.from_pretrained\u001b[0;34m(cls, pretrained_model_name_or_path, *model_args, **kwargs)\u001b[0m\n\u001b[1;32m 561\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m model_class\u001b[38;5;241m.\u001b[39mfrom_pretrained(\n\u001b[1;32m 562\u001b[0m pretrained_model_name_or_path, \u001b[38;5;241m*\u001b[39mmodel_args, config\u001b[38;5;241m=\u001b[39mconfig, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mhub_kwargs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs\n\u001b[1;32m 563\u001b[0m )\n\u001b[1;32m 564\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28mtype\u001b[39m(config) \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mcls\u001b[39m\u001b[38;5;241m.\u001b[39m_model_mapping\u001b[38;5;241m.\u001b[39mkeys():\n\u001b[0;32m--> 565\u001b[0m model_class \u001b[38;5;241m=\u001b[39m \u001b[43m_get_model_class\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mcls\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_model_mapping\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 566\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m model_class\u001b[38;5;241m.\u001b[39mfrom_pretrained(\n\u001b[1;32m 567\u001b[0m pretrained_model_name_or_path, \u001b[38;5;241m*\u001b[39mmodel_args, config\u001b[38;5;241m=\u001b[39mconfig, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mhub_kwargs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs\n\u001b[1;32m 568\u001b[0m )\n\u001b[1;32m 569\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m 570\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mUnrecognized configuration class \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mconfig\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__class__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m for this kind of AutoModel: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mcls\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 571\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mModel type should be one of \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m, \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(c\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mfor\u001b[39;00m\u001b[38;5;250m \u001b[39mc\u001b[38;5;250m \u001b[39m\u001b[38;5;129;01min\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28mcls\u001b[39m\u001b[38;5;241m.\u001b[39m_model_mapping\u001b[38;5;241m.\u001b[39mkeys())\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 572\u001b[0m )\n", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:387\u001b[0m, in \u001b[0;36m_get_model_class\u001b[0;34m(config, model_mapping)\u001b[0m\n\u001b[1;32m 386\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_get_model_class\u001b[39m(config, model_mapping):\n\u001b[0;32m--> 387\u001b[0m supported_models \u001b[38;5;241m=\u001b[39m \u001b[43mmodel_mapping\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43mtype\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m)\u001b[49m\u001b[43m]\u001b[49m\n\u001b[1;32m 388\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(supported_models, (\u001b[38;5;28mlist\u001b[39m, \u001b[38;5;28mtuple\u001b[39m)):\n\u001b[1;32m 389\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m supported_models\n", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:740\u001b[0m, in \u001b[0;36m_LazyAutoMapping.__getitem__\u001b[0;34m(self, key)\u001b[0m\n\u001b[1;32m 738\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m model_type \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_model_mapping:\n\u001b[1;32m 739\u001b[0m model_name \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_model_mapping[model_type]\n\u001b[0;32m--> 740\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_load_attr_from_module\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel_type\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel_name\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 742\u001b[0m \u001b[38;5;66;03m# Maybe there was several model types associated with this config.\u001b[39;00m\n\u001b[1;32m 743\u001b[0m model_types \u001b[38;5;241m=\u001b[39m [k \u001b[38;5;28;01mfor\u001b[39;00m k, v \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_config_mapping\u001b[38;5;241m.\u001b[39mitems() \u001b[38;5;28;01mif\u001b[39;00m v \u001b[38;5;241m==\u001b[39m key\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m]\n", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:754\u001b[0m, in \u001b[0;36m_LazyAutoMapping._load_attr_from_module\u001b[0;34m(self, model_type, attr)\u001b[0m\n\u001b[1;32m 752\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m module_name \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_modules:\n\u001b[1;32m 753\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_modules[module_name] \u001b[38;5;241m=\u001b[39m importlib\u001b[38;5;241m.\u001b[39mimport_module(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mmodule_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtransformers.models\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m--> 754\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mgetattribute_from_module\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_modules\u001b[49m\u001b[43m[\u001b[49m\u001b[43mmodule_name\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mattr\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:698\u001b[0m, in \u001b[0;36mgetattribute_from_module\u001b[0;34m(module, attr)\u001b[0m\n\u001b[1;32m 696\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(attr, \u001b[38;5;28mtuple\u001b[39m):\n\u001b[1;32m 697\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mtuple\u001b[39m(getattribute_from_module(module, a) \u001b[38;5;28;01mfor\u001b[39;00m a \u001b[38;5;129;01min\u001b[39;00m attr)\n\u001b[0;32m--> 698\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28;43mhasattr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mmodule\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mattr\u001b[49m\u001b[43m)\u001b[49m:\n\u001b[1;32m 699\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mgetattr\u001b[39m(module, attr)\n\u001b[1;32m 700\u001b[0m \u001b[38;5;66;03m# Some of the mappings have entries model_type -> object of another model type. In that case we try to grab the\u001b[39;00m\n\u001b[1;32m 701\u001b[0m \u001b[38;5;66;03m# object at the top level.\u001b[39;00m\n", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/transformers/utils/import_utils.py:1354\u001b[0m, in \u001b[0;36m_LazyModule.__getattr__\u001b[0;34m(self, name)\u001b[0m\n\u001b[1;32m 1352\u001b[0m value \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_get_module(name)\n\u001b[1;32m 1353\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m name \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_class_to_module\u001b[38;5;241m.\u001b[39mkeys():\n\u001b[0;32m-> 1354\u001b[0m module \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_get_module\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_class_to_module\u001b[49m\u001b[43m[\u001b[49m\u001b[43mname\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1355\u001b[0m value \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mgetattr\u001b[39m(module, name)\n\u001b[1;32m 1356\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", "File \u001b[0;32m~/ai/llm/datasets/NyxKrage_bambisleep/.venv/lib/python3.10/site-packages/transformers/utils/import_utils.py:1366\u001b[0m, in \u001b[0;36m_LazyModule._get_module\u001b[0;34m(self, module_name)\u001b[0m\n\u001b[1;32m 1364\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m importlib\u001b[38;5;241m.\u001b[39mimport_module(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m+\u001b[39m module_name, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m)\n\u001b[1;32m 1365\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m-> 1366\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[1;32m 1367\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFailed to import \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mmodule_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m because of the following error (look up to see its\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 1368\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m traceback):\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;132;01m{\u001b[39;00me\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 1369\u001b[0m ) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01me\u001b[39;00m\n", "\u001b[0;31mRuntimeError\u001b[0m: Failed to import transformers.models.mistral.modeling_mistral because of the following error (look up to see its traceback):\nmodule 'torch._subclasses' has no attribute 'functional_tensor'" ] } ], "source": [ "model = AutoModelForCausalLM.from_pretrained(model_name, device_map = \"cuda\")\n", "tokenizer = AutoTokenizer.from_pretrained(model_name)" ] }, { "cell_type": "code", "execution_count": 51, "metadata": {}, "outputs": [], "source": [ "def calculate_perplexity(text):\n", " input_ids = torch.tensor([tokenizer.encode(text)]).to(\"cuda:0\")\n", "\n", " model.eval()\n", "\n", " with torch.no_grad():\n", " outputs = model(input_ids, labels=input_ids)\n", " loss = outputs[0]\n", "\n", " del outputs\n", " torch.cuda.empty_cache()\n", "\n", " return torch.exp(loss).item()" ] }, { "cell_type": "code", "execution_count": 57, "metadata": {}, "outputs": [], "source": [ "def split_data_into_windows(data):\n", " lines = data.splitlines()\n", " windows = []\n", "\n", " for i in range(1, len(lines)):\n", " # Get the previous line and current line\n", " prev_line = lines[i - 1]\n", " curr_line = lines[i]\n", "\n", " # If it's the first line or the last line, don't split it\n", " if i == 1:\n", " mid_index = len(curr_line) // 2\n", " first_half = curr_line[:mid_index]\n", " windows.append((prev_line, first_half))\n", " elif i == len(lines) - 1:\n", " mid_index = len(prev_line) // 2\n", " second_half = prev_line[mid_index:]\n", " windows.append((second_half, curr_line))\n", " else:\n", " # Split the current line into two halves\n", " curr_mid_index = len(curr_line) // 2\n", " prev_mid_index = len(prev_line) // 2\n", " first_half = curr_line[:curr_mid_index]\n", " second_half = prev_line[prev_mid_index:]\n", "\n", " windows.append((second_half, first_half))\n", " return windows" ] }, { "cell_type": "code", "execution_count": 41, "metadata": {}, "outputs": [], "source": [ "def join_lines(lines):\n", " with_space = lines[0] + \" \" + lines[1]\n", " without_space = lines[0] + lines[1]\n", " with_space_perplexity = calculate_perplexity(with_space)\n", " without_space_perplexity = calculate_perplexity(without_space)\n", " if with_space_perplexity < without_space_perplexity:\n", " return with_space\n", " else:\n", " return without_space" ] }, { "cell_type": "code", "execution_count": 42, "metadata": {}, "outputs": [], "source": [ "def join_windows(windows):\n", " output = \"\"\n", " for window in windows:\n", " output += join_lines(window)\n", " return output" ] }, { "cell_type": "code", "execution_count": 43, "metadata": {}, "outputs": [], "source": [ "files = glob.glob(\"orig/*.txt\")" ] }, { "cell_type": "code", "execution_count": 44, "metadata": {}, "outputs": [], "source": [ "# remove all non-original files that have an original file associated with them\n", "copy_files = {}\n", "for file in files:\n", " if file in copy_files:\n", " continue\n", " if \" (_ORIGINAL SERIES)\" in file:\n", " original_file = file.replace(\" (_ORIGINAL SERIES)\", \"\")\n", " copy_files[file] = original_file.replace(\"orig/\", \"dedup/\")\n", " elif \" (_UNCHANGED)\" in file:\n", " original_file = file.replace(\" (_UNCHANGED)\", \"\")\n", " copy_files[file] = original_file.replace(\"orig/\", \"dedup/\")\n", " else:\n", " copy_files[file] = file.replace(\"orig/\", \"dedup/\")\n", "\n", "# for all values in copy_files, if the value contains parentheses and the filename without them is already a value in copy_files remove it\n", "to_remove = []\n", "for kv in copy_files.items():\n", " if \"(\" in kv[1]:\n", " original_file = re.sub(r\" \\(.+\\)\", \"\", kv[1])\n", " if original_file in copy_files.values():\n", " to_remove.append(kv[0])\n", "\n", "for file in to_remove:\n", " del copy_files[file]\n", "\n", "for kv in copy_files.items():\n", " if \"+\" in kv[1]:\n", " original_file = re.sub(r\" \\(.+\\)\", \"\", kv[1])\n", " copy_files[kv[0]] = original_file\n", "\n", "to_remove.clear()\n", "for kv in copy_files.items():\n", " if \"(\" in kv[1]:\n", " original_file = re.sub(r\" \\(.+\\)\", \"\", kv[1])\n", " if original_file in copy_files.values():\n", " to_remove.append(kv[0])\n", "\n", "for file in to_remove:\n", " del copy_files[file]\n", "\n", "for kv in copy_files.items():\n", " if \"(Full)\" in kv[1]:\n", " original_file = re.sub(r\" \\(.+\\)\", \"\", kv[1])\n", " if original_file not in copy_files.values():\n", " copy_files[kv[0]] = original_file\n", "\n", "to_remove.clear()\n", "for kv in copy_files.items():\n", " if \"(\" in kv[1]:\n", " to_remove.append(kv[0])\n", "\n", "for file in to_remove:\n", " del copy_files[file]\n", "\n", "# copy files to new directory\n", "shutil.rmtree(\"dedup\", ignore_errors=True)\n", "os.makedirs(\"dedup\", exist_ok=True)\n", "for kv in copy_files.items():\n", " shutil.copy(kv[0], kv[1])\n" ] }, { "cell_type": "code", "execution_count": 45, "metadata": {}, "outputs": [], "source": [ "files = glob.glob(\"dedup/*.txt\")" ] }, { "cell_type": "code", "execution_count": 46, "metadata": {}, "outputs": [], "source": [ "# remove all empty lines in the dedup files\n", "shutil.rmtree(\"dedup_no_empty\", ignore_errors=True)\n", "os.makedirs(\"dedup_no_empty\", exist_ok=True)\n", "for file in files:\n", " with open(file, \"r\") as f:\n", " data = f.read()\n", " while \"\\n\\n\" in data:\n", " data = data.replace(\"\\n\\n\", \"\\n\")\n", " new_file = file.replace(\"dedup\", \"dedup_no_empty\")\n", " with open(new_file, \"w\") as f:\n", " f.write(data)" ] }, { "cell_type": "code", "execution_count": 47, "metadata": {}, "outputs": [], "source": [ "files = glob.glob(\"dedup_no_empty/*.txt\")" ] }, { "cell_type": "code", "execution_count": 60, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "100%|██████████| 30/30 [01:56<00:00, 3.88s/it]\n" ] } ], "source": [ "shutil.rmtree(\"joined\", ignore_errors=True)\n", "os.makedirs(\"joined\", exist_ok=True)\n", "for file in tqdm(files):\n", " with open(file, \"r\") as f:\n", " data = f.read()\n", " windows = split_data_into_windows(data)\n", " joined = join_windows(windows)\n", " new_file = file.replace(\"dedup_no_empty\", \"joined\")\n", " # in joined make sure there is a space after every period or comma\n", " joined = joined\n", " \n", " with open(new_file, \"w\") as f:\n", " f.write(joined)" ] }, { "cell_type": "code", "execution_count": 24, "metadata": {}, "outputs": [], "source": [ "files = glob.glob(\"joined/*.txt\")" ] }, { "cell_type": "code", "execution_count": 26, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "100%|██████████| 30/30 [00:00<00:00, 8242.44it/s]\n" ] } ], "source": [ "shutil.rmtree(\"cleaned\", ignore_errors=True)\n", "os.makedirs(\"cleaned\", exist_ok=True)\n", "for file in tqdm(files):\n", " with open(file, \"r\") as f:\n", " data = f.read()\n", " data = data.replace(\".\", \". \").replace(\",\", \", \").replace(\" \", \" \").replace('\"', \"\")\n", " new_file = file.replace(\"joined\", \"cleaned\")\n", " with open(new_file, \"w\") as f:\n", " f.write(data)" ] }, { "cell_type": "code", "execution_count": 27, "metadata": {}, "outputs": [], "source": [ "files = glob.glob(\"cleaned/*.txt\")" ] }, { "cell_type": "code", "execution_count": 29, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "100%|██████████| 30/30 [00:00<00:00, 44779.05it/s]\n" ] }, { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
text
0Just Bambi Sleep. Deeper and deeper. Feeling b...
1Every command Bambi has accepted in this sessi...
2Feeling wonderfully obedient Bambi. Perfectly ...
3That's right. Doing so well. Like a pretty gir...
4Bambi is completely susceptible to hypnosis. A...
5That's right Bambi. Eyelids fluttering like a ...
6That's right Bambi feeling so good. Just drift...
7Now as Bambi Sleeps deeper and deeper. Fuzzier...
8Bambi Sleep Now. And Bambi suddenly finds hers...
9Bambi is a perfect fuckable fashion puppet. A ...
10Drifting deeper and deeper Bambi. More and mor...
11[Various voices looping conditioning phrases]S...
12Bambi Sleep Now. Just Bambi Sleep. Every time ...
13Feeling the need to be Primped And Pampered. ...
14That's a Good Girl Bambi. Feeling more and mor...
15And Bambi is so perfectly trapped in her bimbo...
16And Bambi every time you go so deeply into tra...
17Bambi Freeze. That's a Good Girl. All thoughts...
18Such a Good Girl Bambi. Feeling so much pleasu...
19Soon it will be time for you to awaken Bambi. ...
20That's right Bambi. Feeling so wonderful. Feel...
21Take a deep breath Bambi. Hold it for a moment...
22That's a Good Girl Bambi. Feeling more and mor...
23Bambi Sleep now. No resistance, must sleep now...
24Slipping deeper and deeper now Bambi. Just tak...
25Hi sweetie! Welcome to the Sleepy Girl Salon B...
26It's time to go so much deeper Bambi. And ther...
27That's right. Deeply asleep now Bambi. Complet...
28Bambi can feel her perfect heaving titties now...
29That's it Bambi. So peaceful. So perfect. So b...
\n", "
" ], "text/plain": [ " text\n", "0 Just Bambi Sleep. Deeper and deeper. Feeling b...\n", "1 Every command Bambi has accepted in this sessi...\n", "2 Feeling wonderfully obedient Bambi. Perfectly ...\n", "3 That's right. Doing so well. Like a pretty gir...\n", "4 Bambi is completely susceptible to hypnosis. A...\n", "5 That's right Bambi. Eyelids fluttering like a ...\n", "6 That's right Bambi feeling so good. Just drift...\n", "7 Now as Bambi Sleeps deeper and deeper. Fuzzier...\n", "8 Bambi Sleep Now. And Bambi suddenly finds hers...\n", "9 Bambi is a perfect fuckable fashion puppet. A ...\n", "10 Drifting deeper and deeper Bambi. More and mor...\n", "11 [Various voices looping conditioning phrases]S...\n", "12 Bambi Sleep Now. Just Bambi Sleep. Every time ...\n", "13 Feeling the need to be Primped And Pampered. ...\n", "14 That's a Good Girl Bambi. Feeling more and mor...\n", "15 And Bambi is so perfectly trapped in her bimbo...\n", "16 And Bambi every time you go so deeply into tra...\n", "17 Bambi Freeze. That's a Good Girl. All thoughts...\n", "18 Such a Good Girl Bambi. Feeling so much pleasu...\n", "19 Soon it will be time for you to awaken Bambi. ...\n", "20 That's right Bambi. Feeling so wonderful. Feel...\n", "21 Take a deep breath Bambi. Hold it for a moment...\n", "22 That's a Good Girl Bambi. Feeling more and mor...\n", "23 Bambi Sleep now. No resistance, must sleep now...\n", "24 Slipping deeper and deeper now Bambi. Just tak...\n", "25 Hi sweetie! Welcome to the Sleepy Girl Salon B...\n", "26 It's time to go so much deeper Bambi. And ther...\n", "27 That's right. Deeply asleep now Bambi. Complet...\n", "28 Bambi can feel her perfect heaving titties now...\n", "29 That's it Bambi. So peaceful. So perfect. So b..." ] }, "execution_count": 29, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# load each file as a row with the text in a column called text\n", "texts = []\n", "full_text = \"\"\n", "for file in tqdm(files):\n", " with open(file, \"r\") as f:\n", " data = f.read()\n", " texts.append(data)\n", " full_text += data + \"\\n\"\n", "\n", "file = \"bambisleep.parquet\"\n", "df = pd.DataFrame({ \"text\": texts }, columns=[\"text\"])\n", "df.to_parquet(file)\n", "df" ] }, { "cell_type": "code", "execution_count": 30, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "tokenizer_config.json: 100%|██████████| 1.29k/1.29k [00:00<00:00, 9.67MB/s]\n", "tokenizer.model: 100%|██████████| 500k/500k [00:00<00:00, 5.74MB/s]\n", "tokenizer.json: 100%|██████████| 1.84M/1.84M [00:00<00:00, 4.03MB/s]\n", "special_tokens_map.json: 100%|██████████| 551/551 [00:00<00:00, 4.15MB/s]\n" ] } ], "source": [ "end_tokenizer = AutoTokenizer.from_pretrained(\"TinyLlama/TinyLlama-1.1B-Chat-v1.0\")" ] }, { "cell_type": "code", "execution_count": 31, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Token indices sequence length is longer than the specified maximum sequence length for this model (77787 > 2048). Running this sequence through the model will result in indexing errors\n" ] }, { "data": { "text/plain": [ "['▁Just',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁S',\n", " 'leep',\n", " '.',\n", " '▁De',\n", " 'eper',\n", " '▁and',\n", " '▁deeper',\n", " '.',\n", " '▁Fe',\n", " 'eling',\n", " '▁better',\n", " '▁and',\n", " '▁better',\n", " '.',\n", " '▁Because',\n", " '▁every',\n", " '▁time',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁he',\n", " 'ars',\n", " '▁the',\n", " '▁words',\n", " '.',\n", " '▁Good',\n", " '▁Girl',\n", " '.',\n", " '▁Her',\n", " '▁pleasure',\n", " '▁deep',\n", " 'ens',\n", " '.',\n", " '▁Fe',\n", " 'eling',\n", " '▁so',\n", " '▁much',\n", " '▁better',\n", " '.',\n", " '▁So',\n", " '▁much',\n", " '▁more',\n", " '▁relax',\n", " 'ed',\n", " '▁and',\n", " '▁happy',\n", " '.',\n", " '▁Fe',\n", " 'eling',\n", " '▁so',\n", " '▁good',\n", " '▁all',\n", " '▁over',\n", " '.',\n", " '▁Str',\n", " 'ong',\n", " 'er',\n", " '▁and',\n", " '▁stronger',\n", " '.',\n", " '▁More',\n", " '▁and',\n", " '▁more',\n", " '▁wonder',\n", " 'fully',\n", " '▁eu',\n", " 'ph',\n", " 'or',\n", " 'ic',\n", " '.',\n", " '▁Such',\n", " '▁a',\n", " '▁Good',\n", " '▁Girl',\n", " '.',\n", " '▁Happy',\n", " '.',\n", " '▁Rel',\n", " 'ax',\n", " 'ed',\n", " '▁and',\n", " '▁accepting',\n", " '.',\n", " '▁Fe',\n", " 'eling',\n", " '▁that',\n", " '▁deep',\n", " '▁wave',\n", " '▁of',\n", " '▁capt',\n", " 'iv',\n", " 'ating',\n", " '▁pleasure',\n", " '▁and',\n", " '▁ob',\n", " 'ed',\n", " 'ience',\n", " '.',\n", " '▁Even',\n", " '▁more',\n", " '▁bl',\n", " 'iss',\n", " 'ful',\n", " '▁with',\n", " '▁every',\n", " '▁trigger',\n", " '.',\n", " '▁M',\n", " 'aking',\n", " '▁everything',\n", " '▁right',\n", " '▁with',\n", " '▁her',\n", " '▁world',\n", " '.',\n", " '▁Fe',\n", " 'eling',\n", " '▁more',\n", " '▁and',\n", " '▁more',\n", " '▁like',\n", " '▁a',\n", " '▁Good',\n", " '▁Girl',\n", " '.',\n", " '▁S',\n", " 'li',\n", " 'pping',\n", " '▁away',\n", " '▁into',\n", " '▁utter',\n", " '▁peace',\n", " '▁and',\n", " '▁content',\n", " 'ment',\n", " '.',\n", " '▁R',\n", " 'iding',\n", " '▁away',\n", " '▁on',\n", " '▁a',\n", " '▁cr',\n", " 'est',\n", " 'ing',\n", " '▁wave',\n", " '▁of',\n", " '▁blank',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁happiness',\n", " '▁and',\n", " '▁eu',\n", " 'ph',\n", " 'oria',\n", " '.',\n", " '▁Every',\n", " '▁time',\n", " '▁she',\n", " '▁he',\n", " 'ars',\n", " '▁the',\n", " '▁words',\n", " '▁Good',\n", " '▁Girl',\n", " '.',\n", " '▁Fe',\n", " 'els',\n", " '▁so',\n", " '▁wonderful',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁Mind',\n", " '▁shut',\n", " 'ting',\n", " '▁down',\n", " '.',\n", " '▁F',\n", " 'ading',\n", " '▁away',\n", " '▁more',\n", " '▁and',\n", " '▁more',\n", " '.',\n", " '▁Know',\n", " 'ing',\n", " '▁that',\n", " '▁she',\n", " '▁needs',\n", " '▁to',\n", " '▁be',\n", " '▁a',\n", " '▁perfect',\n", " '▁Good',\n", " '▁Girl',\n", " '.',\n", " '▁To',\n", " '▁accept',\n", " '.',\n", " '▁To',\n", " '▁obey',\n", " '▁and',\n", " '▁forget',\n", " '.',\n", " '▁To',\n", " '▁ensure',\n", " '▁these',\n", " '▁pleasant',\n", " '▁feelings',\n", " '▁will',\n", " '▁continue',\n", " '.',\n", " '▁Know',\n", " 'ing',\n", " '▁that',\n", " '▁it',\n", " '▁will',\n", " '▁be',\n", " '▁so',\n", " '▁easy',\n", " '.',\n", " '▁Because',\n", " '▁she',\n", " '▁is',\n", " '▁an',\n", " '▁empty',\n", " '▁happy',\n", " '▁pu',\n", " 'ppet',\n", " '.',\n", " '▁A',\n", " '▁blank',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁air',\n", " 'head',\n", " '.',\n", " '▁Who',\n", " '▁lov',\n", " 'es',\n", " '▁to',\n", " '▁relax',\n", " '▁and',\n", " '▁obey',\n", " '.',\n", " '▁And',\n", " '▁comp',\n", " 'li',\n", " 'ant',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁doll',\n", " 's',\n", " '▁are',\n", " '▁always',\n", " '▁Good',\n", " '▁Girls',\n", " '.',\n", " '▁Comp',\n", " 'li',\n", " 'ant',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁doll',\n", " 's',\n", " '▁like',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁Fil',\n", " 'led',\n", " '▁with',\n", " '▁such',\n", " '▁a',\n", " '▁deep',\n", " '▁sense',\n", " '▁of',\n", " '▁pride',\n", " '.',\n", " '▁That',\n", " '▁she',\n", " \"'\",\n", " 's',\n", " '▁such',\n", " '▁a',\n", " '▁perfect',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '.',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁lov',\n", " 'es',\n", " '▁being',\n", " '▁a',\n", " '▁perfect',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '.',\n", " '▁Because',\n", " '▁being',\n", " '▁a',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁feels',\n", " '▁wonderful',\n", " '.',\n", " '▁She',\n", " \"'\",\n", " 's',\n", " '▁so',\n", " '▁proud',\n", " '▁that',\n", " '▁she',\n", " \"'\",\n", " 's',\n", " '▁a',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '.',\n", " '▁And',\n", " '▁she',\n", " \"'\",\n", " 's',\n", " '▁so',\n", " '▁proud',\n", " '▁when',\n", " '▁she',\n", " '▁ob',\n", " 'e',\n", " 'ys',\n", " '.',\n", " '▁O',\n", " 'bed',\n", " 'ience',\n", " '▁brings',\n", " '▁pride',\n", " '▁and',\n", " '▁content',\n", " 'ment',\n", " '.',\n", " '▁Such',\n", " '▁deep',\n", " '▁pleasure',\n", " '.',\n", " '▁When',\n", " 'ever',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁Does',\n", " '▁As',\n", " '▁She',\n", " \"'\",\n", " 's',\n", " '▁T',\n", " 'old',\n", " '.',\n", " '▁When',\n", " 'ever',\n", " '▁she',\n", " '▁ob',\n", " 'e',\n", " 'ys',\n", " '▁her',\n", " '▁training',\n", " '.',\n", " '▁When',\n", " 'ever',\n", " '▁she',\n", " '▁is',\n", " '▁called',\n", " '▁a',\n", " '▁Good',\n", " '▁Girl',\n", " '.',\n", " '▁She',\n", " '▁feels',\n", " '▁that',\n", " '▁wonderful',\n", " '▁sens',\n", " 'ation',\n", " '.',\n", " '▁Of',\n", " '▁deep',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁pride',\n", " '.',\n", " '▁W',\n", " 'ash',\n", " 'ing',\n", " '▁over',\n", " '▁her',\n", " '▁existence',\n", " '.',\n", " '▁F',\n", " 'illing',\n", " '▁her',\n", " '▁up',\n", " '▁so',\n", " '▁bl',\n", " 'iss',\n", " 'fully',\n", " '▁Sa',\n", " 'fe',\n", " '▁And',\n", " '▁Sec',\n", " 'ure',\n", " '.',\n", " '▁Need',\n", " 'ing',\n", " '▁it',\n", " '▁more',\n", " '▁and',\n", " '▁more',\n", " '.',\n", " '▁O',\n", " 'bed',\n", " 'ient',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁pride',\n", " '▁feels',\n", " '▁so',\n", " '▁good',\n", " '.',\n", " '▁So',\n", " '▁proud',\n", " '▁to',\n", " '▁be',\n", " '▁a',\n", " '▁perfect',\n", " '▁sub',\n", " 'miss',\n", " 'ive',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁doll',\n", " '▁named',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁Accept',\n", " 'ing',\n", " '▁automatically',\n", " '▁because',\n", " '▁it',\n", " '▁feels',\n", " '▁so',\n", " '▁right',\n", " '.',\n", " '▁That',\n", " \"'\",\n", " 's',\n", " '▁a',\n", " '▁Good',\n", " '▁Girl',\n", " '.',\n", " '▁Even',\n", " '▁just',\n", " '▁the',\n", " '▁fact',\n", " '▁that',\n", " '▁her',\n", " '▁name',\n", " '▁is',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁F',\n", " 'illing',\n", " '▁her',\n", " '▁with',\n", " '▁over',\n", " 'wh',\n", " 'el',\n", " 'ming',\n", " '▁pride',\n", " '▁and',\n", " '▁satisfaction',\n", " '.',\n", " '▁Ple',\n", " 'as',\n", " 'antly',\n", " '▁und',\n", " 'ulating',\n", " '▁waves',\n", " '▁of',\n", " '▁ob',\n", " 'ed',\n", " 'ience',\n", " '▁and',\n", " '▁pleasure',\n", " '.',\n", " '▁Because',\n", " '▁she',\n", " '▁knows',\n", " '▁only',\n", " '▁the',\n", " '▁most',\n", " '▁hel',\n", " 'pl',\n", " 'ess',\n", " 'ly',\n", " '▁d',\n", " 'umb',\n", " '▁and',\n", " '▁se',\n", " 'xy',\n", " '▁girls',\n", " '▁could',\n", " '▁have',\n", " '▁a',\n", " '▁sl',\n", " 'ut',\n", " 'ty',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁name',\n", " '▁like',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁Every',\n", " '▁time',\n", " '▁she',\n", " '▁he',\n", " 'ars',\n", " '▁the',\n", " '▁name',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁Every',\n", " '▁time',\n", " '▁she',\n", " '▁even',\n", " '▁thinks',\n", " '▁of',\n", " '▁her',\n", " '▁name',\n", " '.',\n", " '▁That',\n", " '▁bl',\n", " 'iss',\n", " 'ful',\n", " '▁wave',\n", " '▁of',\n", " '▁deep',\n", " '▁content',\n", " 'ment',\n", " '.',\n", " '▁Know',\n", " 'ing',\n", " '▁that',\n", " '▁her',\n", " '▁name',\n", " '▁is',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁That',\n", " '▁wonderful',\n", " '▁sur',\n", " 'ge',\n", " '▁of',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁eu',\n", " 'ph',\n", " 'oria',\n", " '.',\n", " '▁When',\n", " '▁she',\n", " '▁is',\n", " '▁addressed',\n", " '▁as',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁M',\n", " 'akes',\n", " '▁her',\n", " '▁feel',\n", " '▁like',\n", " '▁such',\n", " '▁a',\n", " '▁Good',\n", " '▁Girl',\n", " '.',\n", " '▁And',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁S',\n", " 'leep',\n", " '.',\n", " '▁Al',\n", " 'most',\n", " '▁expl',\n", " 'oding',\n", " '▁into',\n", " '▁a',\n", " '▁happy',\n", " '▁sub',\n", " 'miss',\n", " 'ive',\n", " '▁little',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁clim',\n", " 'ax',\n", " '.',\n", " '▁So',\n", " '▁proud',\n", " '▁that',\n", " '▁her',\n", " '▁name',\n", " '▁is',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁Unable',\n", " '▁to',\n", " '▁remember',\n", " '▁anything',\n", " '▁else',\n", " '▁because',\n", " '▁being',\n", " '▁called',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁feels',\n", " '▁so',\n", " '▁bl',\n", " 'iss',\n", " 'ful',\n", " '.',\n", " '▁She',\n", " '▁lov',\n", " 'es',\n", " '▁the',\n", " '▁name',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁so',\n", " '▁much',\n", " '.',\n", " '▁Sec',\n", " 'ure',\n", " '▁in',\n", " '▁the',\n", " '▁fact',\n", " '▁that',\n", " '▁it',\n", " '▁matches',\n", " '▁her',\n", " '▁person',\n", " 'ality',\n", " '.',\n", " '▁And',\n", " '▁deeply',\n", " '▁locked',\n", " '▁in',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁identity',\n", " '.',\n", " '▁So',\n", " '▁perfectly',\n", " '.',\n", " '▁Because',\n", " '▁she',\n", " '▁has',\n", " '▁always',\n", " '▁been',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁Even',\n", " '▁just',\n", " '▁the',\n", " '▁word',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '.',\n", " '▁Every',\n", " '▁time',\n", " '▁she',\n", " '▁he',\n", " 'ars',\n", " '▁the',\n", " '▁word',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '.',\n", " '▁Rem',\n", " 'inding',\n", " '▁her',\n", " '▁of',\n", " '▁her',\n", " '▁place',\n", " '▁in',\n", " '▁life',\n", " '.',\n", " '▁As',\n", " '▁a',\n", " '▁blank',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁doll',\n", " '.',\n", " '▁So',\n", " '▁proud',\n", " '▁to',\n", " '▁be',\n", " '▁a',\n", " '▁perfect',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '.',\n", " '▁Wonder',\n", " 'ful',\n", " '▁waves',\n", " '▁of',\n", " '▁pride',\n", " '▁and',\n", " '▁accept',\n", " 'ance',\n", " '▁cour',\n", " 'sing',\n", " '▁through',\n", " '▁her',\n", " '.',\n", " '▁Cr',\n", " 'ushing',\n", " '▁do',\n", " 'se',\n", " '▁of',\n", " '▁pleasure',\n", " '.',\n", " '▁W',\n", " 'arm',\n", " '▁and',\n", " '▁safe',\n", " '.',\n", " '▁Fe',\n", " 'els',\n", " '▁better',\n", " '▁and',\n", " '▁better',\n", " '.',\n", " '▁Just',\n", " '▁a',\n", " '▁d',\n", " 'umb',\n", " '▁se',\n", " 'xy',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁who',\n", " '▁can',\n", " '▁only',\n", " '▁obey',\n", " '.',\n", " '▁Everything',\n", " '▁else',\n", " '▁er',\n", " 'ased',\n", " '▁and',\n", " '▁forgotten',\n", " '.',\n", " '▁Head',\n", " '▁filled',\n", " '▁with',\n", " '▁air',\n", " '.',\n", " '▁B',\n", " 'im',\n", " 'bo',\n", " '▁per',\n", " 'ception',\n", " 's',\n", " '▁solid',\n", " 'ifying',\n", " '▁and',\n", " '▁per',\n", " 'me',\n", " 'ating',\n", " '▁her',\n", " '▁entire',\n", " '▁being',\n", " '.',\n", " '▁Fe',\n", " 'eling',\n", " '▁more',\n", " '▁and',\n", " '▁more',\n", " '▁like',\n", " '▁a',\n", " '▁Good',\n", " '▁Girl',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '.',\n", " '▁Bl',\n", " 'ank',\n", " 'er',\n", " '▁and',\n", " '▁more',\n", " '▁gig',\n", " 'gly',\n", " '.',\n", " '▁Bl',\n", " 'iss',\n", " 'ing',\n", " '▁over',\n", " '▁in',\n", " '▁pleasure',\n", " '.',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁is',\n", " '▁better',\n", " '.',\n", " '▁B',\n", " 'im',\n", " 'bo',\n", " '▁is',\n", " '▁better',\n", " '.',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁lov',\n", " 'es',\n", " '▁feeling',\n", " '▁this',\n", " '▁way',\n", " '.',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁S',\n", " 'leep',\n", " '.',\n", " '▁Mind',\n", " '▁shut',\n", " 'ting',\n", " '▁down',\n", " '▁more',\n", " '▁and',\n", " '▁more',\n", " '.',\n", " '▁So',\n", " '▁proud',\n", " '▁that',\n", " '▁she',\n", " \"'\",\n", " 's',\n", " '▁not',\n", " '▁very',\n", " '▁smart',\n", " '.',\n", " '▁Because',\n", " '▁d',\n", " 'umb',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁air',\n", " 'head',\n", " 's',\n", " '▁are',\n", " '▁unable',\n", " '▁to',\n", " '▁think',\n", " '.',\n", " '▁B',\n", " 'imb',\n", " 'os',\n", " '▁like',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁accept',\n", " '▁and',\n", " '▁feel',\n", " '▁happy',\n", " '.',\n", " '▁Ut',\n", " 'ter',\n", " 'ly',\n", " '▁content',\n", " '▁as',\n", " '▁they',\n", " '▁relax',\n", " '▁and',\n", " '▁obey',\n", " '.',\n", " '▁M',\n", " 'inds',\n", " '▁open',\n", " '.',\n", " '▁Bra',\n", " 'ins',\n", " '▁mel',\n", " 'ting',\n", " '▁away',\n", " '▁p',\n", " 'ink',\n", " '▁empty',\n", " '▁and',\n", " '▁d',\n", " 'iz',\n", " 'zy',\n", " '.',\n", " '▁Everything',\n", " '▁sli',\n", " 'pping',\n", " '▁deeply',\n", " '▁and',\n", " '▁directly',\n", " '▁into',\n", " '▁their',\n", " '▁inn',\n", " 'erm',\n", " 'ost',\n", " '▁sub',\n", " 'cons',\n", " 'cious',\n", " '▁cores',\n", " '.',\n", " '▁All',\n", " '▁condition',\n", " 'ing',\n", " '▁lock',\n", " 'ing',\n", " '▁in',\n", " '▁at',\n", " '▁the',\n", " '▁deep',\n", " 'est',\n", " '▁level',\n", " '.',\n", " '▁B',\n", " 'amb',\n", " 'i',\n", " '▁Free',\n", " 'ze',\n", " '.',\n", " '▁Over',\n", " 'power',\n", " 'ing',\n", " '▁b',\n", " 'im',\n", " 'bo',\n", " '▁pride',\n", " '.',\n", " '▁Lock',\n", " ...]" ] }, "execution_count": 31, "metadata": {}, "output_type": "execute_result" } ], "source": [ "len(end_tokenizer.tokenize(full_text))" ] } ], "metadata": { "kernelspec": { "display_name": ".venv", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.11" } }, "nbformat": 4, "nbformat_minor": 2 }