diff --git "a/competition/13_Qwen2_7b_finetuning_l40.ipynb" "b/competition/13_Qwen2_7b_finetuning_l40.ipynb" --- "a/competition/13_Qwen2_7b_finetuning_l40.ipynb" +++ "b/competition/13_Qwen2_7b_finetuning_l40.ipynb" @@ -84,49 +84,2942 @@ "name": "stdout", "output_type": "stream", "text": [ - "Looking in indexes: https://artifacts.forge.mastercard.com/artifactory/api/pypi/python/simple\n", - "Requirement already satisfied: huggingface_hub==0.23.2 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 1)) (0.23.2)\n", - "Requirement already satisfied: nltk==3.8.1 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 2)) (3.8.1)\n", - "Requirement already satisfied: python-dotenv==1.0.1 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 3)) (1.0.1)\n", - "Requirement already satisfied: black==24.4.0 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 4)) (24.4.0)\n", - "Requirement already satisfied: evaluate==0.4.2 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 5)) (0.4.2)\n", - "Requirement already satisfied: rouge_score==0.1.2 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 6)) (0.1.2)\n", - "Requirement already satisfied: pytest==8.2.1 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 7)) (8.2.1)\n", - "Requirement already satisfied: seaborn==0.13.2 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 8)) (0.13.2)\n", - "Requirement already satisfied: scikit-learn==1.5.0 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 9)) (1.5.0)\n", - "Requirement already satisfied: jupyter in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 10)) (1.0.0)\n", - "Requirement already satisfied: ipywidgets in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 11)) (8.1.3)\n", - "Requirement already satisfied: packaging in c:\\users\\ht\\appdata\\roaming\\python\\python312\\site-packages (from -r requirements.txt (line 12)) (24.1)\n", - "Requirement already satisfied: langchain_openai==0.1.13 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 13)) (0.1.13)\n", - "Requirement already satisfied: wandb==0.17.4 in c:\\users\\ht\\appdata\\local\\programs\\python\\python312\\lib\\site-packages (from -r requirements.txt (line 14)) (0.17.4)\n", - "Note: you may need to restart the kernel to use updated packages.\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "WARNING: Retrying (Retry(total=4, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed')': /artifactory/api/pypi/python/simple/transformers/\n", - "WARNING: Retrying (Retry(total=3, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed')': 
/artifactory/api/pypi/python/simple/transformers/\n", - "WARNING: Retrying (Retry(total=2, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed')': /artifactory/api/pypi/python/simple/transformers/\n", - "WARNING: Retrying (Retry(total=1, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed')': /artifactory/api/pypi/python/simple/transformers/\n", - "WARNING: Retrying (Retry(total=0, connect=None, read=None, redirect=None, status=None)) after connection broken by 'NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed')': /artifactory/api/pypi/python/simple/transformers/\n", - "ERROR: Could not find a version that satisfies the requirement transformers==4.42.4 (from versions: none)\n", - "ERROR: No matching distribution found for transformers==4.42.4\n", - "\n", - "[notice] A new release of pip is available: 24.0 -> 24.1.2\n", - "[notice] To update, run: python.exe -m pip install --upgrade pip\n" + "Collecting huggingface_hub==0.23.2 (from -r requirements.txt (line 1))\n", + " Downloading huggingface_hub-0.23.2-py3-none-any.whl.metadata (12 kB)\n", + "Collecting nltk==3.8.1 (from -r requirements.txt (line 2))\n", + " Downloading nltk-3.8.1-py3-none-any.whl.metadata (2.8 kB)\n", + "Collecting python-dotenv==1.0.1 (from -r requirements.txt (line 3))\n", + " Downloading python_dotenv-1.0.1-py3-none-any.whl.metadata (23 kB)\n", + "Collecting black==24.4.0 (from -r requirements.txt (line 4))\n", + " Downloading black-24.4.0-cp311-cp311-win_amd64.whl.metadata (76 kB)\n", + " ---------------------------------------- 0.0/76.4 kB ? 
eta -:--:--\n", + " ---------------------------------------- 76.4/76.4 kB 2.1 MB/s eta 0:00:00\n", + "Collecting evaluate==0.4.2 (from -r requirements.txt (line 5))\n", + " Downloading evaluate-0.4.2-py3-none-any.whl.metadata (9.3 kB)\n", + "Collecting rouge_score==0.1.2 (from -r requirements.txt (line 6))\n", + " Downloading rouge_score-0.1.2.tar.gz (17 kB)\n", + " Preparing metadata (setup.py): started\n", + " Preparing metadata (setup.py): finished with status 'done'\n", + "Collecting pytest==8.2.1 (from -r requirements.txt (line 7))\n", + " Downloading pytest-8.2.1-py3-none-any.whl.metadata (7.6 kB)\n", + "Collecting seaborn==0.13.2 (from -r requirements.txt (line 8))\n", + " Downloading seaborn-0.13.2-py3-none-any.whl.metadata (5.4 kB)\n", + "Collecting scikit-learn==1.5.0 (from -r requirements.txt (line 9))\n", + " Downloading scikit_learn-1.5.0-cp311-cp311-win_amd64.whl.metadata (11 kB)\n", + "Collecting jupyter (from -r requirements.txt (line 10))\n", + " Downloading jupyter-1.0.0-py2.py3-none-any.whl.metadata (995 bytes)\n", + "Collecting ipywidgets (from -r requirements.txt (line 11))\n", + " Downloading ipywidgets-8.1.3-py3-none-any.whl.metadata (2.4 kB)\n", + "Requirement already satisfied: packaging in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from -r requirements.txt (line 12)) (24.1)\n", + "Collecting langchain_openai==0.1.13 (from -r requirements.txt (line 13))\n", + " Downloading langchain_openai-0.1.13-py3-none-any.whl.metadata (2.5 kB)\n", + "Collecting wandb==0.17.4 (from -r requirements.txt (line 14))\n", + " Downloading wandb-0.17.4-py3-none-win_amd64.whl.metadata (10 kB)\n", + "Collecting transformers==4.42.4 (from -r requirements.txt (line 15))\n", + " Using cached transformers-4.42.4-py3-none-any.whl.metadata (43 kB)\n", + "Collecting sentencepiece==0.2.0 (from -r requirements.txt (line 16))\n", + " Downloading sentencepiece-0.2.0-cp311-cp311-win_amd64.whl.metadata (8.3 kB)\n", + "Collecting einops==0.8.0 (from -r requirements.txt (line 17))\n", + " Downloading einops-0.8.0-py3-none-any.whl.metadata (12 kB)\n", + "Collecting accelerate==0.32.1 (from -r requirements.txt (line 18))\n", + " Downloading accelerate-0.32.1-py3-none-any.whl.metadata (18 kB)\n", + "Collecting peft==0.11.1 (from -r requirements.txt (line 19))\n", + " Downloading peft-0.11.1-py3-none-any.whl.metadata (13 kB)\n", + "Requirement already satisfied: filelock in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from huggingface_hub==0.23.2->-r requirements.txt (line 1)) (3.13.1)\n", + "Collecting fsspec>=2023.5.0 (from huggingface_hub==0.23.2->-r requirements.txt (line 1))\n", + " Downloading fsspec-2024.6.1-py3-none-any.whl.metadata (11 kB)\n", + "Requirement already satisfied: pyyaml>=5.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from huggingface_hub==0.23.2->-r requirements.txt (line 1)) (6.0.1)\n", + "Requirement already satisfied: requests in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from huggingface_hub==0.23.2->-r requirements.txt (line 1)) (2.32.3)\n", + "Collecting tqdm>=4.42.1 (from huggingface_hub==0.23.2->-r requirements.txt (line 1))\n", + " Downloading tqdm-4.66.4-py3-none-any.whl.metadata (57 kB)\n", + " ---------------------------------------- 0.0/57.6 kB ? eta -:--:--\n", + " ---------------------------------------- 57.6/57.6 kB ? 
eta 0:00:00\n", + "Requirement already satisfied: typing-extensions>=3.7.4.3 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from huggingface_hub==0.23.2->-r requirements.txt (line 1)) (4.12.2)\n", + "Collecting click (from nltk==3.8.1->-r requirements.txt (line 2))\n", + " Downloading click-8.1.7-py3-none-any.whl.metadata (3.0 kB)\n", + "Collecting joblib (from nltk==3.8.1->-r requirements.txt (line 2))\n", + " Downloading joblib-1.4.2-py3-none-any.whl.metadata (5.4 kB)\n", + "Collecting regex>=2021.8.3 (from nltk==3.8.1->-r requirements.txt (line 2))\n", + " Downloading regex-2024.5.15-cp311-cp311-win_amd64.whl.metadata (41 kB)\n", + " ---------------------------------------- 0.0/42.0 kB ? eta -:--:--\n", + " ---------------------------------------- 42.0/42.0 kB 2.1 MB/s eta 0:00:00\n", + "Collecting mypy-extensions>=0.4.3 (from black==24.4.0->-r requirements.txt (line 4))\n", + " Downloading mypy_extensions-1.0.0-py3-none-any.whl.metadata (1.1 kB)\n", + "Collecting pathspec>=0.9.0 (from black==24.4.0->-r requirements.txt (line 4))\n", + " Downloading pathspec-0.12.1-py3-none-any.whl.metadata (21 kB)\n", + "Requirement already satisfied: platformdirs>=2 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from black==24.4.0->-r requirements.txt (line 4)) (4.2.2)\n", + "Collecting datasets>=2.0.0 (from evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading datasets-2.20.0-py3-none-any.whl.metadata (19 kB)\n", + "Requirement already satisfied: numpy>=1.17 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from evaluate==0.4.2->-r requirements.txt (line 5)) (1.26.4)\n", + "Collecting dill (from evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading dill-0.3.8-py3-none-any.whl.metadata (10 kB)\n", + "Collecting pandas (from evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading pandas-2.2.2-cp311-cp311-win_amd64.whl.metadata (19 kB)\n", + "Collecting xxhash (from evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Using cached xxhash-3.4.1-cp311-cp311-win_amd64.whl.metadata (12 kB)\n", + "Collecting multiprocess (from evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading multiprocess-0.70.16-py311-none-any.whl.metadata (7.2 kB)\n", + "Collecting absl-py (from rouge_score==0.1.2->-r requirements.txt (line 6))\n", + " Downloading absl_py-2.1.0-py3-none-any.whl.metadata (2.3 kB)\n", + "Requirement already satisfied: six>=1.14.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from rouge_score==0.1.2->-r requirements.txt (line 6)) (1.16.0)\n", + "Collecting iniconfig (from pytest==8.2.1->-r requirements.txt (line 7))\n", + " Downloading iniconfig-2.0.0-py3-none-any.whl.metadata (2.6 kB)\n", + "Collecting pluggy<2.0,>=1.5 (from pytest==8.2.1->-r requirements.txt (line 7))\n", + " Using cached pluggy-1.5.0-py3-none-any.whl.metadata (4.8 kB)\n", + "Requirement already satisfied: colorama in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from pytest==8.2.1->-r requirements.txt (line 7)) (0.4.6)\n", + "Collecting matplotlib!=3.6.1,>=3.4 (from seaborn==0.13.2->-r requirements.txt (line 8))\n", + " Downloading matplotlib-3.9.1-cp311-cp311-win_amd64.whl.metadata (11 kB)\n", + "Collecting scipy>=1.6.0 (from scikit-learn==1.5.0->-r requirements.txt (line 9))\n", + " Downloading scipy-1.14.0-cp311-cp311-win_amd64.whl.metadata (60 kB)\n", + " ---------------------------------------- 0.0/60.8 kB ? 
eta -:--:--\n", + " ---------------------------------------- 60.8/60.8 kB 3.4 MB/s eta 0:00:00\n", + "Collecting threadpoolctl>=3.1.0 (from scikit-learn==1.5.0->-r requirements.txt (line 9))\n", + " Downloading threadpoolctl-3.5.0-py3-none-any.whl.metadata (13 kB)\n", + "Collecting langchain-core<0.3,>=0.2.2 (from langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading langchain_core-0.2.23-py3-none-any.whl.metadata (6.2 kB)\n", + "Collecting openai<2.0.0,>=1.32.0 (from langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading openai-1.37.0-py3-none-any.whl.metadata (22 kB)\n", + "Collecting tiktoken<1,>=0.7 (from langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading tiktoken-0.7.0-cp311-cp311-win_amd64.whl.metadata (6.8 kB)\n", + "Collecting docker-pycreds>=0.4.0 (from wandb==0.17.4->-r requirements.txt (line 14))\n", + " Downloading docker_pycreds-0.4.0-py2.py3-none-any.whl.metadata (1.8 kB)\n", + "Collecting gitpython!=3.1.29,>=1.0.0 (from wandb==0.17.4->-r requirements.txt (line 14))\n", + " Downloading GitPython-3.1.43-py3-none-any.whl.metadata (13 kB)\n", + "Collecting protobuf!=4.21.0,<6,>=3.19.0 (from wandb==0.17.4->-r requirements.txt (line 14))\n", + " Downloading protobuf-5.27.2-cp310-abi3-win_amd64.whl.metadata (592 bytes)\n", + "Requirement already satisfied: psutil>=5.0.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from wandb==0.17.4->-r requirements.txt (line 14)) (5.9.0)\n", + "Collecting sentry-sdk>=1.0.0 (from wandb==0.17.4->-r requirements.txt (line 14))\n", + " Downloading sentry_sdk-2.11.0-py2.py3-none-any.whl.metadata (14 kB)\n", + "Collecting setproctitle (from wandb==0.17.4->-r requirements.txt (line 14))\n", + " Downloading setproctitle-1.3.3-cp311-cp311-win_amd64.whl.metadata (10 kB)\n", + "Requirement already satisfied: setuptools in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from wandb==0.17.4->-r requirements.txt (line 14)) (69.5.1)\n", + "Collecting safetensors>=0.4.1 (from transformers==4.42.4->-r requirements.txt (line 15))\n", + " Downloading safetensors-0.4.3-cp311-none-win_amd64.whl.metadata (3.9 kB)\n", + "Collecting tokenizers<0.20,>=0.19 (from transformers==4.42.4->-r requirements.txt (line 15))\n", + " Downloading tokenizers-0.19.1-cp311-none-win_amd64.whl.metadata (6.9 kB)\n", + "Requirement already satisfied: torch>=1.10.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from accelerate==0.32.1->-r requirements.txt (line 18)) (2.4.0)\n", + "Collecting notebook (from jupyter->-r requirements.txt (line 10))\n", + " Downloading notebook-7.2.1-py3-none-any.whl.metadata (10 kB)\n", + "Collecting qtconsole (from jupyter->-r requirements.txt (line 10))\n", + " Downloading qtconsole-5.5.2-py3-none-any.whl.metadata (5.1 kB)\n", + "Collecting jupyter-console (from jupyter->-r requirements.txt (line 10))\n", + " Downloading jupyter_console-6.6.3-py3-none-any.whl.metadata (5.8 kB)\n", + "Collecting nbconvert (from jupyter->-r requirements.txt (line 10))\n", + " Downloading nbconvert-7.16.4-py3-none-any.whl.metadata (8.5 kB)\n", + "Requirement already satisfied: ipykernel in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from jupyter->-r requirements.txt (line 10)) (6.29.5)\n", + "Requirement already satisfied: comm>=0.1.3 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipywidgets->-r requirements.txt (line 11)) (0.2.2)\n", + "Requirement already satisfied: ipython>=6.1.0 in 
c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipywidgets->-r requirements.txt (line 11)) (8.26.0)\n", + "Requirement already satisfied: traitlets>=4.3.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipywidgets->-r requirements.txt (line 11)) (5.14.3)\n", + "Collecting widgetsnbextension~=4.0.11 (from ipywidgets->-r requirements.txt (line 11))\n", + " Downloading widgetsnbextension-4.0.11-py3-none-any.whl.metadata (1.6 kB)\n", + "Collecting jupyterlab-widgets~=3.0.11 (from ipywidgets->-r requirements.txt (line 11))\n", + " Downloading jupyterlab_widgets-3.0.11-py3-none-any.whl.metadata (4.1 kB)\n", + "Collecting pyarrow>=15.0.0 (from datasets>=2.0.0->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading pyarrow-17.0.0-cp311-cp311-win_amd64.whl.metadata (3.4 kB)\n", + "Collecting pyarrow-hotfix (from datasets>=2.0.0->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading pyarrow_hotfix-0.6-py3-none-any.whl.metadata (3.6 kB)\n", + "Collecting fsspec>=2023.5.0 (from huggingface_hub==0.23.2->-r requirements.txt (line 1))\n", + " Downloading fsspec-2024.5.0-py3-none-any.whl.metadata (11 kB)\n", + "Collecting aiohttp (from datasets>=2.0.0->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading aiohttp-3.9.5-cp311-cp311-win_amd64.whl.metadata (7.7 kB)\n", + "Collecting gitdb<5,>=4.0.1 (from gitpython!=3.1.29,>=1.0.0->wandb==0.17.4->-r requirements.txt (line 14))\n", + " Downloading gitdb-4.0.11-py3-none-any.whl.metadata (1.2 kB)\n", + "Requirement already satisfied: decorator in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (5.1.1)\n", + "Requirement already satisfied: jedi>=0.16 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (0.19.1)\n", + "Requirement already satisfied: matplotlib-inline in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (0.1.7)\n", + "Requirement already satisfied: prompt-toolkit<3.1.0,>=3.0.41 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (3.0.47)\n", + "Requirement already satisfied: pygments>=2.4.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (2.18.0)\n", + "Requirement already satisfied: stack-data in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (0.6.2)\n", + "Collecting jsonpatch<2.0,>=1.33 (from langchain-core<0.3,>=0.2.2->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading jsonpatch-1.33-py2.py3-none-any.whl.metadata (3.0 kB)\n", + "Collecting langsmith<0.2.0,>=0.1.75 (from langchain-core<0.3,>=0.2.2->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading langsmith-0.1.93-py3-none-any.whl.metadata (13 kB)\n", + "Collecting pydantic<3,>=1 (from langchain-core<0.3,>=0.2.2->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading pydantic-2.8.2-py3-none-any.whl.metadata (125 kB)\n", + " ---------------------------------------- 0.0/125.2 kB ? 
eta -:--:--\n", + " -------------------------------------- 125.2/125.2 kB 3.7 MB/s eta 0:00:00\n", + "Collecting tenacity!=8.4.0,<9.0.0,>=8.1.0 (from langchain-core<0.3,>=0.2.2->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading tenacity-8.5.0-py3-none-any.whl.metadata (1.2 kB)\n", + "Collecting contourpy>=1.0.1 (from matplotlib!=3.6.1,>=3.4->seaborn==0.13.2->-r requirements.txt (line 8))\n", + " Downloading contourpy-1.2.1-cp311-cp311-win_amd64.whl.metadata (5.8 kB)\n", + "Collecting cycler>=0.10 (from matplotlib!=3.6.1,>=3.4->seaborn==0.13.2->-r requirements.txt (line 8))\n", + " Downloading cycler-0.12.1-py3-none-any.whl.metadata (3.8 kB)\n", + "Collecting fonttools>=4.22.0 (from matplotlib!=3.6.1,>=3.4->seaborn==0.13.2->-r requirements.txt (line 8))\n", + " Downloading fonttools-4.53.1-cp311-cp311-win_amd64.whl.metadata (165 kB)\n", + " ---------------------------------------- 0.0/165.9 kB ? eta -:--:--\n", + " ------------------- ------------------- 81.9/165.9 kB 4.8 MB/s eta 0:00:01\n", + " -------------------------------------- 165.9/165.9 kB 3.3 MB/s eta 0:00:00\n", + "Collecting kiwisolver>=1.3.1 (from matplotlib!=3.6.1,>=3.4->seaborn==0.13.2->-r requirements.txt (line 8))\n", + " Downloading kiwisolver-1.4.5-cp311-cp311-win_amd64.whl.metadata (6.5 kB)\n", + "Requirement already satisfied: pillow>=8 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn==0.13.2->-r requirements.txt (line 8)) (10.4.0)\n", + "Collecting pyparsing>=2.3.1 (from matplotlib!=3.6.1,>=3.4->seaborn==0.13.2->-r requirements.txt (line 8))\n", + " Downloading pyparsing-3.1.2-py3-none-any.whl.metadata (5.1 kB)\n", + "Requirement already satisfied: python-dateutil>=2.7 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn==0.13.2->-r requirements.txt (line 8)) (2.9.0)\n", + "Collecting anyio<5,>=3.5.0 (from openai<2.0.0,>=1.32.0->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading anyio-4.4.0-py3-none-any.whl.metadata (4.6 kB)\n", + "Collecting distro<2,>=1.7.0 (from openai<2.0.0,>=1.32.0->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading distro-1.9.0-py3-none-any.whl.metadata (6.8 kB)\n", + "Collecting httpx<1,>=0.23.0 (from openai<2.0.0,>=1.32.0->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading httpx-0.27.0-py3-none-any.whl.metadata (7.2 kB)\n", + "Collecting sniffio (from openai<2.0.0,>=1.32.0->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading sniffio-1.3.1-py3-none-any.whl.metadata (3.9 kB)\n", + "Collecting pytz>=2020.1 (from pandas->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading pytz-2024.1-py2.py3-none-any.whl.metadata (22 kB)\n", + "Collecting tzdata>=2022.7 (from pandas->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading tzdata-2024.1-py2.py3-none-any.whl.metadata (1.4 kB)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from requests->huggingface_hub==0.23.2->-r requirements.txt (line 1)) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from requests->huggingface_hub==0.23.2->-r requirements.txt (line 1)) (3.7)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from requests->huggingface_hub==0.23.2->-r requirements.txt (line 1)) 
(2.2.2)\n", + "Requirement already satisfied: certifi>=2017.4.17 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from requests->huggingface_hub==0.23.2->-r requirements.txt (line 1)) (2024.7.4)\n", + "Requirement already satisfied: sympy in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from torch>=1.10.0->accelerate==0.32.1->-r requirements.txt (line 18)) (1.12)\n", + "Requirement already satisfied: networkx in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from torch>=1.10.0->accelerate==0.32.1->-r requirements.txt (line 18)) (3.3)\n", + "Requirement already satisfied: jinja2 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from torch>=1.10.0->accelerate==0.32.1->-r requirements.txt (line 18)) (3.1.4)\n", + "Requirement already satisfied: debugpy>=1.6.5 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipykernel->jupyter->-r requirements.txt (line 10)) (1.6.7)\n", + "Requirement already satisfied: jupyter-client>=6.1.12 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipykernel->jupyter->-r requirements.txt (line 10)) (8.6.2)\n", + "Requirement already satisfied: jupyter-core!=5.0.*,>=4.12 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipykernel->jupyter->-r requirements.txt (line 10)) (5.7.2)\n", + "Requirement already satisfied: nest-asyncio in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipykernel->jupyter->-r requirements.txt (line 10)) (1.6.0)\n", + "Requirement already satisfied: pyzmq>=24 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipykernel->jupyter->-r requirements.txt (line 10)) (25.1.2)\n", + "Requirement already satisfied: tornado>=6.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from ipykernel->jupyter->-r requirements.txt (line 10)) (6.4.1)\n", + "Collecting beautifulsoup4 (from nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading beautifulsoup4-4.12.3-py3-none-any.whl.metadata (3.8 kB)\n", + "Collecting bleach!=5.0.0 (from nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading bleach-6.1.0-py3-none-any.whl.metadata (30 kB)\n", + "Collecting defusedxml (from nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading defusedxml-0.7.1-py2.py3-none-any.whl.metadata (32 kB)\n", + "Collecting jupyterlab-pygments (from nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading jupyterlab_pygments-0.3.0-py3-none-any.whl.metadata (4.4 kB)\n", + "Requirement already satisfied: markupsafe>=2.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from nbconvert->jupyter->-r requirements.txt (line 10)) (2.1.3)\n", + "Collecting mistune<4,>=2.0.3 (from nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading mistune-3.0.2-py3-none-any.whl.metadata (1.7 kB)\n", + "Collecting nbclient>=0.5.0 (from nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading nbclient-0.10.0-py3-none-any.whl.metadata (7.8 kB)\n", + "Collecting nbformat>=5.7 (from nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading nbformat-5.10.4-py3-none-any.whl.metadata (3.6 kB)\n", + "Collecting pandocfilters>=1.4.1 (from nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading pandocfilters-1.5.1-py2.py3-none-any.whl.metadata (9.0 kB)\n", + "Collecting tinycss2 (from nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading tinycss2-1.3.0-py3-none-any.whl.metadata (3.0 kB)\n", + "Collecting jupyter-server<3,>=2.4.0 (from 
notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading jupyter_server-2.14.2-py3-none-any.whl.metadata (8.4 kB)\n", + "Collecting jupyterlab-server<3,>=2.27.1 (from notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading jupyterlab_server-2.27.3-py3-none-any.whl.metadata (5.9 kB)\n", + "Collecting jupyterlab<4.3,>=4.2.0 (from notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading jupyterlab-4.2.4-py3-none-any.whl.metadata (16 kB)\n", + "Collecting notebook-shim<0.3,>=0.2 (from notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading notebook_shim-0.2.4-py3-none-any.whl.metadata (4.0 kB)\n", + "Collecting qtpy>=2.4.0 (from qtconsole->jupyter->-r requirements.txt (line 10))\n", + " Downloading QtPy-2.4.1-py3-none-any.whl.metadata (12 kB)\n", + "Collecting aiosignal>=1.1.2 (from aiohttp->datasets>=2.0.0->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading aiosignal-1.3.1-py3-none-any.whl.metadata (4.0 kB)\n", + "Collecting attrs>=17.3.0 (from aiohttp->datasets>=2.0.0->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading attrs-23.2.0-py3-none-any.whl.metadata (9.5 kB)\n", + "Collecting frozenlist>=1.1.1 (from aiohttp->datasets>=2.0.0->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Using cached frozenlist-1.4.1-cp311-cp311-win_amd64.whl.metadata (12 kB)\n", + "Collecting multidict<7.0,>=4.5 (from aiohttp->datasets>=2.0.0->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading multidict-6.0.5-cp311-cp311-win_amd64.whl.metadata (4.3 kB)\n", + "Collecting yarl<2.0,>=1.0 (from aiohttp->datasets>=2.0.0->evaluate==0.4.2->-r requirements.txt (line 5))\n", + " Downloading yarl-1.9.4-cp311-cp311-win_amd64.whl.metadata (32 kB)\n", + "Collecting webencodings (from bleach!=5.0.0->nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading webencodings-0.5.1-py2.py3-none-any.whl.metadata (2.1 kB)\n", + "Collecting smmap<6,>=3.0.1 (from gitdb<5,>=4.0.1->gitpython!=3.1.29,>=1.0.0->wandb==0.17.4->-r requirements.txt (line 14))\n", + " Downloading smmap-5.0.1-py3-none-any.whl.metadata (4.3 kB)\n", + "Collecting httpcore==1.* (from httpx<1,>=0.23.0->openai<2.0.0,>=1.32.0->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading httpcore-1.0.5-py3-none-any.whl.metadata (20 kB)\n", + "Collecting h11<0.15,>=0.13 (from httpcore==1.*->httpx<1,>=0.23.0->openai<2.0.0,>=1.32.0->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading h11-0.14.0-py3-none-any.whl.metadata (8.2 kB)\n", + "Requirement already satisfied: parso<0.9.0,>=0.8.3 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from jedi>=0.16->ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (0.8.4)\n", + "Collecting jsonpointer>=1.9 (from jsonpatch<2.0,>=1.33->langchain-core<0.3,>=0.2.2->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading jsonpointer-3.0.0-py2.py3-none-any.whl.metadata (2.3 kB)\n", + "Requirement already satisfied: pywin32>=300 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from jupyter-core!=5.0.*,>=4.12->ipykernel->jupyter->-r requirements.txt (line 10)) (305.1)\n", + "Collecting argon2-cffi>=21.1 (from jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading argon2_cffi-23.1.0-py3-none-any.whl.metadata (5.2 kB)\n", + "Collecting jupyter-events>=0.9.0 (from jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading 
jupyter_events-0.10.0-py3-none-any.whl.metadata (5.9 kB)\n", + "Collecting jupyter-server-terminals>=0.4.4 (from jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading jupyter_server_terminals-0.5.3-py3-none-any.whl.metadata (5.6 kB)\n", + "Collecting overrides>=5.0 (from jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading overrides-7.7.0-py3-none-any.whl.metadata (5.8 kB)\n", + "Collecting prometheus-client>=0.9 (from jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading prometheus_client-0.20.0-py3-none-any.whl.metadata (1.8 kB)\n", + "Collecting pywinpty>=2.0.1 (from jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading pywinpty-2.0.13-cp311-none-win_amd64.whl.metadata (5.2 kB)\n", + "Collecting send2trash>=1.8.2 (from jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading Send2Trash-1.8.3-py3-none-any.whl.metadata (4.0 kB)\n", + "Collecting terminado>=0.8.3 (from jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading terminado-0.18.1-py3-none-any.whl.metadata (5.8 kB)\n", + "Collecting websocket-client>=1.7 (from jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading websocket_client-1.8.0-py3-none-any.whl.metadata (8.0 kB)\n", + "Collecting async-lru>=1.0.0 (from jupyterlab<4.3,>=4.2.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading async_lru-2.0.4-py3-none-any.whl.metadata (4.5 kB)\n", + "Collecting jupyter-lsp>=2.0.0 (from jupyterlab<4.3,>=4.2.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading jupyter_lsp-2.2.5-py3-none-any.whl.metadata (1.8 kB)\n", + "Collecting babel>=2.10 (from jupyterlab-server<3,>=2.27.1->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading Babel-2.15.0-py3-none-any.whl.metadata (1.5 kB)\n", + "Collecting json5>=0.9.0 (from jupyterlab-server<3,>=2.27.1->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading json5-0.9.25-py3-none-any.whl.metadata (30 kB)\n", + "Collecting jsonschema>=4.18.0 (from jupyterlab-server<3,>=2.27.1->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading jsonschema-4.23.0-py3-none-any.whl.metadata (7.9 kB)\n", + "Collecting orjson<4.0.0,>=3.9.14 (from langsmith<0.2.0,>=0.1.75->langchain-core<0.3,>=0.2.2->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading orjson-3.10.6-cp311-none-win_amd64.whl.metadata (51 kB)\n", + " ---------------------------------------- 0.0/51.6 kB ? eta -:--:--\n", + " ---------------------------------------- 51.6/51.6 kB ? 
eta 0:00:00\n", + "Collecting fastjsonschema>=2.15 (from nbformat>=5.7->nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading fastjsonschema-2.20.0-py3-none-any.whl.metadata (2.1 kB)\n", + "Requirement already satisfied: wcwidth in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from prompt-toolkit<3.1.0,>=3.0.41->ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (0.2.13)\n", + "Collecting annotated-types>=0.4.0 (from pydantic<3,>=1->langchain-core<0.3,>=0.2.2->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading annotated_types-0.7.0-py3-none-any.whl.metadata (15 kB)\n", + "Collecting pydantic-core==2.20.1 (from pydantic<3,>=1->langchain-core<0.3,>=0.2.2->langchain_openai==0.1.13->-r requirements.txt (line 13))\n", + " Downloading pydantic_core-2.20.1-cp311-none-win_amd64.whl.metadata (6.7 kB)\n", + "Collecting soupsieve>1.2 (from beautifulsoup4->nbconvert->jupyter->-r requirements.txt (line 10))\n", + " Downloading soupsieve-2.5-py3-none-any.whl.metadata (4.7 kB)\n", + "Requirement already satisfied: executing>=1.2.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from stack-data->ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (2.0.1)\n", + "Requirement already satisfied: asttokens>=2.1.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from stack-data->ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (2.4.1)\n", + "Requirement already satisfied: pure-eval in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from stack-data->ipython>=6.1.0->ipywidgets->-r requirements.txt (line 11)) (0.2.3)\n", + "Requirement already satisfied: mpmath>=0.19 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from sympy->torch>=1.10.0->accelerate==0.32.1->-r requirements.txt (line 18)) (1.3.0)\n", + "Collecting argon2-cffi-bindings (from argon2-cffi>=21.1->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl.metadata (6.7 kB)\n", + "Collecting jsonschema-specifications>=2023.03.6 (from jsonschema>=4.18.0->jupyterlab-server<3,>=2.27.1->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading jsonschema_specifications-2023.12.1-py3-none-any.whl.metadata (3.0 kB)\n", + "Collecting referencing>=0.28.4 (from jsonschema>=4.18.0->jupyterlab-server<3,>=2.27.1->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading referencing-0.35.1-py3-none-any.whl.metadata (2.8 kB)\n", + "Collecting rpds-py>=0.7.1 (from jsonschema>=4.18.0->jupyterlab-server<3,>=2.27.1->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading rpds_py-0.19.1-cp311-none-win_amd64.whl.metadata (4.2 kB)\n", + "Collecting python-json-logger>=2.0.4 (from jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading python_json_logger-2.0.7-py3-none-any.whl.metadata (6.5 kB)\n", + "Collecting rfc3339-validator (from jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading rfc3339_validator-0.1.4-py2.py3-none-any.whl.metadata (1.5 kB)\n", + "Collecting rfc3986-validator>=0.1.1 (from jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading rfc3986_validator-0.1.1-py2.py3-none-any.whl.metadata (1.7 kB)\n", + "Collecting fqdn (from 
jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading fqdn-1.5.1-py3-none-any.whl.metadata (1.4 kB)\n", + "Collecting isoduration (from jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading isoduration-20.11.0-py3-none-any.whl.metadata (5.7 kB)\n", + "Collecting uri-template (from jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading uri_template-1.3.0-py3-none-any.whl.metadata (8.8 kB)\n", + "Collecting webcolors>=24.6.0 (from jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading webcolors-24.6.0-py3-none-any.whl.metadata (2.6 kB)\n", + "Collecting cffi>=1.0.1 (from argon2-cffi-bindings->argon2-cffi>=21.1->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading cffi-1.16.0-cp311-cp311-win_amd64.whl.metadata (1.5 kB)\n", + "Collecting pycparser (from cffi>=1.0.1->argon2-cffi-bindings->argon2-cffi>=21.1->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading pycparser-2.22-py3-none-any.whl.metadata (943 bytes)\n", + "Collecting arrow>=0.15.0 (from isoduration->jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading arrow-1.3.0-py3-none-any.whl.metadata (7.5 kB)\n", + "Collecting types-python-dateutil>=2.8.10 (from arrow>=0.15.0->isoduration->jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook->jupyter->-r requirements.txt (line 10))\n", + " Downloading types_python_dateutil-2.9.0.20240316-py3-none-any.whl.metadata (1.8 kB)\n", + "Downloading huggingface_hub-0.23.2-py3-none-any.whl (401 kB)\n", + " ---------------------------------------- 0.0/401.7 kB ? eta -:--:--\n", + " --------- ------------------------------ 92.2/401.7 kB 2.6 MB/s eta 0:00:01\n", + " -------------- ------------------------- 143.4/401.7 kB 1.7 MB/s eta 0:00:01\n", + " ---------------------------- ----------- 286.7/401.7 kB 2.2 MB/s eta 0:00:01\n", + " ---------------------------------------- 401.7/401.7 kB 2.5 MB/s eta 0:00:00\n", + "Downloading nltk-3.8.1-py3-none-any.whl (1.5 MB)\n", + " ---------------------------------------- 0.0/1.5 MB ? 
eta -:--:--\n",
+ "Downloading python_dotenv-1.0.1-py3-none-any.whl (19 kB)\n",
+ "Downloading black-24.4.0-cp311-cp311-win_amd64.whl (1.4 MB)\n",
+ "Downloading evaluate-0.4.2-py3-none-any.whl (84 kB)\n",
+ "Downloading pytest-8.2.1-py3-none-any.whl (339 kB)\n",
+ "Downloading seaborn-0.13.2-py3-none-any.whl (294 kB)\n",
+ "Downloading scikit_learn-1.5.0-cp311-cp311-win_amd64.whl (11.0 MB)\n",
+ "Downloading langchain_openai-0.1.13-py3-none-any.whl (45 kB)\n",
+ "Downloading wandb-0.17.4-py3-none-win_amd64.whl (6.8 MB)\n",
+ "Using cached transformers-4.42.4-py3-none-any.whl (9.3 MB)\n",
+ "Downloading sentencepiece-0.2.0-cp311-cp311-win_amd64.whl (991 kB)\n",
+ "Downloading einops-0.8.0-py3-none-any.whl (43 kB)\n",
+ "Downloading accelerate-0.32.1-py3-none-any.whl (314 kB)\n",
+ "Downloading peft-0.11.1-py3-none-any.whl (251 kB)\n",
+ "Downloading jupyter-1.0.0-py2.py3-none-any.whl (2.7 kB)\n",
+ "Downloading ipywidgets-8.1.3-py3-none-any.whl (139 kB)\n",
+ "Downloading click-8.1.7-py3-none-any.whl (97 kB)\n",
+ "Downloading datasets-2.20.0-py3-none-any.whl (547 kB)\n",
+ "Downloading dill-0.3.8-py3-none-any.whl (116 kB)\n",
+ "Downloading docker_pycreds-0.4.0-py2.py3-none-any.whl (9.0 kB)\n",
+ "Downloading fsspec-2024.5.0-py3-none-any.whl (316 kB)\n",
+ "Downloading GitPython-3.1.43-py3-none-any.whl (207 kB)\n",
+ "Downloading joblib-1.4.2-py3-none-any.whl (301 kB)\n",
+ "Downloading jupyterlab_widgets-3.0.11-py3-none-any.whl (214 kB)\n",
+ "Downloading langchain_core-0.2.23-py3-none-any.whl (374 kB)\n",
+ "Downloading matplotlib-3.9.1-cp311-cp311-win_amd64.whl (8.0 MB)\n",
+ "Downloading mypy_extensions-1.0.0-py3-none-any.whl (4.7 kB)\n",
+ "Downloading openai-1.37.0-py3-none-any.whl (337 kB)\n",
+ "Downloading pandas-2.2.2-cp311-cp311-win_amd64.whl (11.6 MB)\n",
+ " 
------------------------ --------------- 7.1/11.6 MB 3.5 MB/s eta 0:00:02\n", + " ------------------------ --------------- 7.3/11.6 MB 3.5 MB/s eta 0:00:02\n", + " ------------------------- -------------- 7.4/11.6 MB 3.5 MB/s eta 0:00:02\n", + " -------------------------- ------------- 7.7/11.6 MB 3.5 MB/s eta 0:00:02\n", + " -------------------------- ------------- 7.8/11.6 MB 3.5 MB/s eta 0:00:02\n", + " --------------------------- ------------ 8.0/11.6 MB 3.5 MB/s eta 0:00:02\n", + " ---------------------------- ----------- 8.2/11.6 MB 3.5 MB/s eta 0:00:01\n", + " ---------------------------- ----------- 8.4/11.6 MB 3.5 MB/s eta 0:00:01\n", + " ----------------------------- ---------- 8.5/11.6 MB 3.5 MB/s eta 0:00:01\n", + " ----------------------------- ---------- 8.7/11.6 MB 3.5 MB/s eta 0:00:01\n", + " ------------------------------ --------- 8.9/11.6 MB 3.5 MB/s eta 0:00:01\n", + " ------------------------------- -------- 9.0/11.6 MB 3.5 MB/s eta 0:00:01\n", + " ------------------------------- -------- 9.1/11.6 MB 3.5 MB/s eta 0:00:01\n", + " ------------------------------- -------- 9.2/11.6 MB 3.5 MB/s eta 0:00:01\n", + " -------------------------------- ------- 9.4/11.6 MB 3.5 MB/s eta 0:00:01\n", + " -------------------------------- ------- 9.5/11.6 MB 3.5 MB/s eta 0:00:01\n", + " --------------------------------- ------ 9.7/11.6 MB 3.5 MB/s eta 0:00:01\n", + " --------------------------------- ------ 9.7/11.6 MB 3.4 MB/s eta 0:00:01\n", + " ---------------------------------- ----- 9.9/11.6 MB 3.4 MB/s eta 0:00:01\n", + " ---------------------------------- ----- 10.1/11.6 MB 3.4 MB/s eta 0:00:01\n", + " ----------------------------------- ---- 10.2/11.6 MB 3.4 MB/s eta 0:00:01\n", + " ----------------------------------- ---- 10.4/11.6 MB 3.4 MB/s eta 0:00:01\n", + " ------------------------------------ --- 10.5/11.6 MB 3.4 MB/s eta 0:00:01\n", + " ------------------------------------ --- 10.6/11.6 MB 3.4 MB/s eta 0:00:01\n", + " ------------------------------------ --- 10.7/11.6 MB 3.4 MB/s eta 0:00:01\n", + " ------------------------------------- -- 10.8/11.6 MB 3.3 MB/s eta 0:00:01\n", + " ------------------------------------- -- 10.9/11.6 MB 3.4 MB/s eta 0:00:01\n", + " ------------------------------------- -- 11.0/11.6 MB 3.3 MB/s eta 0:00:01\n", + " -------------------------------------- - 11.2/11.6 MB 3.4 MB/s eta 0:00:01\n", + " --------------------------------------- 11.3/11.6 MB 3.3 MB/s eta 0:00:01\n", + " --------------------------------------- 11.5/11.6 MB 3.3 MB/s eta 0:00:01\n", + " --------------------------------------- 11.6/11.6 MB 3.3 MB/s eta 0:00:01\n", + " ---------------------------------------- 11.6/11.6 MB 3.3 MB/s eta 0:00:00\n", + "Downloading pathspec-0.12.1-py3-none-any.whl (31 kB)\n", + "Using cached pluggy-1.5.0-py3-none-any.whl (20 kB)\n", + "Downloading protobuf-5.27.2-cp310-abi3-win_amd64.whl (426 kB)\n", + " ---------------------------------------- 0.0/426.9 kB ? eta -:--:--\n", + " ------------- -------------------------- 143.4/426.9 kB 4.2 MB/s eta 0:00:01\n", + " -------------------------- ------------- 286.7/426.9 kB 2.9 MB/s eta 0:00:01\n", + " ---------------------------------------- 426.9/426.9 kB 3.3 MB/s eta 0:00:00\n", + "Downloading regex-2024.5.15-cp311-cp311-win_amd64.whl (268 kB)\n", + " ---------------------------------------- 0.0/269.0 kB ? 
eta -:--:--\n", + " ------------- -------------------------- 92.2/269.0 kB 2.6 MB/s eta 0:00:01\n", + " ------------------------------ --------- 204.8/269.0 kB 2.5 MB/s eta 0:00:01\n", + " ---------------------------------------- 269.0/269.0 kB 2.4 MB/s eta 0:00:00\n", + "Downloading safetensors-0.4.3-cp311-none-win_amd64.whl (287 kB)\n", + " ---------------------------------------- 0.0/287.3 kB ? eta -:--:--\n", + " ------------------- -------------------- 143.4/287.3 kB 2.9 MB/s eta 0:00:01\n", + " ---------------------------- ----------- 204.8/287.3 kB 2.1 MB/s eta 0:00:01\n", + " ---------------------------------------- 287.3/287.3 kB 2.5 MB/s eta 0:00:00\n", + "Downloading scipy-1.14.0-cp311-cp311-win_amd64.whl (44.7 MB)\n", + " ---------------------------------------- 0.0/44.7 MB ? eta -:--:--\n", + " ---------------------------------------- 0.2/44.7 MB 6.9 MB/s eta 0:00:07\n", + " ---------------------------------------- 0.4/44.7 MB 4.5 MB/s eta 0:00:10\n", + " ---------------------------------------- 0.5/44.7 MB 4.3 MB/s eta 0:00:11\n", + " --------------------------------------- 0.7/44.7 MB 3.9 MB/s eta 0:00:12\n", + " --------------------------------------- 0.7/44.7 MB 3.4 MB/s eta 0:00:14\n", + " --------------------------------------- 0.9/44.7 MB 3.2 MB/s eta 0:00:14\n", + " --------------------------------------- 1.0/44.7 MB 3.1 MB/s eta 0:00:15\n", + " - -------------------------------------- 1.2/44.7 MB 3.2 MB/s eta 0:00:14\n", + " - -------------------------------------- 1.4/44.7 MB 3.4 MB/s eta 0:00:13\n", + " - -------------------------------------- 1.5/44.7 MB 3.2 MB/s eta 0:00:14\n", + " - -------------------------------------- 1.6/44.7 MB 3.2 MB/s eta 0:00:14\n", + " - -------------------------------------- 1.6/44.7 MB 3.2 MB/s eta 0:00:14\n", + " - -------------------------------------- 1.8/44.7 MB 3.0 MB/s eta 0:00:15\n", + " - -------------------------------------- 2.0/44.7 MB 3.1 MB/s eta 0:00:15\n", + " - -------------------------------------- 2.2/44.7 MB 3.1 MB/s eta 0:00:14\n", + " -- ------------------------------------- 2.4/44.7 MB 3.2 MB/s eta 0:00:14\n", + " -- ------------------------------------- 2.5/44.7 MB 3.2 MB/s eta 0:00:14\n", + " -- ------------------------------------- 2.6/44.7 MB 3.2 MB/s eta 0:00:14\n", + " -- ------------------------------------- 2.7/44.7 MB 3.1 MB/s eta 0:00:14\n", + " -- ------------------------------------- 2.9/44.7 MB 3.1 MB/s eta 0:00:14\n", + " -- ------------------------------------- 3.0/44.7 MB 3.1 MB/s eta 0:00:14\n", + " -- ------------------------------------- 3.2/44.7 MB 3.1 MB/s eta 0:00:14\n", + " -- ------------------------------------- 3.3/44.7 MB 3.2 MB/s eta 0:00:14\n", + " --- ------------------------------------ 3.4/44.7 MB 3.1 MB/s eta 0:00:14\n", + " --- ------------------------------------ 3.6/44.7 MB 3.1 MB/s eta 0:00:14\n", + " --- ------------------------------------ 3.7/44.7 MB 3.1 MB/s eta 0:00:14\n", + " --- ------------------------------------ 3.9/44.7 MB 3.1 MB/s eta 0:00:14\n", + " --- ------------------------------------ 4.0/44.7 MB 3.0 MB/s eta 0:00:14\n", + " --- ------------------------------------ 4.1/44.7 MB 3.0 MB/s eta 0:00:14\n", + " --- ------------------------------------ 4.3/44.7 MB 3.1 MB/s eta 0:00:14\n", + " ---- ----------------------------------- 4.5/44.7 MB 3.1 MB/s eta 0:00:13\n", + " ---- ----------------------------------- 4.6/44.7 MB 3.1 MB/s eta 0:00:14\n", + " ---- ----------------------------------- 4.7/44.7 MB 3.0 MB/s eta 0:00:14\n", + " ---- 
----------------------------------- 4.8/44.7 MB 3.0 MB/s eta 0:00:14\n", + " ---- ----------------------------------- 4.9/44.7 MB 3.0 MB/s eta 0:00:14\n", + " ---- ----------------------------------- 5.0/44.7 MB 3.0 MB/s eta 0:00:14\n", + " ---- ----------------------------------- 5.2/44.7 MB 3.0 MB/s eta 0:00:14\n", + " ---- ----------------------------------- 5.4/44.7 MB 3.0 MB/s eta 0:00:14\n", + " ---- ----------------------------------- 5.5/44.7 MB 3.0 MB/s eta 0:00:13\n", + " ----- ---------------------------------- 5.6/44.7 MB 3.0 MB/s eta 0:00:14\n", + " ----- ---------------------------------- 5.8/44.7 MB 3.0 MB/s eta 0:00:14\n", + " ----- ---------------------------------- 5.9/44.7 MB 3.0 MB/s eta 0:00:14\n", + " ----- ---------------------------------- 6.0/44.7 MB 3.0 MB/s eta 0:00:13\n", + " ----- ---------------------------------- 6.2/44.7 MB 3.0 MB/s eta 0:00:13\n", + " ----- ---------------------------------- 6.3/44.7 MB 3.0 MB/s eta 0:00:13\n", + " ----- ---------------------------------- 6.6/44.7 MB 3.0 MB/s eta 0:00:13\n", + " ----- ---------------------------------- 6.7/44.7 MB 3.0 MB/s eta 0:00:13\n", + " ------ --------------------------------- 6.8/44.7 MB 3.0 MB/s eta 0:00:13\n", + " ------ --------------------------------- 6.8/44.7 MB 3.0 MB/s eta 0:00:13\n", + " ------ --------------------------------- 6.9/44.7 MB 3.0 MB/s eta 0:00:13\n", + " ------ --------------------------------- 7.0/44.7 MB 2.9 MB/s eta 0:00:13\n", + " ------ --------------------------------- 7.1/44.7 MB 2.9 MB/s eta 0:00:13\n", + " ------ --------------------------------- 7.2/44.7 MB 2.9 MB/s eta 0:00:13\n", + " ------ --------------------------------- 7.3/44.7 MB 2.9 MB/s eta 0:00:13\n", + " ------ --------------------------------- 7.5/44.7 MB 2.9 MB/s eta 0:00:13\n", + " ------ --------------------------------- 7.5/44.7 MB 2.9 MB/s eta 0:00:13\n", + " ------ --------------------------------- 7.6/44.7 MB 2.9 MB/s eta 0:00:13\n", + " ------ --------------------------------- 7.7/44.7 MB 2.8 MB/s eta 0:00:14\n", + " ------- -------------------------------- 7.8/44.7 MB 2.8 MB/s eta 0:00:14\n", + " ------- -------------------------------- 7.9/44.7 MB 2.8 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.0/44.7 MB 2.8 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.1/44.7 MB 2.8 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.2/44.7 MB 2.8 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.3/44.7 MB 2.8 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.4/44.7 MB 2.7 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.4/44.7 MB 2.7 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.5/44.7 MB 2.7 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.7/44.7 MB 2.7 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.7/44.7 MB 2.7 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.8/44.7 MB 2.7 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.8/44.7 MB 2.7 MB/s eta 0:00:14\n", + " ------- -------------------------------- 8.9/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 9.0/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 9.1/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 9.2/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 9.3/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- 
------------------------------- 9.4/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 9.5/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 9.7/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 9.7/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 9.8/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 9.9/44.7 MB 2.6 MB/s eta 0:00:14\n", + " -------- ------------------------------- 10.1/44.7 MB 2.6 MB/s eta 0:00:14\n", + " --------- ------------------------------ 10.2/44.7 MB 2.6 MB/s eta 0:00:14\n", + " --------- ------------------------------ 10.4/44.7 MB 2.6 MB/s eta 0:00:14\n", + " --------- ------------------------------ 10.6/44.7 MB 2.6 MB/s eta 0:00:14\n", + " --------- ------------------------------ 10.7/44.7 MB 2.6 MB/s eta 0:00:14\n", + " --------- ------------------------------ 10.8/44.7 MB 2.6 MB/s eta 0:00:14\n", + " --------- ------------------------------ 11.0/44.7 MB 2.6 MB/s eta 0:00:13\n", + " --------- ------------------------------ 11.2/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ---------- ----------------------------- 11.2/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ---------- ----------------------------- 11.4/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ---------- ----------------------------- 11.5/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ---------- ----------------------------- 11.7/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ---------- ----------------------------- 11.9/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ---------- ----------------------------- 12.0/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ---------- ----------------------------- 12.2/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ---------- ----------------------------- 12.3/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ----------- ---------------------------- 12.4/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ----------- ---------------------------- 12.5/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ----------- ---------------------------- 12.7/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ----------- ---------------------------- 12.8/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ----------- ---------------------------- 13.0/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ----------- ---------------------------- 13.1/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ----------- ---------------------------- 13.3/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ----------- ---------------------------- 13.4/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ------------ --------------------------- 13.5/44.7 MB 2.6 MB/s eta 0:00:13\n", + " ------------ --------------------------- 13.7/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------ --------------------------- 13.8/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------ --------------------------- 13.9/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------ --------------------------- 14.1/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------ --------------------------- 14.3/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------ --------------------------- 14.4/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------ --------------------------- 14.5/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------- -------------------------- 14.6/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------- -------------------------- 14.8/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------- -------------------------- 14.9/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------- -------------------------- 15.1/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------- -------------------------- 15.2/44.7 MB 2.6 MB/s eta 0:00:12\n", + " 
------------- -------------------------- 15.4/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------- -------------------------- 15.5/44.7 MB 2.6 MB/s eta 0:00:12\n", + " ------------- -------------------------- 15.6/44.7 MB 2.6 MB/s eta 0:00:12\n", + " -------------- ------------------------- 15.8/44.7 MB 2.6 MB/s eta 0:00:12\n", + " -------------- ------------------------- 15.9/44.7 MB 2.6 MB/s eta 0:00:12\n", + " -------------- ------------------------- 16.1/44.7 MB 2.6 MB/s eta 0:00:11\n", + " -------------- ------------------------- 16.2/44.7 MB 2.6 MB/s eta 0:00:12\n", + " -------------- ------------------------- 16.3/44.7 MB 2.6 MB/s eta 0:00:11\n", + " -------------- ------------------------- 16.4/44.7 MB 2.6 MB/s eta 0:00:11\n", + " -------------- ------------------------- 16.6/44.7 MB 2.6 MB/s eta 0:00:11\n", + " -------------- ------------------------- 16.7/44.7 MB 2.6 MB/s eta 0:00:11\n", + " --------------- ------------------------ 16.8/44.7 MB 2.6 MB/s eta 0:00:11\n", + " --------------- ------------------------ 17.0/44.7 MB 2.6 MB/s eta 0:00:11\n", + " --------------- ------------------------ 17.1/44.7 MB 2.6 MB/s eta 0:00:11\n", + " --------------- ------------------------ 17.2/44.7 MB 2.6 MB/s eta 0:00:11\n", + " --------------- ------------------------ 17.3/44.7 MB 2.6 MB/s eta 0:00:11\n", + " --------------- ------------------------ 17.5/44.7 MB 2.6 MB/s eta 0:00:11\n", + " --------------- ------------------------ 17.5/44.7 MB 2.6 MB/s eta 0:00:11\n", + " --------------- ------------------------ 17.7/44.7 MB 2.6 MB/s eta 0:00:11\n", + " --------------- ------------------------ 17.8/44.7 MB 2.7 MB/s eta 0:00:11\n", + " ---------------- ----------------------- 18.0/44.7 MB 2.7 MB/s eta 0:00:11\n", + " ---------------- ----------------------- 18.1/44.7 MB 2.7 MB/s eta 0:00:10\n", + " ---------------- ----------------------- 18.2/44.7 MB 2.7 MB/s eta 0:00:10\n", + " ---------------- ----------------------- 18.4/44.7 MB 2.7 MB/s eta 0:00:10\n", + " ---------------- ----------------------- 18.6/44.7 MB 2.8 MB/s eta 0:00:10\n", + " ---------------- ----------------------- 18.8/44.7 MB 2.8 MB/s eta 0:00:10\n", + " ---------------- ----------------------- 18.9/44.7 MB 2.8 MB/s eta 0:00:10\n", + " ----------------- ---------------------- 19.1/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ----------------- ---------------------- 19.2/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ----------------- ---------------------- 19.4/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ----------------- ---------------------- 19.5/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ----------------- ---------------------- 19.5/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ----------------- ---------------------- 19.6/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ----------------- ---------------------- 19.7/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ----------------- ---------------------- 19.8/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ----------------- ---------------------- 19.9/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ----------------- ---------------------- 20.1/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ------------------ --------------------- 20.3/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ------------------ --------------------- 20.5/44.7 MB 3.0 MB/s eta 0:00:09\n", + " ------------------ --------------------- 20.6/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ------------------ --------------------- 20.7/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ------------------ --------------------- 20.8/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ------------------ --------------------- 21.0/44.7 MB 2.9 MB/s eta 
0:00:09\n", + " ------------------ --------------------- 21.1/44.7 MB 2.9 MB/s eta 0:00:09\n", + " ------------------- -------------------- 21.3/44.7 MB 2.9 MB/s eta 0:00:08\n", + " ------------------- -------------------- 21.5/44.7 MB 2.9 MB/s eta 0:00:08\n", + " ------------------- -------------------- 21.7/44.7 MB 2.9 MB/s eta 0:00:08\n", + " ------------------- -------------------- 21.7/44.7 MB 3.0 MB/s eta 0:00:08\n", + " ------------------- -------------------- 21.8/44.7 MB 2.9 MB/s eta 0:00:08\n", + " ------------------- -------------------- 21.9/44.7 MB 2.9 MB/s eta 0:00:08\n", + " ------------------- -------------------- 22.0/44.7 MB 2.9 MB/s eta 0:00:08\n", + " ------------------- -------------------- 22.1/44.7 MB 2.9 MB/s eta 0:00:08\n", + " ------------------- -------------------- 22.1/44.7 MB 2.8 MB/s eta 0:00:08\n", + " ------------------- -------------------- 22.2/44.7 MB 2.8 MB/s eta 0:00:09\n", + " ------------------- -------------------- 22.3/44.7 MB 2.8 MB/s eta 0:00:08\n", + " ------------------- -------------------- 22.4/44.7 MB 2.8 MB/s eta 0:00:09\n", + " -------------------- ------------------- 22.5/44.7 MB 2.8 MB/s eta 0:00:09\n", + " -------------------- ------------------- 22.7/44.7 MB 2.8 MB/s eta 0:00:08\n", + " -------------------- ------------------- 22.8/44.7 MB 2.8 MB/s eta 0:00:08\n", + " -------------------- ------------------- 23.0/44.7 MB 2.8 MB/s eta 0:00:08\n", + " -------------------- ------------------- 23.2/44.7 MB 2.8 MB/s eta 0:00:08\n", + " -------------------- ------------------- 23.4/44.7 MB 2.8 MB/s eta 0:00:08\n", + " --------------------- ------------------ 23.6/44.7 MB 2.8 MB/s eta 0:00:08\n", + " --------------------- ------------------ 23.8/44.7 MB 2.9 MB/s eta 0:00:08\n", + " --------------------- ------------------ 24.0/44.7 MB 2.9 MB/s eta 0:00:08\n", + " --------------------- ------------------ 24.2/44.7 MB 2.9 MB/s eta 0:00:08\n", + " --------------------- ------------------ 24.4/44.7 MB 2.9 MB/s eta 0:00:07\n", + " --------------------- ------------------ 24.6/44.7 MB 2.9 MB/s eta 0:00:07\n", + " ---------------------- ----------------- 24.8/44.7 MB 2.9 MB/s eta 0:00:07\n", + " ---------------------- ----------------- 24.9/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ---------------------- ----------------- 25.2/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ---------------------- ----------------- 25.3/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ---------------------- ----------------- 25.4/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ---------------------- ----------------- 25.6/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ---------------------- ----------------- 25.7/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ----------------------- ---------------- 25.8/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ----------------------- ---------------- 26.0/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ----------------------- ---------------- 26.1/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ----------------------- ---------------- 26.3/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ----------------------- ---------------- 26.4/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ----------------------- ---------------- 26.6/44.7 MB 3.0 MB/s eta 0:00:07\n", + " ----------------------- ---------------- 26.7/44.7 MB 3.0 MB/s eta 0:00:06\n", + " ------------------------ --------------- 26.9/44.7 MB 3.0 MB/s eta 0:00:06\n", + " ------------------------ --------------- 27.1/44.7 MB 3.0 MB/s eta 0:00:06\n", + " ------------------------ --------------- 27.2/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------ --------------- 27.4/44.7 
MB 3.0 MB/s eta 0:00:06\n", + " ------------------------ --------------- 27.5/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------ --------------- 27.7/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------ --------------- 27.8/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------ --------------- 28.0/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------- -------------- 28.1/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------- -------------- 28.2/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------- -------------- 28.4/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------- -------------- 28.5/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------- -------------- 28.7/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------- -------------- 28.7/44.7 MB 3.1 MB/s eta 0:00:06\n", + " ------------------------- -------------- 28.9/44.7 MB 3.0 MB/s eta 0:00:06\n", + " ------------------------- -------------- 28.9/44.7 MB 3.0 MB/s eta 0:00:06\n", + " ------------------------- -------------- 29.0/44.7 MB 3.0 MB/s eta 0:00:06\n", + " -------------------------- ------------- 29.1/44.7 MB 3.0 MB/s eta 0:00:06\n", + " -------------------------- ------------- 29.2/44.7 MB 3.0 MB/s eta 0:00:06\n", + " -------------------------- ------------- 29.5/44.7 MB 3.0 MB/s eta 0:00:06\n", + " -------------------------- ------------- 29.7/44.7 MB 3.0 MB/s eta 0:00:05\n", + " -------------------------- ------------- 29.9/44.7 MB 3.1 MB/s eta 0:00:05\n", + " -------------------------- ------------- 30.0/44.7 MB 3.1 MB/s eta 0:00:05\n", + " -------------------------- ------------- 30.1/44.7 MB 3.1 MB/s eta 0:00:05\n", + " --------------------------- ------------ 30.2/44.7 MB 3.1 MB/s eta 0:00:05\n", + " --------------------------- ------------ 30.4/44.7 MB 3.1 MB/s eta 0:00:05\n", + " --------------------------- ------------ 30.5/44.7 MB 3.1 MB/s eta 0:00:05\n", + " --------------------------- ------------ 30.8/44.7 MB 3.1 MB/s eta 0:00:05\n", + " --------------------------- ------------ 30.9/44.7 MB 3.1 MB/s eta 0:00:05\n", + " --------------------------- ------------ 31.1/44.7 MB 3.1 MB/s eta 0:00:05\n", + " --------------------------- ------------ 31.2/44.7 MB 3.1 MB/s eta 0:00:05\n", + " --------------------------- ------------ 31.3/44.7 MB 3.1 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 31.5/44.7 MB 3.1 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 31.5/44.7 MB 3.1 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 31.6/44.7 MB 3.0 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 31.6/44.7 MB 3.0 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 31.7/44.7 MB 3.0 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 31.9/44.7 MB 3.0 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 32.0/44.7 MB 3.0 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 32.2/44.7 MB 3.1 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 32.3/44.7 MB 3.1 MB/s eta 0:00:05\n", + " ---------------------------- ----------- 32.4/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ----------------------------- ---------- 32.6/44.7 MB 3.2 MB/s eta 0:00:04\n", + " ----------------------------- ---------- 32.7/44.7 MB 3.2 MB/s eta 0:00:04\n", + " ----------------------------- ---------- 32.8/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ----------------------------- ---------- 33.0/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ----------------------------- 
---------- 33.1/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ----------------------------- ---------- 33.2/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ----------------------------- ---------- 33.4/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ----------------------------- ---------- 33.5/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ------------------------------ --------- 33.8/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ------------------------------ --------- 33.9/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ------------------------------ --------- 34.1/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ------------------------------ --------- 34.2/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ------------------------------ --------- 34.4/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ------------------------------ --------- 34.5/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ------------------------------- -------- 34.7/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ------------------------------- -------- 34.9/44.7 MB 3.1 MB/s eta 0:00:04\n", + " ------------------------------- -------- 35.0/44.7 MB 3.0 MB/s eta 0:00:04\n", + " ------------------------------- -------- 35.1/44.7 MB 3.0 MB/s eta 0:00:04\n", + " ------------------------------- -------- 35.2/44.7 MB 3.0 MB/s eta 0:00:04\n", + " ------------------------------- -------- 35.4/44.7 MB 3.0 MB/s eta 0:00:04\n", + " ------------------------------- -------- 35.5/44.7 MB 3.0 MB/s eta 0:00:04\n", + " ------------------------------- -------- 35.7/44.7 MB 3.0 MB/s eta 0:00:04\n", + " -------------------------------- ------- 35.8/44.7 MB 3.0 MB/s eta 0:00:03\n", + " -------------------------------- ------- 36.0/44.7 MB 3.0 MB/s eta 0:00:03\n", + " -------------------------------- ------- 36.2/44.7 MB 3.0 MB/s eta 0:00:03\n", + " -------------------------------- ------- 36.4/44.7 MB 3.0 MB/s eta 0:00:03\n", + " -------------------------------- ------- 36.5/44.7 MB 3.1 MB/s eta 0:00:03\n", + " -------------------------------- ------- 36.6/44.7 MB 3.0 MB/s eta 0:00:03\n", + " -------------------------------- ------- 36.6/44.7 MB 3.0 MB/s eta 0:00:03\n", + " -------------------------------- ------- 36.7/44.7 MB 3.0 MB/s eta 0:00:03\n", + " -------------------------------- ------- 36.8/44.7 MB 3.0 MB/s eta 0:00:03\n", + " --------------------------------- ------ 36.9/44.7 MB 3.0 MB/s eta 0:00:03\n", + " --------------------------------- ------ 37.0/44.7 MB 2.9 MB/s eta 0:00:03\n", + " --------------------------------- ------ 37.1/44.7 MB 2.9 MB/s eta 0:00:03\n", + " --------------------------------- ------ 37.2/44.7 MB 2.9 MB/s eta 0:00:03\n", + " --------------------------------- ------ 37.4/44.7 MB 2.9 MB/s eta 0:00:03\n", + " --------------------------------- ------ 37.5/44.7 MB 2.9 MB/s eta 0:00:03\n", + " --------------------------------- ------ 37.7/44.7 MB 2.9 MB/s eta 0:00:03\n", + " --------------------------------- ------ 37.8/44.7 MB 2.9 MB/s eta 0:00:03\n", + " --------------------------------- ------ 38.0/44.7 MB 2.9 MB/s eta 0:00:03\n", + " ---------------------------------- ----- 38.1/44.7 MB 2.9 MB/s eta 0:00:03\n", + " ---------------------------------- ----- 38.3/44.7 MB 2.9 MB/s eta 0:00:03\n", + " ---------------------------------- ----- 38.5/44.7 MB 2.9 MB/s eta 0:00:03\n", + " ---------------------------------- ----- 38.6/44.7 MB 2.9 MB/s eta 0:00:03\n", + " ---------------------------------- ----- 38.8/44.7 MB 2.9 MB/s eta 0:00:03\n", + " ---------------------------------- ----- 39.0/44.7 MB 2.9 MB/s eta 0:00:02\n", + " ---------------------------------- ----- 39.1/44.7 MB 3.0 MB/s eta 0:00:02\n", + " 
----------------------------------- ---- 39.3/44.7 MB 3.1 MB/s eta 0:00:02\n", + " ----------------------------------- ---- 39.5/44.7 MB 3.1 MB/s eta 0:00:02\n", + " ----------------------------------- ---- 39.7/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ----------------------------------- ---- 39.8/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ----------------------------------- ---- 40.0/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ----------------------------------- ---- 40.1/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ------------------------------------ --- 40.3/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ------------------------------------ --- 40.5/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ------------------------------------ --- 40.6/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ------------------------------------ --- 40.8/44.7 MB 3.1 MB/s eta 0:00:02\n", + " ------------------------------------ --- 40.8/44.7 MB 3.1 MB/s eta 0:00:02\n", + " ------------------------------------ --- 40.9/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ------------------------------------ --- 41.1/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ------------------------------------ --- 41.2/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ------------------------------------- -- 41.4/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ------------------------------------- -- 41.6/44.7 MB 3.0 MB/s eta 0:00:02\n", + " ------------------------------------- -- 41.8/44.7 MB 3.2 MB/s eta 0:00:01\n", + " ------------------------------------- -- 41.9/44.7 MB 3.1 MB/s eta 0:00:01\n", + " ------------------------------------- -- 42.1/44.7 MB 3.1 MB/s eta 0:00:01\n", + " ------------------------------------- -- 42.3/44.7 MB 3.1 MB/s eta 0:00:01\n", + " ------------------------------------- -- 42.5/44.7 MB 3.2 MB/s eta 0:00:01\n", + " -------------------------------------- - 42.6/44.7 MB 3.2 MB/s eta 0:00:01\n", + " -------------------------------------- - 42.8/44.7 MB 3.2 MB/s eta 0:00:01\n", + " -------------------------------------- - 43.0/44.7 MB 3.2 MB/s eta 0:00:01\n", + " -------------------------------------- - 43.1/44.7 MB 3.2 MB/s eta 0:00:01\n", + " -------------------------------------- - 43.3/44.7 MB 3.2 MB/s eta 0:00:01\n", + " -------------------------------------- - 43.5/44.7 MB 3.2 MB/s eta 0:00:01\n", + " -------------------------------------- - 43.6/44.7 MB 3.2 MB/s eta 0:00:01\n", + " --------------------------------------- 43.7/44.7 MB 3.2 MB/s eta 0:00:01\n", + " --------------------------------------- 43.8/44.7 MB 3.2 MB/s eta 0:00:01\n", + " --------------------------------------- 44.0/44.7 MB 3.2 MB/s eta 0:00:01\n", + " --------------------------------------- 44.2/44.7 MB 3.2 MB/s eta 0:00:01\n", + " --------------------------------------- 44.4/44.7 MB 3.2 MB/s eta 0:00:01\n", + " --------------------------------------- 44.6/44.7 MB 3.2 MB/s eta 0:00:01\n", + " --------------------------------------- 44.7/44.7 MB 3.2 MB/s eta 0:00:01\n", + " ---------------------------------------- 44.7/44.7 MB 3.1 MB/s eta 0:00:00\n", + "Downloading sentry_sdk-2.11.0-py2.py3-none-any.whl (303 kB)\n", + " ---------------------------------------- 0.0/303.6 kB ? 
eta -:--:--\n", + " ---------------- ----------------------- 122.9/303.6 kB 7.5 MB/s eta 0:00:01\n", + " ------------------------------------- -- 286.7/303.6 kB 4.5 MB/s eta 0:00:01\n", + " ---------------------------------------- 303.6/303.6 kB 3.8 MB/s eta 0:00:00\n", + "Downloading threadpoolctl-3.5.0-py3-none-any.whl (18 kB)\n", + "Downloading tiktoken-0.7.0-cp311-cp311-win_amd64.whl (799 kB)\n", + " ---------------------------------------- 0.0/799.0 kB ? eta -:--:--\n", + " ----- ---------------------------------- 112.6/799.0 kB 3.3 MB/s eta 0:00:01\n", + " ----------- ---------------------------- 225.3/799.0 kB 2.8 MB/s eta 0:00:01\n", + " ---------------------- ----------------- 450.6/799.0 kB 3.5 MB/s eta 0:00:01\n", + " ----------------------------- ---------- 583.7/799.0 kB 3.7 MB/s eta 0:00:01\n", + " ------------------------------------- -- 757.8/799.0 kB 3.4 MB/s eta 0:00:01\n", + " ---------------------------------------- 799.0/799.0 kB 3.2 MB/s eta 0:00:00\n", + "Downloading tokenizers-0.19.1-cp311-none-win_amd64.whl (2.2 MB)\n", + " ---------------------------------------- 0.0/2.2 MB ? eta -:--:--\n", + " -- ------------------------------------- 0.1/2.2 MB 4.3 MB/s eta 0:00:01\n", + " ------ --------------------------------- 0.3/2.2 MB 4.3 MB/s eta 0:00:01\n", + " -------- ------------------------------- 0.5/2.2 MB 3.7 MB/s eta 0:00:01\n", + " ----------- ---------------------------- 0.6/2.2 MB 3.5 MB/s eta 0:00:01\n", + " -------------- ------------------------- 0.8/2.2 MB 3.6 MB/s eta 0:00:01\n", + " ----------------- ---------------------- 1.0/2.2 MB 3.5 MB/s eta 0:00:01\n", + " ------------------- -------------------- 1.1/2.2 MB 3.4 MB/s eta 0:00:01\n", + " ----------------------- ---------------- 1.3/2.2 MB 3.4 MB/s eta 0:00:01\n", + " ------------------------- -------------- 1.4/2.2 MB 3.5 MB/s eta 0:00:01\n", + " --------------------------- ------------ 1.5/2.2 MB 3.4 MB/s eta 0:00:01\n", + " ------------------------------ --------- 1.7/2.2 MB 3.4 MB/s eta 0:00:01\n", + " --------------------------------- ------ 1.9/2.2 MB 3.4 MB/s eta 0:00:01\n", + " ----------------------------------- ---- 2.0/2.2 MB 3.3 MB/s eta 0:00:01\n", + " -------------------------------------- - 2.1/2.2 MB 3.3 MB/s eta 0:00:01\n", + " ---------------------------------------- 2.2/2.2 MB 3.3 MB/s eta 0:00:00\n", + "Downloading tqdm-4.66.4-py3-none-any.whl (78 kB)\n", + " ---------------------------------------- 0.0/78.3 kB ? eta -:--:--\n", + " ---------------------------------------- 78.3/78.3 kB 2.1 MB/s eta 0:00:00\n", + "Downloading widgetsnbextension-4.0.11-py3-none-any.whl (2.3 MB)\n", + " ---------------------------------------- 0.0/2.3 MB ? 
eta -:--:--\n", + " -- ------------------------------------- 0.2/2.3 MB 4.8 MB/s eta 0:00:01\n", + " ---- ----------------------------------- 0.3/2.3 MB 4.4 MB/s eta 0:00:01\n", + " -------- ------------------------------- 0.5/2.3 MB 3.8 MB/s eta 0:00:01\n", + " ----------- ---------------------------- 0.7/2.3 MB 3.9 MB/s eta 0:00:01\n", + " --------------- ------------------------ 0.9/2.3 MB 4.0 MB/s eta 0:00:01\n", + " ----------------- ---------------------- 1.0/2.3 MB 3.7 MB/s eta 0:00:01\n", + " ------------------- -------------------- 1.1/2.3 MB 3.5 MB/s eta 0:00:01\n", + " --------------------- ------------------ 1.2/2.3 MB 3.6 MB/s eta 0:00:01\n", + " ------------------------- -------------- 1.5/2.3 MB 3.5 MB/s eta 0:00:01\n", + " --------------------------- ------------ 1.6/2.3 MB 3.5 MB/s eta 0:00:01\n", + " ----------------------------- ---------- 1.7/2.3 MB 3.4 MB/s eta 0:00:01\n", + " ----------------------------- ---------- 1.7/2.3 MB 3.4 MB/s eta 0:00:01\n", + " -------------------------------- ------- 1.9/2.3 MB 3.2 MB/s eta 0:00:01\n", + " --------------------------------- ------ 2.0/2.3 MB 3.1 MB/s eta 0:00:01\n", + " ---------------------------------- ----- 2.0/2.3 MB 2.9 MB/s eta 0:00:01\n", + " ------------------------------------ --- 2.2/2.3 MB 2.9 MB/s eta 0:00:01\n", + " --------------------------------------- 2.3/2.3 MB 3.0 MB/s eta 0:00:01\n", + " ---------------------------------------- 2.3/2.3 MB 2.9 MB/s eta 0:00:00\n", + "Downloading absl_py-2.1.0-py3-none-any.whl (133 kB)\n", + " ---------------------------------------- 0.0/133.7 kB ? eta -:--:--\n", + " ------------------------ --------------- 81.9/133.7 kB 4.8 MB/s eta 0:00:01\n", + " ---------------------------------------- 133.7/133.7 kB 2.6 MB/s eta 0:00:00\n", + "Downloading iniconfig-2.0.0-py3-none-any.whl (5.9 kB)\n", + "Downloading jupyter_console-6.6.3-py3-none-any.whl (24 kB)\n", + "Downloading multiprocess-0.70.16-py311-none-any.whl (143 kB)\n", + " ---------------------------------------- 0.0/143.5 kB ? eta -:--:--\n", + " ---------------------------------------- 143.5/143.5 kB 4.3 MB/s eta 0:00:00\n", + "Downloading nbconvert-7.16.4-py3-none-any.whl (257 kB)\n", + " ---------------------------------------- 0.0/257.4 kB ? eta -:--:--\n", + " ----------- ---------------------------- 71.7/257.4 kB 3.8 MB/s eta 0:00:01\n", + " ------------------------------- -------- 204.8/257.4 kB 3.1 MB/s eta 0:00:01\n", + " ---------------------------------------- 257.4/257.4 kB 2.6 MB/s eta 0:00:00\n", + "Downloading notebook-7.2.1-py3-none-any.whl (5.0 MB)\n", + " ---------------------------------------- 0.0/5.0 MB ? 
eta -:--:--\n", + " --------------------------------------- 0.1/5.0 MB 2.6 MB/s eta 0:00:02\n", + " - -------------------------------------- 0.2/5.0 MB 2.4 MB/s eta 0:00:03\n", + " -- ------------------------------------- 0.3/5.0 MB 2.2 MB/s eta 0:00:03\n", + " ---- ----------------------------------- 0.5/5.0 MB 2.7 MB/s eta 0:00:02\n", + " ----- ---------------------------------- 0.6/5.0 MB 2.9 MB/s eta 0:00:02\n", + " ----- ---------------------------------- 0.7/5.0 MB 2.9 MB/s eta 0:00:02\n", + " ------ --------------------------------- 0.9/5.0 MB 2.8 MB/s eta 0:00:02\n", + " ------- -------------------------------- 1.0/5.0 MB 2.7 MB/s eta 0:00:02\n", + " -------- ------------------------------- 1.1/5.0 MB 2.6 MB/s eta 0:00:02\n", + " ---------- ----------------------------- 1.3/5.0 MB 2.7 MB/s eta 0:00:02\n", + " ----------- ---------------------------- 1.4/5.0 MB 2.8 MB/s eta 0:00:02\n", + " ------------ --------------------------- 1.6/5.0 MB 2.8 MB/s eta 0:00:02\n", + " ------------- -------------------------- 1.7/5.0 MB 2.8 MB/s eta 0:00:02\n", + " -------------- ------------------------- 1.9/5.0 MB 2.9 MB/s eta 0:00:02\n", + " --------------- ------------------------ 2.0/5.0 MB 2.8 MB/s eta 0:00:02\n", + " --------------- ------------------------ 2.0/5.0 MB 2.7 MB/s eta 0:00:02\n", + " ---------------- ----------------------- 2.1/5.0 MB 2.7 MB/s eta 0:00:02\n", + " ----------------- ---------------------- 2.2/5.0 MB 2.6 MB/s eta 0:00:02\n", + " ------------------ --------------------- 2.3/5.0 MB 2.6 MB/s eta 0:00:02\n", + " ------------------- -------------------- 2.4/5.0 MB 2.6 MB/s eta 0:00:02\n", + " -------------------- ------------------- 2.5/5.0 MB 2.6 MB/s eta 0:00:01\n", + " -------------------- ------------------- 2.6/5.0 MB 2.6 MB/s eta 0:00:01\n", + " --------------------- ------------------ 2.8/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ---------------------- ----------------- 2.8/5.0 MB 2.5 MB/s eta 0:00:01\n", + " ----------------------- ---------------- 3.0/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ------------------------ --------------- 3.1/5.0 MB 2.6 MB/s eta 0:00:01\n", + " -------------------------- ------------- 3.3/5.0 MB 2.6 MB/s eta 0:00:01\n", + " --------------------------- ------------ 3.5/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ---------------------------- ----------- 3.5/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ----------------------------- ---------- 3.7/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ------------------------------ --------- 3.8/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ------------------------------ --------- 3.9/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ------------------------------- -------- 4.0/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ------------------------------- -------- 4.0/5.0 MB 2.5 MB/s eta 0:00:01\n", + " --------------------------------- ------ 4.2/5.0 MB 2.6 MB/s eta 0:00:01\n", + " --------------------------------- ------ 4.3/5.0 MB 2.5 MB/s eta 0:00:01\n", + " ----------------------------------- ---- 4.4/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ------------------------------------ --- 4.6/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ------------------------------------- -- 4.7/5.0 MB 2.6 MB/s eta 0:00:01\n", + " -------------------------------------- - 4.8/5.0 MB 2.6 MB/s eta 0:00:01\n", + " --------------------------------------- 4.9/5.0 MB 2.6 MB/s eta 0:00:01\n", + " ---------------------------------------- 5.0/5.0 MB 2.6 MB/s eta 0:00:00\n", + "Downloading qtconsole-5.5.2-py3-none-any.whl (123 kB)\n", + " ---------------------------------------- 0.0/123.4 kB ? 
eta -:--:--\n", + " ---------------------------------------- 123.4/123.4 kB 3.7 MB/s eta 0:00:00\n", + "Downloading setproctitle-1.3.3-cp311-cp311-win_amd64.whl (11 kB)\n", + "Using cached xxhash-3.4.1-cp311-cp311-win_amd64.whl (29 kB)\n", + "Downloading aiohttp-3.9.5-cp311-cp311-win_amd64.whl (370 kB)\n", + " ---------------------------------------- 0.0/370.8 kB ? eta -:--:--\n", + " ------------ --------------------------- 112.6/370.8 kB 3.2 MB/s eta 0:00:01\n", + " ------------------------- -------------- 235.5/370.8 kB 3.6 MB/s eta 0:00:01\n", + " ----------------------------------- ---- 327.7/370.8 kB 2.9 MB/s eta 0:00:01\n", + " ---------------------------------------- 370.8/370.8 kB 2.6 MB/s eta 0:00:00\n", + "Downloading anyio-4.4.0-py3-none-any.whl (86 kB)\n", + " ---------------------------------------- 0.0/86.8 kB ? eta -:--:--\n", + " ---------------------------------------- 86.8/86.8 kB 4.8 MB/s eta 0:00:00\n", + "Downloading bleach-6.1.0-py3-none-any.whl (162 kB)\n", + " ---------------------------------------- 0.0/162.8 kB ? eta -:--:--\n", + " ---------------------------------------- 162.8/162.8 kB 3.3 MB/s eta 0:00:00\n", + "Downloading contourpy-1.2.1-cp311-cp311-win_amd64.whl (188 kB)\n", + " ---------------------------------------- 0.0/188.2 kB ? eta -:--:--\n", + " ------------------------------ --------- 143.4/188.2 kB 4.3 MB/s eta 0:00:01\n", + " ---------------------------------------- 188.2/188.2 kB 3.8 MB/s eta 0:00:00\n", + "Downloading cycler-0.12.1-py3-none-any.whl (8.3 kB)\n", + "Downloading distro-1.9.0-py3-none-any.whl (20 kB)\n", + "Downloading fonttools-4.53.1-cp311-cp311-win_amd64.whl (2.2 MB)\n", + " ---------------------------------------- 0.0/2.2 MB ? eta -:--:--\n", + " -- ------------------------------------- 0.1/2.2 MB 3.6 MB/s eta 0:00:01\n", + " ----- ---------------------------------- 0.3/2.2 MB 4.2 MB/s eta 0:00:01\n", + " ------- -------------------------------- 0.4/2.2 MB 3.5 MB/s eta 0:00:01\n", + " -------- ------------------------------- 0.5/2.2 MB 3.1 MB/s eta 0:00:01\n", + " ----------- ---------------------------- 0.6/2.2 MB 3.1 MB/s eta 0:00:01\n", + " -------------- ------------------------- 0.8/2.2 MB 2.9 MB/s eta 0:00:01\n", + " ----------------- ---------------------- 1.0/2.2 MB 3.0 MB/s eta 0:00:01\n", + " -------------------- ------------------- 1.1/2.2 MB 3.1 MB/s eta 0:00:01\n", + " --------------------- ------------------ 1.2/2.2 MB 3.0 MB/s eta 0:00:01\n", + " ------------------------- -------------- 1.4/2.2 MB 3.2 MB/s eta 0:00:01\n", + " ---------------------------- ----------- 1.5/2.2 MB 3.2 MB/s eta 0:00:01\n", + " ------------------------------- -------- 1.7/2.2 MB 3.1 MB/s eta 0:00:01\n", + " --------------------------------- ------ 1.9/2.2 MB 3.1 MB/s eta 0:00:01\n", + " ----------------------------------- ---- 1.9/2.2 MB 3.1 MB/s eta 0:00:01\n", + " ------------------------------------- -- 2.1/2.2 MB 3.0 MB/s eta 0:00:01\n", + " --------------------------------------- 2.2/2.2 MB 3.0 MB/s eta 0:00:01\n", + " ---------------------------------------- 2.2/2.2 MB 2.9 MB/s eta 0:00:00\n", + "Downloading gitdb-4.0.11-py3-none-any.whl (62 kB)\n", + " ---------------------------------------- 0.0/62.7 kB ? eta -:--:--\n", + " ---------------------------------------- 62.7/62.7 kB ? eta 0:00:00\n", + "Downloading httpx-0.27.0-py3-none-any.whl (75 kB)\n", + " ---------------------------------------- 0.0/75.6 kB ? 
eta -:--:--\n",
+ "Downloading httpcore-1.0.5-py3-none-any.whl (77 kB)\n",
+ "Downloading jsonpatch-1.33-py2.py3-none-any.whl (12 kB)\n",
+ "Downloading jupyter_server-2.14.2-py3-none-any.whl (383 kB)\n",
+ "Downloading jupyterlab-4.2.4-py3-none-any.whl (11.6 MB)\n",
+ "Downloading jupyterlab_server-2.27.3-py3-none-any.whl (59 kB)\n",
+ "Downloading kiwisolver-1.4.5-cp311-cp311-win_amd64.whl (56 kB)\n",
+ "Downloading langsmith-0.1.93-py3-none-any.whl (139 kB)\n",
+ "Downloading mistune-3.0.2-py3-none-any.whl (47 kB)\n",
+ "Downloading nbclient-0.10.0-py3-none-any.whl (25 kB)\n",
+ "Downloading nbformat-5.10.4-py3-none-any.whl (78 kB)\n",
+ "Downloading notebook_shim-0.2.4-py3-none-any.whl (13 kB)\n",
+ "Downloading pandocfilters-1.5.1-py2.py3-none-any.whl (8.7 kB)\n",
+ "Downloading pyarrow-17.0.0-cp311-cp311-win_amd64.whl (25.2 MB)\n",
+ "Downloading pydantic-2.8.2-py3-none-any.whl (423 kB)\n",
+ "Downloading pydantic_core-2.20.1-cp311-none-win_amd64.whl (1.9 MB)\n",
+ "Downloading pyparsing-3.1.2-py3-none-any.whl (103 kB)\n",
+ "Downloading pytz-2024.1-py2.py3-none-any.whl (505 kB)\n",
+ "Downloading QtPy-2.4.1-py3-none-any.whl (93 kB)\n",
+ "Downloading sniffio-1.3.1-py3-none-any.whl (10 kB)\n",
+ "Downloading tenacity-8.5.0-py3-none-any.whl (28 kB)\n",
+ "Downloading tzdata-2024.1-py2.py3-none-any.whl (345 kB)\n",
+ "Downloading beautifulsoup4-4.12.3-py3-none-any.whl (147 kB)\n",
+ "Downloading defusedxml-0.7.1-py2.py3-none-any.whl (25 kB)\n",
+ "Downloading jupyterlab_pygments-0.3.0-py3-none-any.whl (15 kB)\n",
+ "Downloading pyarrow_hotfix-0.6-py3-none-any.whl (7.9 kB)\n",
+ "Downloading tinycss2-1.3.0-py3-none-any.whl (22 kB)\n",
+ "Downloading aiosignal-1.3.1-py3-none-any.whl (7.6 kB)\n",
+ "Downloading annotated_types-0.7.0-py3-none-any.whl (13 kB)\n",
+ "Downloading argon2_cffi-23.1.0-py3-none-any.whl (15 kB)\n",
+ "Downloading async_lru-2.0.4-py3-none-any.whl (6.1 kB)\n",
+ "Downloading attrs-23.2.0-py3-none-any.whl (60 kB)\n",
+ "Downloading Babel-2.15.0-py3-none-any.whl (9.6 MB)\n",
+ "Downloading fastjsonschema-2.20.0-py3-none-any.whl (23 kB)\n",
+ "Using cached frozenlist-1.4.1-cp311-cp311-win_amd64.whl (50 kB)\n",
+ "Downloading json5-0.9.25-py3-none-any.whl (30 kB)\n",
+ "Downloading jsonpointer-3.0.0-py2.py3-none-any.whl (7.6 kB)\n",
+ "Downloading jsonschema-4.23.0-py3-none-any.whl (88 kB)\n",
+ "Downloading jupyter_events-0.10.0-py3-none-any.whl (18 kB)\n",
+ "Downloading jupyter_lsp-2.2.5-py3-none-any.whl (69 kB)\n",
+ "Downloading jupyter_server_terminals-0.5.3-py3-none-any.whl (13 kB)\n",
+ "Downloading multidict-6.0.5-cp311-cp311-win_amd64.whl (28 kB)\n",
+ "Downloading orjson-3.10.6-cp311-none-win_amd64.whl (136 kB)\n",
+ "Downloading overrides-7.7.0-py3-none-any.whl (17 kB)\n",
+ "Downloading prometheus_client-0.20.0-py3-none-any.whl (54 kB)\n",
+ "Downloading pywinpty-2.0.13-cp311-none-win_amd64.whl (1.4 MB)\n",
+ "Downloading Send2Trash-1.8.3-py3-none-any.whl (18 kB)\n",
+ "Downloading smmap-5.0.1-py3-none-any.whl (24 kB)\n",
+ "Downloading soupsieve-2.5-py3-none-any.whl (36 kB)\n",
+ "Downloading terminado-0.18.1-py3-none-any.whl (14 kB)\n",
+ "Downloading webencodings-0.5.1-py2.py3-none-any.whl (11 kB)\n",
+ "Downloading websocket_client-1.8.0-py3-none-any.whl (58 kB)\n",
+ "Downloading yarl-1.9.4-cp311-cp311-win_amd64.whl (76 kB)\n",
+ "Downloading h11-0.14.0-py3-none-any.whl (58 kB)\n",
+ "Downloading jsonschema_specifications-2023.12.1-py3-none-any.whl (18 kB)\n",
+ "Downloading python_json_logger-2.0.7-py3-none-any.whl (8.1 kB)\n",
+ "Downloading referencing-0.35.1-py3-none-any.whl (26 kB)\n",
+ "Downloading rfc3986_validator-0.1.1-py2.py3-none-any.whl (4.2 kB)\n",
+ "Downloading rpds_py-0.19.1-cp311-none-win_amd64.whl (210 kB)\n",
+ "Downloading argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl (30 kB)\n",
+ "Downloading rfc3339_validator-0.1.4-py2.py3-none-any.whl (3.5 kB)\n",
+ "Downloading cffi-1.16.0-cp311-cp311-win_amd64.whl (181 kB)\n",
+ "Downloading webcolors-24.6.0-py3-none-any.whl (14 kB)\n",
+ "Downloading fqdn-1.5.1-py3-none-any.whl (9.1 kB)\n",
+ "Downloading isoduration-20.11.0-py3-none-any.whl (11 kB)\n",
+ "Downloading uri_template-1.3.0-py3-none-any.whl (11 kB)\n",
+ "Downloading arrow-1.3.0-py3-none-any.whl (66 kB)\n",
+ " ---------------------------------------- 0.0/66.4 kB ? 
eta -:--:--\n", + " ---------------------------------------- 66.4/66.4 kB 3.7 MB/s eta 0:00:00\n", + "Downloading pycparser-2.22-py3-none-any.whl (117 kB)\n", + " ---------------------------------------- 0.0/117.6 kB ? eta -:--:--\n", + " ---------------------------------------- 117.6/117.6 kB 3.5 MB/s eta 0:00:00\n", + "Downloading types_python_dateutil-2.9.0.20240316-py3-none-any.whl (9.7 kB)\n", + "Building wheels for collected packages: rouge_score\n", + " Building wheel for rouge_score (setup.py): started\n", + " Building wheel for rouge_score (setup.py): finished with status 'done'\n", + " Created wheel for rouge_score: filename=rouge_score-0.1.2-py3-none-any.whl size=24972 sha256=501fb47c6aa829dea3a63891f58e0912bba19be6193f787aa32b61378edb2746\n", + " Stored in directory: c:\\users\\ht\\appdata\\local\\pip\\cache\\wheels\\1e\\19\\43\\8a442dc83660ca25e163e1bd1f89919284ab0d0c1475475148\n", + "Successfully built rouge_score\n", + "Installing collected packages: webencodings, sentencepiece, pytz, fastjsonschema, xxhash, widgetsnbextension, websocket-client, webcolors, uri-template, tzdata, types-python-dateutil, tqdm, tinycss2, threadpoolctl, tenacity, soupsieve, sniffio, smmap, setproctitle, sentry-sdk, send2trash, scipy, safetensors, rpds-py, rfc3986-validator, rfc3339-validator, regex, qtpy, pywinpty, python-json-logger, python-dotenv, pyparsing, pydantic-core, pycparser, pyarrow-hotfix, pyarrow, protobuf, prometheus-client, pluggy, pathspec, pandocfilters, overrides, orjson, mypy-extensions, multidict, mistune, kiwisolver, jupyterlab-widgets, jupyterlab-pygments, jsonpointer, json5, joblib, iniconfig, h11, fsspec, frozenlist, fqdn, fonttools, einops, docker-pycreds, distro, dill, defusedxml, cycler, contourpy, click, bleach, babel, attrs, async-lru, annotated-types, absl-py, yarl, tiktoken, terminado, scikit-learn, referencing, pytest, pydantic, pandas, nltk, multiprocess, matplotlib, jsonpatch, huggingface_hub, httpcore, gitdb, cffi, black, beautifulsoup4, arrow, anyio, aiosignal, tokenizers, seaborn, rouge_score, langsmith, jupyter-server-terminals, jsonschema-specifications, isoduration, httpx, gitpython, argon2-cffi-bindings, aiohttp, accelerate, wandb, transformers, openai, langchain-core, jsonschema, ipywidgets, argon2-cffi, qtconsole, peft, nbformat, langchain_openai, jupyter-console, datasets, nbclient, jupyter-events, evaluate, nbconvert, jupyter-server, notebook-shim, jupyterlab-server, jupyter-lsp, jupyterlab, notebook, jupyter\n", + "Successfully installed absl-py-2.1.0 accelerate-0.32.1 aiohttp-3.9.5 aiosignal-1.3.1 annotated-types-0.7.0 anyio-4.4.0 argon2-cffi-23.1.0 argon2-cffi-bindings-21.2.0 arrow-1.3.0 async-lru-2.0.4 attrs-23.2.0 babel-2.15.0 beautifulsoup4-4.12.3 black-24.4.0 bleach-6.1.0 cffi-1.16.0 click-8.1.7 contourpy-1.2.1 cycler-0.12.1 datasets-2.20.0 defusedxml-0.7.1 dill-0.3.8 distro-1.9.0 docker-pycreds-0.4.0 einops-0.8.0 evaluate-0.4.2 fastjsonschema-2.20.0 fonttools-4.53.1 fqdn-1.5.1 frozenlist-1.4.1 fsspec-2024.5.0 gitdb-4.0.11 gitpython-3.1.43 h11-0.14.0 httpcore-1.0.5 httpx-0.27.0 huggingface_hub-0.23.2 iniconfig-2.0.0 ipywidgets-8.1.3 isoduration-20.11.0 joblib-1.4.2 json5-0.9.25 jsonpatch-1.33 jsonpointer-3.0.0 jsonschema-4.23.0 jsonschema-specifications-2023.12.1 jupyter-1.0.0 jupyter-console-6.6.3 jupyter-events-0.10.0 jupyter-lsp-2.2.5 jupyter-server-2.14.2 jupyter-server-terminals-0.5.3 jupyterlab-4.2.4 jupyterlab-pygments-0.3.0 jupyterlab-server-2.27.3 jupyterlab-widgets-3.0.11 kiwisolver-1.4.5 langchain-core-0.2.23 langchain_openai-0.1.13 
langsmith-0.1.93 matplotlib-3.9.1 mistune-3.0.2 multidict-6.0.5 multiprocess-0.70.16 mypy-extensions-1.0.0 nbclient-0.10.0 nbconvert-7.16.4 nbformat-5.10.4 nltk-3.8.1 notebook-7.2.1 notebook-shim-0.2.4 openai-1.37.0 orjson-3.10.6 overrides-7.7.0 pandas-2.2.2 pandocfilters-1.5.1 pathspec-0.12.1 peft-0.11.1 pluggy-1.5.0 prometheus-client-0.20.0 protobuf-5.27.2 pyarrow-17.0.0 pyarrow-hotfix-0.6 pycparser-2.22 pydantic-2.8.2 pydantic-core-2.20.1 pyparsing-3.1.2 pytest-8.2.1 python-dotenv-1.0.1 python-json-logger-2.0.7 pytz-2024.1 pywinpty-2.0.13 qtconsole-5.5.2 qtpy-2.4.1 referencing-0.35.1 regex-2024.5.15 rfc3339-validator-0.1.4 rfc3986-validator-0.1.1 rouge_score-0.1.2 rpds-py-0.19.1 safetensors-0.4.3 scikit-learn-1.5.0 scipy-1.14.0 seaborn-0.13.2 send2trash-1.8.3 sentencepiece-0.2.0 sentry-sdk-2.11.0 setproctitle-1.3.3 smmap-5.0.1 sniffio-1.3.1 soupsieve-2.5 tenacity-8.5.0 terminado-0.18.1 threadpoolctl-3.5.0 tiktoken-0.7.0 tinycss2-1.3.0 tokenizers-0.19.1 tqdm-4.66.4 transformers-4.42.4 types-python-dateutil-2.9.0.20240316 tzdata-2024.1 uri-template-1.3.0 wandb-0.17.4 webcolors-24.6.0 webencodings-0.5.1 websocket-client-1.8.0 widgetsnbextension-4.0.11 xxhash-3.4.1 yarl-1.9.4\n", + "Note: you may need to restart the kernel to use updated packages.\n", + "Obtaining file:///C:/Users/HT/Documents/URP/logical-reasoning/llama-factory\n", + " Installing build dependencies: started\n", + " Installing build dependencies: finished with status 'done'\n", + " Checking if build backend supports build_editable: started\n", + " Checking if build backend supports build_editable: finished with status 'done'\n", + " Getting requirements to build editable: started\n", + " Getting requirements to build editable: finished with status 'done'\n", + " Preparing editable metadata (pyproject.toml): started\n", + " Preparing editable metadata (pyproject.toml): finished with status 'done'\n", + "Requirement already satisfied: transformers>=4.41.2 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (4.42.4)\n", + "Requirement already satisfied: datasets>=2.16.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (2.20.0)\n", + "Requirement already satisfied: accelerate>=0.30.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (0.32.1)\n", + "Requirement already satisfied: peft>=0.11.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (0.11.1)\n", + "Collecting trl>=0.8.6 (from llamafactory==0.8.4.dev0)\n", + " Downloading trl-0.9.6-py3-none-any.whl.metadata (12 kB)\n", + "Collecting gradio>=4.0.0 (from llamafactory==0.8.4.dev0)\n", + " Downloading gradio-4.39.0-py3-none-any.whl.metadata (15 kB)\n", + "Requirement already satisfied: pandas>=2.0.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (2.2.2)\n", + "Requirement already satisfied: scipy in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (1.14.0)\n", + "Requirement already satisfied: einops in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (0.8.0)\n", + "Requirement already satisfied: sentencepiece in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (0.2.0)\n", + "Requirement already satisfied: tiktoken in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (0.7.0)\n", + "Requirement already 
satisfied: protobuf in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (5.27.2)\n", + "Collecting uvicorn (from llamafactory==0.8.4.dev0)\n", + " Downloading uvicorn-0.30.3-py3-none-any.whl.metadata (6.5 kB)\n", + "Requirement already satisfied: pydantic in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (2.8.2)\n", + "Collecting fastapi (from llamafactory==0.8.4.dev0)\n", + " Downloading fastapi-0.111.1-py3-none-any.whl.metadata (26 kB)\n", + "Collecting sse-starlette (from llamafactory==0.8.4.dev0)\n", + " Downloading sse_starlette-2.1.2-py3-none-any.whl.metadata (5.8 kB)\n", + "Requirement already satisfied: matplotlib>=3.7.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (3.9.1)\n", + "Collecting fire (from llamafactory==0.8.4.dev0)\n", + " Downloading fire-0.6.0.tar.gz (88 kB)\n", + " ---------------------------------------- 0.0/88.4 kB ? eta -:--:--\n", + " ------------------ --------------------- 41.0/88.4 kB 2.0 MB/s eta 0:00:01\n", + " ---------------------------------------- 88.4/88.4 kB 1.3 MB/s eta 0:00:00\n", + " Preparing metadata (setup.py): started\n", + " Preparing metadata (setup.py): finished with status 'done'\n", + "Requirement already satisfied: packaging in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (24.1)\n", + "Requirement already satisfied: pyyaml in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (6.0.1)\n", + "Requirement already satisfied: numpy<2.0.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (1.26.4)\n", + "Collecting bitsandbytes>=0.39.0 (from llamafactory==0.8.4.dev0)\n", + " Downloading bitsandbytes-0.43.2-py3-none-win_amd64.whl.metadata (3.5 kB)\n", + "Requirement already satisfied: torch>=1.13.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from llamafactory==0.8.4.dev0) (2.4.0)\n", + "Requirement already satisfied: psutil in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from accelerate>=0.30.1->llamafactory==0.8.4.dev0) (5.9.0)\n", + "Requirement already satisfied: huggingface-hub in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from accelerate>=0.30.1->llamafactory==0.8.4.dev0) (0.23.2)\n", + "Requirement already satisfied: safetensors>=0.3.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from accelerate>=0.30.1->llamafactory==0.8.4.dev0) (0.4.3)\n", + "Requirement already satisfied: filelock in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from datasets>=2.16.0->llamafactory==0.8.4.dev0) (3.13.1)\n", + "Requirement already satisfied: pyarrow>=15.0.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from datasets>=2.16.0->llamafactory==0.8.4.dev0) (17.0.0)\n", + "Requirement already satisfied: pyarrow-hotfix in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from datasets>=2.16.0->llamafactory==0.8.4.dev0) (0.6)\n", + "Requirement already satisfied: dill<0.3.9,>=0.3.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from datasets>=2.16.0->llamafactory==0.8.4.dev0) (0.3.8)\n", + "Requirement already satisfied: requests>=2.32.2 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from datasets>=2.16.0->llamafactory==0.8.4.dev0) (2.32.3)\n", + "Requirement already satisfied: tqdm>=4.66.3 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from 
datasets>=2.16.0->llamafactory==0.8.4.dev0) (4.66.4)\n", + "Requirement already satisfied: xxhash in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from datasets>=2.16.0->llamafactory==0.8.4.dev0) (3.4.1)\n", + "Requirement already satisfied: multiprocess in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from datasets>=2.16.0->llamafactory==0.8.4.dev0) (0.70.16)\n", + "Requirement already satisfied: fsspec<=2024.5.0,>=2023.1.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from fsspec[http]<=2024.5.0,>=2023.1.0->datasets>=2.16.0->llamafactory==0.8.4.dev0) (2024.5.0)\n", + "Requirement already satisfied: aiohttp in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from datasets>=2.16.0->llamafactory==0.8.4.dev0) (3.9.5)\n", + "Collecting aiofiles<24.0,>=22.0 (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading aiofiles-23.2.1-py3-none-any.whl.metadata (9.7 kB)\n", + "Requirement already satisfied: anyio<5.0,>=3.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from gradio>=4.0.0->llamafactory==0.8.4.dev0) (4.4.0)\n", + "Collecting ffmpy (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading ffmpy-0.3.2.tar.gz (5.5 kB)\n", + " Preparing metadata (setup.py): started\n", + " Preparing metadata (setup.py): finished with status 'done'\n", + "Collecting gradio-client==1.1.1 (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading gradio_client-1.1.1-py3-none-any.whl.metadata (7.1 kB)\n", + "Requirement already satisfied: httpx>=0.24.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from gradio>=4.0.0->llamafactory==0.8.4.dev0) (0.27.0)\n", + "Collecting importlib-resources<7.0,>=1.3 (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading importlib_resources-6.4.0-py3-none-any.whl.metadata (3.9 kB)\n", + "Requirement already satisfied: jinja2<4.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from gradio>=4.0.0->llamafactory==0.8.4.dev0) (3.1.4)\n", + "Requirement already satisfied: markupsafe~=2.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from gradio>=4.0.0->llamafactory==0.8.4.dev0) (2.1.3)\n", + "Requirement already satisfied: orjson~=3.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from gradio>=4.0.0->llamafactory==0.8.4.dev0) (3.10.6)\n", + "Requirement already satisfied: pillow<11.0,>=8.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from gradio>=4.0.0->llamafactory==0.8.4.dev0) (10.4.0)\n", + "Collecting pydub (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading pydub-0.25.1-py2.py3-none-any.whl.metadata (1.4 kB)\n", + "Collecting python-multipart>=0.0.9 (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading python_multipart-0.0.9-py3-none-any.whl.metadata (2.5 kB)\n", + "Collecting ruff>=0.2.2 (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading ruff-0.5.4-py3-none-win_amd64.whl.metadata (25 kB)\n", + "Collecting semantic-version~=2.0 (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading semantic_version-2.10.0-py2.py3-none-any.whl.metadata (9.7 kB)\n", + "Collecting tomlkit==0.12.0 (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading tomlkit-0.12.0-py3-none-any.whl.metadata (2.7 kB)\n", + "Collecting typer<1.0,>=0.12 (from gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading typer-0.12.3-py3-none-any.whl.metadata (15 kB)\n", + "Requirement already satisfied: typing-extensions~=4.0 in 
c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from gradio>=4.0.0->llamafactory==0.8.4.dev0) (4.12.2)\n", + "Requirement already satisfied: urllib3~=2.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from gradio>=4.0.0->llamafactory==0.8.4.dev0) (2.2.2)\n", + "Collecting websockets<12.0,>=10.0 (from gradio-client==1.1.1->gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading websockets-11.0.3-cp311-cp311-win_amd64.whl.metadata (6.8 kB)\n", + "Requirement already satisfied: contourpy>=1.0.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from matplotlib>=3.7.0->llamafactory==0.8.4.dev0) (1.2.1)\n", + "Requirement already satisfied: cycler>=0.10 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from matplotlib>=3.7.0->llamafactory==0.8.4.dev0) (0.12.1)\n", + "Requirement already satisfied: fonttools>=4.22.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from matplotlib>=3.7.0->llamafactory==0.8.4.dev0) (4.53.1)\n", + "Requirement already satisfied: kiwisolver>=1.3.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from matplotlib>=3.7.0->llamafactory==0.8.4.dev0) (1.4.5)\n", + "Requirement already satisfied: pyparsing>=2.3.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from matplotlib>=3.7.0->llamafactory==0.8.4.dev0) (3.1.2)\n", + "Requirement already satisfied: python-dateutil>=2.7 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from matplotlib>=3.7.0->llamafactory==0.8.4.dev0) (2.9.0)\n", + "Requirement already satisfied: pytz>=2020.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from pandas>=2.0.0->llamafactory==0.8.4.dev0) (2024.1)\n", + "Requirement already satisfied: tzdata>=2022.7 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from pandas>=2.0.0->llamafactory==0.8.4.dev0) (2024.1)\n", + "Requirement already satisfied: annotated-types>=0.4.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from pydantic->llamafactory==0.8.4.dev0) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.20.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from pydantic->llamafactory==0.8.4.dev0) (2.20.1)\n", + "Requirement already satisfied: sympy in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from torch>=1.13.1->llamafactory==0.8.4.dev0) (1.12)\n", + "Requirement already satisfied: networkx in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from torch>=1.13.1->llamafactory==0.8.4.dev0) (3.3)\n", + "Requirement already satisfied: regex!=2019.12.17 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from transformers>=4.41.2->llamafactory==0.8.4.dev0) (2024.5.15)\n", + "Requirement already satisfied: tokenizers<0.20,>=0.19 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from transformers>=4.41.2->llamafactory==0.8.4.dev0) (0.19.1)\n", + "Collecting tyro>=0.5.11 (from trl>=0.8.6->llamafactory==0.8.4.dev0)\n", + " Downloading tyro-0.8.5-py3-none-any.whl.metadata (8.2 kB)\n", + "Requirement already satisfied: click>=7.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from uvicorn->llamafactory==0.8.4.dev0) (8.1.7)\n", + "Requirement already satisfied: h11>=0.8 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from uvicorn->llamafactory==0.8.4.dev0) (0.14.0)\n", + "Collecting starlette<0.38.0,>=0.37.2 (from fastapi->llamafactory==0.8.4.dev0)\n", + " Downloading starlette-0.37.2-py3-none-any.whl.metadata (5.9 kB)\n", + "Collecting fastapi-cli>=0.0.2 (from 
fastapi->llamafactory==0.8.4.dev0)\n", + " Downloading fastapi_cli-0.0.4-py3-none-any.whl.metadata (7.0 kB)\n", + "Collecting email_validator>=2.0.0 (from fastapi->llamafactory==0.8.4.dev0)\n", + " Downloading email_validator-2.2.0-py3-none-any.whl.metadata (25 kB)\n", + "Requirement already satisfied: six in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from fire->llamafactory==0.8.4.dev0) (1.16.0)\n", + "Collecting termcolor (from fire->llamafactory==0.8.4.dev0)\n", + " Downloading termcolor-2.4.0-py3-none-any.whl.metadata (6.1 kB)\n", + "Requirement already satisfied: idna>=2.8 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from anyio<5.0,>=3.0->gradio>=4.0.0->llamafactory==0.8.4.dev0) (3.7)\n", + "Requirement already satisfied: sniffio>=1.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from anyio<5.0,>=3.0->gradio>=4.0.0->llamafactory==0.8.4.dev0) (1.3.1)\n", + "Requirement already satisfied: colorama in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from click>=7.0->uvicorn->llamafactory==0.8.4.dev0) (0.4.6)\n", + "Collecting dnspython>=2.0.0 (from email_validator>=2.0.0->fastapi->llamafactory==0.8.4.dev0)\n", + " Downloading dnspython-2.6.1-py3-none-any.whl.metadata (5.8 kB)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from aiohttp->datasets>=2.16.0->llamafactory==0.8.4.dev0) (1.3.1)\n", + "Requirement already satisfied: attrs>=17.3.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from aiohttp->datasets>=2.16.0->llamafactory==0.8.4.dev0) (23.2.0)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from aiohttp->datasets>=2.16.0->llamafactory==0.8.4.dev0) (1.4.1)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from aiohttp->datasets>=2.16.0->llamafactory==0.8.4.dev0) (6.0.5)\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from aiohttp->datasets>=2.16.0->llamafactory==0.8.4.dev0) (1.9.4)\n", + "Requirement already satisfied: certifi in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from httpx>=0.24.1->gradio>=4.0.0->llamafactory==0.8.4.dev0) (2024.7.4)\n", + "Requirement already satisfied: httpcore==1.* in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from httpx>=0.24.1->gradio>=4.0.0->llamafactory==0.8.4.dev0) (1.0.5)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from requests>=2.32.2->datasets>=2.16.0->llamafactory==0.8.4.dev0) (3.3.2)\n", + "Collecting shellingham>=1.3.0 (from typer<1.0,>=0.12->gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading shellingham-1.5.4-py2.py3-none-any.whl.metadata (3.5 kB)\n", + "Collecting rich>=10.11.0 (from typer<1.0,>=0.12->gradio>=4.0.0->llamafactory==0.8.4.dev0)\n", + " Downloading rich-13.7.1-py3-none-any.whl.metadata (18 kB)\n", + "Collecting docstring-parser>=0.16 (from tyro>=0.5.11->trl>=0.8.6->llamafactory==0.8.4.dev0)\n", + " Downloading docstring_parser-0.16-py3-none-any.whl.metadata (3.0 kB)\n", + "Collecting shtab>=1.5.6 (from tyro>=0.5.11->trl>=0.8.6->llamafactory==0.8.4.dev0)\n", + " Downloading shtab-1.7.1-py3-none-any.whl.metadata (7.3 kB)\n", + "Collecting httptools>=0.5.0 (from uvicorn[standard]>=0.12.0->fastapi->llamafactory==0.8.4.dev0)\n", + " Downloading 
httptools-0.6.1-cp311-cp311-win_amd64.whl.metadata (3.7 kB)\n",
+ "Requirement already satisfied: python-dotenv>=0.13 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from uvicorn[standard]>=0.12.0->fastapi->llamafactory==0.8.4.dev0) (1.0.1)\n",
+ "Collecting watchfiles>=0.13 (from uvicorn[standard]>=0.12.0->fastapi->llamafactory==0.8.4.dev0)\n",
+ " Downloading watchfiles-0.22.0-cp311-none-win_amd64.whl.metadata (5.0 kB)\n",
+ "Requirement already satisfied: mpmath>=0.19 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from sympy->torch>=1.13.1->llamafactory==0.8.4.dev0) (1.3.0)\n",
+ "Collecting markdown-it-py>=2.2.0 (from rich>=10.11.0->typer<1.0,>=0.12->gradio>=4.0.0->llamafactory==0.8.4.dev0)\n",
+ " Downloading markdown_it_py-3.0.0-py3-none-any.whl.metadata (6.9 kB)\n",
+ "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in c:\\users\\ht\\anaconda3\\envs\\comp\\lib\\site-packages (from rich>=10.11.0->typer<1.0,>=0.12->gradio>=4.0.0->llamafactory==0.8.4.dev0) (2.18.0)\n",
+ "Collecting mdurl~=0.1 (from markdown-it-py>=2.2.0->rich>=10.11.0->typer<1.0,>=0.12->gradio>=4.0.0->llamafactory==0.8.4.dev0)\n",
+ " Downloading mdurl-0.1.2-py3-none-any.whl.metadata (1.6 kB)\n",
+ "Downloading bitsandbytes-0.43.2-py3-none-win_amd64.whl (136.5 MB)\n",
+ " ------ 
--------------------------------- 22.7/136.5 MB 3.2 MB/s eta 0:00:36\n", + " ------ --------------------------------- 22.8/136.5 MB 3.2 MB/s eta 0:00:36\n", + " ------ --------------------------------- 23.0/136.5 MB 3.2 MB/s eta 0:00:36\n", + " ------ --------------------------------- 23.1/136.5 MB 3.2 MB/s eta 0:00:36\n", + " ------ --------------------------------- 23.3/136.5 MB 3.2 MB/s eta 0:00:36\n", + " ------ --------------------------------- 23.6/136.5 MB 3.2 MB/s eta 0:00:36\n", + " ------ --------------------------------- 23.7/136.5 MB 3.2 MB/s eta 0:00:36\n", + " ------ --------------------------------- 23.8/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 24.0/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 24.0/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 24.3/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 24.4/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 24.7/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 24.8/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 25.1/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 25.2/136.5 MB 3.2 MB/s eta 0:00:36\n", + " ------- -------------------------------- 25.3/136.5 MB 3.2 MB/s eta 0:00:35\n", + " ------- -------------------------------- 25.5/136.5 MB 3.2 MB/s eta 0:00:35\n", + " ------- -------------------------------- 25.6/136.5 MB 3.2 MB/s eta 0:00:35\n", + " ------- -------------------------------- 25.8/136.5 MB 3.2 MB/s eta 0:00:35\n", + " ------- -------------------------------- 25.9/136.5 MB 3.2 MB/s eta 0:00:35\n", + " ------- -------------------------------- 26.0/136.5 MB 3.2 MB/s eta 0:00:35\n", + " ------- -------------------------------- 26.1/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 26.1/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 26.3/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 26.3/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 26.5/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 26.6/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 26.7/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 26.9/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 27.0/136.5 MB 3.1 MB/s eta 0:00:36\n", + " ------- -------------------------------- 27.2/136.5 MB 3.1 MB/s eta 0:00:36\n", + " -------- ------------------------------- 27.3/136.5 MB 3.1 MB/s eta 0:00:36\n", + " -------- ------------------------------- 27.5/136.5 MB 3.1 MB/s eta 0:00:35\n", + " -------- ------------------------------- 27.7/136.5 MB 3.1 MB/s eta 0:00:35\n", + " -------- ------------------------------- 27.8/136.5 MB 3.1 MB/s eta 0:00:35\n", + " -------- ------------------------------- 27.8/136.5 MB 3.1 MB/s eta 0:00:35\n", + " -------- ------------------------------- 27.9/136.5 MB 3.1 MB/s eta 0:00:35\n", + " -------- ------------------------------- 28.1/136.5 MB 3.2 MB/s eta 0:00:35\n", + " -------- ------------------------------- 28.3/136.5 MB 3.1 MB/s eta 0:00:35\n", + " -------- ------------------------------- 28.4/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- ------------------------------- 28.6/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- 
------------------------------- 28.8/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- ------------------------------- 28.9/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- ------------------------------- 29.1/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- ------------------------------- 29.2/136.5 MB 3.3 MB/s eta 0:00:33\n", + " -------- ------------------------------- 29.3/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- ------------------------------- 29.5/136.5 MB 3.2 MB/s eta 0:00:33\n", + " -------- ------------------------------- 29.7/136.5 MB 3.3 MB/s eta 0:00:33\n", + " -------- ------------------------------- 29.8/136.5 MB 3.3 MB/s eta 0:00:33\n", + " -------- ------------------------------- 29.9/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- ------------------------------- 30.1/136.5 MB 3.2 MB/s eta 0:00:33\n", + " -------- ------------------------------- 30.2/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- ------------------------------- 30.4/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- ------------------------------- 30.5/136.5 MB 3.2 MB/s eta 0:00:34\n", + " -------- ------------------------------- 30.6/136.5 MB 3.2 MB/s eta 0:00:34\n", + " --------- ------------------------------ 30.7/136.5 MB 3.2 MB/s eta 0:00:34\n", + " --------- ------------------------------ 30.9/136.5 MB 3.2 MB/s eta 0:00:34\n", + " --------- ------------------------------ 31.0/136.5 MB 3.2 MB/s eta 0:00:34\n", + " --------- ------------------------------ 31.1/136.5 MB 3.1 MB/s eta 0:00:34\n", + " --------- ------------------------------ 31.3/136.5 MB 3.1 MB/s eta 0:00:34\n", + " --------- ------------------------------ 31.4/136.5 MB 3.1 MB/s eta 0:00:34\n", + " --------- ------------------------------ 31.6/136.5 MB 3.1 MB/s eta 0:00:34\n", + " --------- ------------------------------ 31.7/136.5 MB 3.1 MB/s eta 0:00:35\n", + " --------- ------------------------------ 31.9/136.5 MB 3.1 MB/s eta 0:00:35\n", + " --------- ------------------------------ 32.1/136.5 MB 3.1 MB/s eta 0:00:35\n", + " --------- ------------------------------ 32.2/136.5 MB 3.1 MB/s eta 0:00:35\n", + " --------- ------------------------------ 32.3/136.5 MB 3.0 MB/s eta 0:00:35\n", + " --------- ------------------------------ 32.5/136.5 MB 3.1 MB/s eta 0:00:34\n", + " --------- ------------------------------ 32.6/136.5 MB 3.1 MB/s eta 0:00:34\n", + " --------- ------------------------------ 32.8/136.5 MB 3.1 MB/s eta 0:00:34\n", + " --------- ------------------------------ 32.8/136.5 MB 3.0 MB/s eta 0:00:35\n", + " --------- ------------------------------ 33.0/136.5 MB 3.0 MB/s eta 0:00:35\n", + " --------- ------------------------------ 33.2/136.5 MB 3.0 MB/s eta 0:00:35\n", + " --------- ------------------------------ 33.3/136.5 MB 3.0 MB/s eta 0:00:34\n", + " --------- ------------------------------ 33.4/136.5 MB 3.0 MB/s eta 0:00:35\n", + " --------- ------------------------------ 33.5/136.5 MB 3.0 MB/s eta 0:00:35\n", + " --------- ------------------------------ 33.7/136.5 MB 3.0 MB/s eta 0:00:35\n", + " --------- ------------------------------ 33.8/136.5 MB 3.0 MB/s eta 0:00:35\n", + " --------- ------------------------------ 34.0/136.5 MB 3.0 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 34.1/136.5 MB 3.0 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 34.3/136.5 MB 3.0 MB/s eta 0:00:34\n", + " ---------- ----------------------------- 34.5/136.5 MB 3.0 MB/s eta 0:00:34\n", + " ---------- ----------------------------- 34.6/136.5 MB 3.0 MB/s eta 0:00:34\n", + " ---------- 
----------------------------- 34.8/136.5 MB 3.0 MB/s eta 0:00:34\n", + " ---------- ----------------------------- 34.8/136.5 MB 3.0 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 34.9/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 35.1/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 35.2/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 35.3/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 35.5/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 35.6/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 35.8/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 35.9/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 36.0/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 36.2/136.5 MB 2.9 MB/s eta 0:00:35\n", + " ---------- ----------------------------- 36.3/136.5 MB 2.9 MB/s eta 0:00:34\n", + " ---------- ----------------------------- 36.5/136.5 MB 3.0 MB/s eta 0:00:34\n", + " ---------- ----------------------------- 36.6/136.5 MB 3.0 MB/s eta 0:00:34\n", + " ---------- ----------------------------- 36.7/136.5 MB 3.0 MB/s eta 0:00:34\n", + " ---------- ----------------------------- 36.9/136.5 MB 3.0 MB/s eta 0:00:34\n", + " ---------- ----------------------------- 37.1/136.5 MB 3.0 MB/s eta 0:00:33\n", + " ---------- ----------------------------- 37.2/136.5 MB 3.0 MB/s eta 0:00:33\n", + " ---------- ----------------------------- 37.4/136.5 MB 3.0 MB/s eta 0:00:33\n", + " ----------- ---------------------------- 37.5/136.5 MB 3.0 MB/s eta 0:00:33\n", + " ----------- ---------------------------- 37.7/136.5 MB 3.0 MB/s eta 0:00:33\n", + " ----------- ---------------------------- 37.9/136.5 MB 3.0 MB/s eta 0:00:33\n", + " ----------- ---------------------------- 38.1/136.5 MB 3.1 MB/s eta 0:00:33\n", + " ----------- ---------------------------- 38.2/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 38.3/136.5 MB 3.1 MB/s eta 0:00:33\n", + " ----------- ---------------------------- 38.5/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 38.7/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 38.8/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 39.0/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 39.2/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 39.3/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 39.5/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 39.6/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 39.8/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 40.0/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 40.0/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 40.1/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 40.2/136.5 MB 3.0 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 40.4/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 40.6/136.5 MB 3.1 MB/s eta 0:00:32\n", + " ----------- ---------------------------- 40.8/136.5 MB 3.1 MB/s eta 0:00:31\n", + " ------------ 
--------------------------- 41.0/136.5 MB 3.1 MB/s eta 0:00:31\n", + " ------------ --------------------------- 41.2/136.5 MB 3.1 MB/s eta 0:00:31\n", + " ------------ --------------------------- 41.3/136.5 MB 3.1 MB/s eta 0:00:31\n", + " ------------ --------------------------- 41.5/136.5 MB 3.2 MB/s eta 0:00:30\n", + " ------------ --------------------------- 41.7/136.5 MB 3.2 MB/s eta 0:00:30\n", + " ------------ --------------------------- 41.9/136.5 MB 3.2 MB/s eta 0:00:30\n", + " ------------ --------------------------- 42.1/136.5 MB 3.2 MB/s eta 0:00:30\n", + " ------------ --------------------------- 42.2/136.5 MB 3.2 MB/s eta 0:00:30\n", + " ------------ --------------------------- 42.4/136.5 MB 3.2 MB/s eta 0:00:30\n", + " ------------ --------------------------- 42.5/136.5 MB 3.2 MB/s eta 0:00:30\n", + " ------------ --------------------------- 42.9/136.5 MB 3.3 MB/s eta 0:00:29\n", + " ------------ --------------------------- 43.1/136.5 MB 3.3 MB/s eta 0:00:29\n", + " ------------ --------------------------- 43.3/136.5 MB 3.3 MB/s eta 0:00:29\n", + " ------------ --------------------------- 43.4/136.5 MB 3.3 MB/s eta 0:00:29\n", + " ------------ --------------------------- 43.6/136.5 MB 3.4 MB/s eta 0:00:28\n", + " ------------ --------------------------- 43.8/136.5 MB 3.4 MB/s eta 0:00:28\n", + " ------------ --------------------------- 43.8/136.5 MB 3.4 MB/s eta 0:00:28\n", + " ------------ --------------------------- 44.1/136.5 MB 3.4 MB/s eta 0:00:28\n", + " ------------ --------------------------- 44.2/136.5 MB 3.4 MB/s eta 0:00:28\n", + " ------------ --------------------------- 44.3/136.5 MB 3.4 MB/s eta 0:00:28\n", + " ------------- -------------------------- 44.4/136.5 MB 3.3 MB/s eta 0:00:28\n", + " ------------- -------------------------- 44.5/136.5 MB 3.3 MB/s eta 0:00:28\n", + " ------------- -------------------------- 44.6/136.5 MB 3.3 MB/s eta 0:00:28\n", + " ------------- -------------------------- 44.7/136.5 MB 3.3 MB/s eta 0:00:29\n", + " ------------- -------------------------- 44.8/136.5 MB 3.3 MB/s eta 0:00:29\n", + " ------------- -------------------------- 44.9/136.5 MB 3.2 MB/s eta 0:00:29\n", + " ------------- -------------------------- 45.0/136.5 MB 3.2 MB/s eta 0:00:29\n", + " ------------- -------------------------- 45.1/136.5 MB 3.2 MB/s eta 0:00:29\n", + " ------------- -------------------------- 45.1/136.5 MB 3.2 MB/s eta 0:00:29\n", + " ------------- -------------------------- 45.2/136.5 MB 3.2 MB/s eta 0:00:29\n", + " ------------- -------------------------- 45.3/136.5 MB 3.2 MB/s eta 0:00:29\n", + " ------------- -------------------------- 45.4/136.5 MB 3.1 MB/s eta 0:00:30\n", + " ------------- -------------------------- 45.5/136.5 MB 3.1 MB/s eta 0:00:30\n", + " ------------- -------------------------- 45.5/136.5 MB 3.1 MB/s eta 0:00:30\n", + " ------------- -------------------------- 45.5/136.5 MB 3.1 MB/s eta 0:00:30\n", + " ------------- -------------------------- 45.6/136.5 MB 3.1 MB/s eta 0:00:30\n", + " ------------- -------------------------- 45.6/136.5 MB 3.0 MB/s eta 0:00:31\n", + " ------------- -------------------------- 45.7/136.5 MB 3.0 MB/s eta 0:00:31\n", + " ------------- -------------------------- 45.7/136.5 MB 3.0 MB/s eta 0:00:31\n", + " ------------- -------------------------- 45.8/136.5 MB 2.9 MB/s eta 0:00:31\n", + " ------------- -------------------------- 45.9/136.5 MB 2.9 MB/s eta 0:00:32\n", + " ------------- -------------------------- 45.9/136.5 MB 2.9 MB/s eta 0:00:32\n", + " ------------- 
-------------------------- 46.0/136.5 MB 2.9 MB/s eta 0:00:32\n", + " ------------- -------------------------- 46.0/136.5 MB 2.8 MB/s eta 0:00:32\n", + " ------------- -------------------------- 46.1/136.5 MB 2.8 MB/s eta 0:00:33\n", + " ------------- -------------------------- 46.2/136.5 MB 2.8 MB/s eta 0:00:33\n", + " ------------- -------------------------- 46.2/136.5 MB 2.8 MB/s eta 0:00:33\n", + " ------------- -------------------------- 46.2/136.5 MB 2.8 MB/s eta 0:00:33\n", + " ------------- -------------------------- 46.3/136.5 MB 2.7 MB/s eta 0:00:34\n", + " ------------- -------------------------- 46.3/136.5 MB 2.7 MB/s eta 0:00:34\n", + " ------------- -------------------------- 46.3/136.5 MB 2.7 MB/s eta 0:00:34\n", + " ------------- -------------------------- 46.4/136.5 MB 2.7 MB/s eta 0:00:35\n", + " ------------- -------------------------- 46.4/136.5 MB 2.6 MB/s eta 0:00:35\n", + " ------------- -------------------------- 46.5/136.5 MB 2.6 MB/s eta 0:00:35\n", + " ------------- -------------------------- 46.5/136.5 MB 2.6 MB/s eta 0:00:35\n", + " ------------- -------------------------- 46.6/136.5 MB 2.6 MB/s eta 0:00:36\n", + " ------------- -------------------------- 46.6/136.5 MB 2.5 MB/s eta 0:00:36\n", + " ------------- -------------------------- 46.7/136.5 MB 2.5 MB/s eta 0:00:36\n", + " ------------- -------------------------- 46.7/136.5 MB 2.5 MB/s eta 0:00:36\n", + " ------------- -------------------------- 46.7/136.5 MB 2.5 MB/s eta 0:00:36\n", + " ------------- -------------------------- 46.7/136.5 MB 2.5 MB/s eta 0:00:37\n", + " ------------- -------------------------- 46.8/136.5 MB 2.5 MB/s eta 0:00:37\n", + " ------------- -------------------------- 46.8/136.5 MB 2.4 MB/s eta 0:00:37\n", + " ------------- -------------------------- 46.8/136.5 MB 2.4 MB/s eta 0:00:38\n", + " ------------- -------------------------- 46.9/136.5 MB 2.4 MB/s eta 0:00:38\n", + " ------------- -------------------------- 46.9/136.5 MB 2.4 MB/s eta 0:00:38\n", + " ------------- -------------------------- 46.9/136.5 MB 2.3 MB/s eta 0:00:39\n", + " ------------- -------------------------- 47.0/136.5 MB 2.3 MB/s eta 0:00:39\n", + " ------------- -------------------------- 47.0/136.5 MB 2.3 MB/s eta 0:00:39\n", + " ------------- -------------------------- 47.1/136.5 MB 2.3 MB/s eta 0:00:40\n", + " ------------- -------------------------- 47.2/136.5 MB 2.3 MB/s eta 0:00:40\n", + " ------------- -------------------------- 47.3/136.5 MB 2.3 MB/s eta 0:00:40\n", + " ------------- -------------------------- 47.3/136.5 MB 2.2 MB/s eta 0:00:40\n", + " ------------- -------------------------- 47.5/136.5 MB 2.2 MB/s eta 0:00:40\n", + " ------------- -------------------------- 47.5/136.5 MB 2.2 MB/s eta 0:00:40\n", + " ------------- -------------------------- 47.7/136.5 MB 2.2 MB/s eta 0:00:41\n", + " ------------- -------------------------- 47.7/136.5 MB 2.2 MB/s eta 0:00:41\n", + " -------------- ------------------------- 47.8/136.5 MB 2.2 MB/s eta 0:00:41\n", + " -------------- ------------------------- 47.9/136.5 MB 2.2 MB/s eta 0:00:41\n", + " -------------- ------------------------- 47.9/136.5 MB 2.2 MB/s eta 0:00:41\n", + " -------------- ------------------------- 48.0/136.5 MB 2.2 MB/s eta 0:00:41\n", + " -------------- ------------------------- 48.1/136.5 MB 2.1 MB/s eta 0:00:42\n", + " -------------- ------------------------- 48.2/136.5 MB 2.1 MB/s eta 0:00:42\n", + " -------------- ------------------------- 48.3/136.5 MB 2.1 MB/s eta 0:00:42\n", + " -------------- ------------------------- 
48.4/136.5 MB 2.1 MB/s eta 0:00:42\n", + " -------------- ------------------------- 48.5/136.5 MB 2.1 MB/s eta 0:00:42\n", + " -------------- ------------------------- 48.6/136.5 MB 2.1 MB/s eta 0:00:42\n", + " -------------- ------------------------- 48.6/136.5 MB 2.1 MB/s eta 0:00:42\n", + " -------------- ------------------------- 48.7/136.5 MB 2.1 MB/s eta 0:00:43\n", + " -------------- ------------------------- 48.8/136.5 MB 2.1 MB/s eta 0:00:43\n", + " -------------- ------------------------- 48.9/136.5 MB 2.1 MB/s eta 0:00:43\n", + " -------------- ------------------------- 49.0/136.5 MB 2.1 MB/s eta 0:00:43\n", + " -------------- ------------------------- 49.2/136.5 MB 2.1 MB/s eta 0:00:43\n", + " -------------- ------------------------- 49.2/136.5 MB 2.1 MB/s eta 0:00:43\n", + " -------------- ------------------------- 49.3/136.5 MB 2.0 MB/s eta 0:00:43\n", + " -------------- ------------------------- 49.4/136.5 MB 2.0 MB/s eta 0:00:43\n", + " -------------- ------------------------- 49.5/136.5 MB 2.0 MB/s eta 0:00:43\n", + " -------------- ------------------------- 49.6/136.5 MB 2.0 MB/s eta 0:00:43\n", + " -------------- ------------------------- 49.7/136.5 MB 2.0 MB/s eta 0:00:44\n", + " -------------- ------------------------- 49.8/136.5 MB 2.0 MB/s eta 0:00:44\n", + " -------------- ------------------------- 49.9/136.5 MB 2.0 MB/s eta 0:00:44\n", + " -------------- ------------------------- 50.0/136.5 MB 2.0 MB/s eta 0:00:44\n", + " -------------- ------------------------- 50.0/136.5 MB 2.0 MB/s eta 0:00:44\n", + " -------------- ------------------------- 50.0/136.5 MB 2.0 MB/s eta 0:00:45\n", + " -------------- ------------------------- 50.1/136.5 MB 2.0 MB/s eta 0:00:45\n", + " -------------- ------------------------- 50.1/136.5 MB 1.9 MB/s eta 0:00:45\n", + " -------------- ------------------------- 50.2/136.5 MB 1.9 MB/s eta 0:00:45\n", + " -------------- ------------------------- 50.3/136.5 MB 1.9 MB/s eta 0:00:45\n", + " -------------- ------------------------- 50.3/136.5 MB 1.9 MB/s eta 0:00:45\n", + " -------------- ------------------------- 50.4/136.5 MB 1.9 MB/s eta 0:00:45\n", + " -------------- ------------------------- 50.5/136.5 MB 1.9 MB/s eta 0:00:46\n", + " -------------- ------------------------- 50.6/136.5 MB 1.9 MB/s eta 0:00:46\n", + " -------------- ------------------------- 50.6/136.5 MB 1.9 MB/s eta 0:00:46\n", + " -------------- ------------------------- 50.7/136.5 MB 1.9 MB/s eta 0:00:46\n", + " -------------- ------------------------- 50.8/136.5 MB 1.9 MB/s eta 0:00:46\n", + " -------------- ------------------------- 50.8/136.5 MB 1.9 MB/s eta 0:00:47\n", + " -------------- ------------------------- 50.9/136.5 MB 1.8 MB/s eta 0:00:47\n", + " -------------- ------------------------- 50.9/136.5 MB 1.8 MB/s eta 0:00:47\n", + " -------------- ------------------------- 50.9/136.5 MB 1.8 MB/s eta 0:00:47\n", + " -------------- ------------------------- 51.0/136.5 MB 1.8 MB/s eta 0:00:48\n", + " -------------- ------------------------- 51.1/136.5 MB 1.8 MB/s eta 0:00:48\n", + " -------------- ------------------------- 51.1/136.5 MB 1.8 MB/s eta 0:00:48\n", + " --------------- ------------------------ 51.2/136.5 MB 1.8 MB/s eta 0:00:48\n", + " --------------- ------------------------ 51.3/136.5 MB 1.8 MB/s eta 0:00:48\n", + " --------------- ------------------------ 51.4/136.5 MB 1.8 MB/s eta 0:00:49\n", + " --------------- ------------------------ 51.5/136.5 MB 1.8 MB/s eta 0:00:49\n", + " --------------- ------------------------ 51.7/136.5 MB 1.8 MB/s eta 
0:00:49\n", + " --------------- ------------------------ 51.8/136.5 MB 1.8 MB/s eta 0:00:49\n", + " --------------- ------------------------ 51.9/136.5 MB 1.8 MB/s eta 0:00:49\n", + " --------------- ------------------------ 52.0/136.5 MB 1.8 MB/s eta 0:00:49\n", + " --------------- ------------------------ 52.2/136.5 MB 1.7 MB/s eta 0:00:49\n", + " --------------- ------------------------ 52.3/136.5 MB 1.7 MB/s eta 0:00:49\n", + " --------------- ------------------------ 52.4/136.5 MB 1.7 MB/s eta 0:00:49\n", + " --------------- ------------------------ 52.6/136.5 MB 1.7 MB/s eta 0:00:49\n", + " --------------- ------------------------ 52.7/136.5 MB 1.7 MB/s eta 0:00:49\n", + " --------------- ------------------------ 52.8/136.5 MB 1.7 MB/s eta 0:00:49\n", + " --------------- ------------------------ 52.9/136.5 MB 1.7 MB/s eta 0:00:49\n", + " --------------- ------------------------ 53.0/136.5 MB 1.7 MB/s eta 0:00:49\n", + " --------------- ------------------------ 53.1/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 53.2/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 53.3/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 53.4/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 53.6/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 53.7/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 53.7/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 53.9/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 54.0/136.5 MB 1.6 MB/s eta 0:00:51\n", + " --------------- ------------------------ 54.1/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 54.3/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 54.5/136.5 MB 1.7 MB/s eta 0:00:50\n", + " --------------- ------------------------ 54.6/136.5 MB 1.7 MB/s eta 0:00:50\n", + " ---------------- ----------------------- 54.8/136.5 MB 1.7 MB/s eta 0:00:49\n", + " ---------------- ----------------------- 54.9/136.5 MB 1.7 MB/s eta 0:00:49\n", + " ---------------- ----------------------- 55.0/136.5 MB 1.7 MB/s eta 0:00:49\n", + " ---------------- ----------------------- 55.1/136.5 MB 1.7 MB/s eta 0:00:49\n", + " ---------------- ----------------------- 55.3/136.5 MB 1.7 MB/s eta 0:00:49\n", + " ---------------- ----------------------- 55.4/136.5 MB 1.7 MB/s eta 0:00:48\n", + " ---------------- ----------------------- 55.5/136.5 MB 1.7 MB/s eta 0:00:48\n", + " ---------------- ----------------------- 55.7/136.5 MB 1.7 MB/s eta 0:00:47\n", + " ---------------- ----------------------- 55.8/136.5 MB 1.7 MB/s eta 0:00:47\n", + " ---------------- ----------------------- 55.8/136.5 MB 1.7 MB/s eta 0:00:47\n", + " ---------------- ----------------------- 56.0/136.5 MB 1.8 MB/s eta 0:00:46\n", + " ---------------- ----------------------- 56.0/136.5 MB 1.8 MB/s eta 0:00:46\n", + " ---------------- ----------------------- 56.1/136.5 MB 1.8 MB/s eta 0:00:46\n", + " ---------------- ----------------------- 56.2/136.5 MB 1.8 MB/s eta 0:00:46\n", + " ---------------- ----------------------- 56.2/136.5 MB 1.8 MB/s eta 0:00:45\n", + " ---------------- ----------------------- 56.3/136.5 MB 1.8 MB/s eta 0:00:45\n", + " ---------------- ----------------------- 56.4/136.5 MB 1.8 MB/s eta 0:00:45\n", + " ---------------- ----------------------- 56.5/136.5 MB 1.8 MB/s eta 0:00:45\n", + " 
---------------- ----------------------- 56.6/136.5 MB 1.9 MB/s eta 0:00:43\n", + " ---------------- ----------------------- 56.8/136.5 MB 1.9 MB/s eta 0:00:43\n", + " ---------------- ----------------------- 56.9/136.5 MB 1.9 MB/s eta 0:00:41\n", + " ---------------- ----------------------- 57.0/136.5 MB 2.0 MB/s eta 0:00:41\n", + " ---------------- ----------------------- 57.2/136.5 MB 2.1 MB/s eta 0:00:39\n", + " ---------------- ----------------------- 57.2/136.5 MB 2.1 MB/s eta 0:00:39\n", + " ---------------- ----------------------- 57.3/136.5 MB 2.1 MB/s eta 0:00:39\n", + " ---------------- ----------------------- 57.4/136.5 MB 2.1 MB/s eta 0:00:39\n", + " ---------------- ----------------------- 57.5/136.5 MB 2.1 MB/s eta 0:00:39\n", + " ---------------- ----------------------- 57.7/136.5 MB 2.1 MB/s eta 0:00:39\n", + " ---------------- ----------------------- 57.8/136.5 MB 2.1 MB/s eta 0:00:38\n", + " ---------------- ----------------------- 57.9/136.5 MB 2.1 MB/s eta 0:00:38\n", + " ----------------- ---------------------- 58.1/136.5 MB 2.1 MB/s eta 0:00:38\n", + " ----------------- ---------------------- 58.2/136.5 MB 2.1 MB/s eta 0:00:37\n", + " ----------------- ---------------------- 58.4/136.5 MB 2.1 MB/s eta 0:00:37\n", + " ----------------- ---------------------- 58.6/136.5 MB 2.2 MB/s eta 0:00:37\n", + " ----------------- ---------------------- 58.7/136.5 MB 2.2 MB/s eta 0:00:36\n", + " ----------------- ---------------------- 58.9/136.5 MB 2.2 MB/s eta 0:00:36\n", + " ----------------- ---------------------- 59.1/136.5 MB 2.2 MB/s eta 0:00:35\n", + " ----------------- ---------------------- 59.3/136.5 MB 2.2 MB/s eta 0:00:35\n", + " ----------------- ---------------------- 59.5/136.5 MB 2.3 MB/s eta 0:00:34\n", + " ----------------- ---------------------- 59.7/136.5 MB 2.3 MB/s eta 0:00:34\n", + " ----------------- ---------------------- 59.8/136.5 MB 2.3 MB/s eta 0:00:34\n", + " ----------------- ---------------------- 59.9/136.5 MB 2.3 MB/s eta 0:00:34\n", + " ----------------- ---------------------- 60.1/136.5 MB 2.3 MB/s eta 0:00:33\n", + " ----------------- ---------------------- 60.2/136.5 MB 2.4 MB/s eta 0:00:33\n", + " ----------------- ---------------------- 60.3/136.5 MB 2.4 MB/s eta 0:00:32\n", + " ----------------- ---------------------- 60.4/136.5 MB 2.4 MB/s eta 0:00:32\n", + " ----------------- ---------------------- 60.6/136.5 MB 2.5 MB/s eta 0:00:31\n", + " ----------------- ---------------------- 60.7/136.5 MB 2.5 MB/s eta 0:00:31\n", + " ----------------- ---------------------- 60.8/136.5 MB 2.5 MB/s eta 0:00:31\n", + " ----------------- ---------------------- 61.0/136.5 MB 2.5 MB/s eta 0:00:30\n", + " ----------------- ---------------------- 61.1/136.5 MB 2.6 MB/s eta 0:00:30\n", + " ----------------- ---------------------- 61.2/136.5 MB 2.6 MB/s eta 0:00:29\n", + " ----------------- ---------------------- 61.3/136.5 MB 2.6 MB/s eta 0:00:29\n", + " ------------------ --------------------- 61.4/136.5 MB 2.6 MB/s eta 0:00:29\n", + " ------------------ --------------------- 61.6/136.5 MB 2.7 MB/s eta 0:00:29\n", + " ------------------ --------------------- 61.8/136.5 MB 2.7 MB/s eta 0:00:28\n", + " ------------------ --------------------- 62.0/136.5 MB 2.7 MB/s eta 0:00:28\n", + " ------------------ --------------------- 62.1/136.5 MB 2.7 MB/s eta 0:00:28\n", + " ------------------ --------------------- 62.2/136.5 MB 2.7 MB/s eta 0:00:28\n", + " ------------------ --------------------- 62.4/136.5 MB 2.7 MB/s eta 0:00:28\n", + " ------------------ 
--------------------- 62.6/136.5 MB 2.7 MB/s eta 0:00:28\n", + " ------------------ --------------------- 62.8/136.5 MB 2.7 MB/s eta 0:00:27\n", + " ------------------ --------------------- 62.9/136.5 MB 2.8 MB/s eta 0:00:27\n", + " ------------------ --------------------- 63.1/136.5 MB 2.8 MB/s eta 0:00:27\n", + " ------------------ --------------------- 63.3/136.5 MB 2.8 MB/s eta 0:00:26\n", + " ------------------ --------------------- 63.4/136.5 MB 2.8 MB/s eta 0:00:26\n", + " ------------------ --------------------- 63.6/136.5 MB 2.9 MB/s eta 0:00:26\n", + " ------------------ --------------------- 63.8/136.5 MB 2.9 MB/s eta 0:00:26\n", + " ------------------ --------------------- 64.0/136.5 MB 2.9 MB/s eta 0:00:25\n", + " ------------------ --------------------- 64.2/136.5 MB 2.9 MB/s eta 0:00:25\n", + " ------------------ --------------------- 64.4/136.5 MB 3.0 MB/s eta 0:00:25\n", + " ------------------ --------------------- 64.5/136.5 MB 2.9 MB/s eta 0:00:25\n", + " ------------------ --------------------- 64.6/136.5 MB 2.9 MB/s eta 0:00:25\n", + " ------------------- -------------------- 64.8/136.5 MB 2.9 MB/s eta 0:00:25\n", + " ------------------- -------------------- 65.0/136.5 MB 3.0 MB/s eta 0:00:25\n", + " ------------------- -------------------- 65.2/136.5 MB 3.0 MB/s eta 0:00:24\n", + " ------------------- -------------------- 65.4/136.5 MB 3.0 MB/s eta 0:00:24\n", + " ------------------- -------------------- 65.6/136.5 MB 3.0 MB/s eta 0:00:24\n", + " ------------------- -------------------- 65.7/136.5 MB 3.0 MB/s eta 0:00:24\n", + " ------------------- -------------------- 65.8/136.5 MB 3.0 MB/s eta 0:00:24\n", + " ------------------- -------------------- 65.8/136.5 MB 3.0 MB/s eta 0:00:24\n", + " ------------------- -------------------- 65.9/136.5 MB 2.9 MB/s eta 0:00:24\n", + " ------------------- -------------------- 66.0/136.5 MB 2.9 MB/s eta 0:00:25\n", + " ------------------- -------------------- 66.2/136.5 MB 3.0 MB/s eta 0:00:24\n", + " ------------------- -------------------- 66.3/136.5 MB 3.0 MB/s eta 0:00:24\n", + " ------------------- -------------------- 66.5/136.5 MB 3.0 MB/s eta 0:00:23\n", + " ------------------- -------------------- 66.7/136.5 MB 3.1 MB/s eta 0:00:23\n", + " ------------------- -------------------- 66.9/136.5 MB 3.2 MB/s eta 0:00:23\n", + " ------------------- -------------------- 67.0/136.5 MB 3.2 MB/s eta 0:00:22\n", + " ------------------- -------------------- 67.2/136.5 MB 3.2 MB/s eta 0:00:22\n", + " ------------------- -------------------- 67.4/136.5 MB 3.2 MB/s eta 0:00:22\n", + " ------------------- -------------------- 67.7/136.5 MB 3.3 MB/s eta 0:00:21\n", + " ------------------- -------------------- 67.9/136.5 MB 3.4 MB/s eta 0:00:21\n", + " ------------------- -------------------- 68.1/136.5 MB 3.4 MB/s eta 0:00:21\n", + " -------------------- ------------------- 68.3/136.5 MB 3.4 MB/s eta 0:00:21\n", + " -------------------- ------------------- 68.4/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 68.6/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 68.8/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 68.9/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 68.9/136.5 MB 3.3 MB/s eta 0:00:21\n", + " -------------------- ------------------- 69.1/136.5 MB 3.4 MB/s eta 0:00:21\n", + " -------------------- ------------------- 69.3/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 
69.5/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 69.7/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 69.8/136.5 MB 3.3 MB/s eta 0:00:21\n", + " -------------------- ------------------- 70.0/136.5 MB 3.3 MB/s eta 0:00:20\n", + " -------------------- ------------------- 70.2/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 70.3/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 70.5/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 70.7/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 70.9/136.5 MB 3.4 MB/s eta 0:00:20\n", + " -------------------- ------------------- 71.1/136.5 MB 3.5 MB/s eta 0:00:19\n", + " -------------------- ------------------- 71.2/136.5 MB 3.5 MB/s eta 0:00:19\n", + " -------------------- ------------------- 71.4/136.5 MB 3.5 MB/s eta 0:00:19\n", + " -------------------- ------------------- 71.6/136.5 MB 3.6 MB/s eta 0:00:19\n", + " --------------------- ------------------ 71.8/136.5 MB 3.6 MB/s eta 0:00:19\n", + " --------------------- ------------------ 72.1/136.5 MB 3.6 MB/s eta 0:00:18\n", + " --------------------- ------------------ 72.2/136.5 MB 3.6 MB/s eta 0:00:18\n", + " --------------------- ------------------ 72.4/136.5 MB 3.6 MB/s eta 0:00:18\n", + " --------------------- ------------------ 72.5/136.5 MB 3.6 MB/s eta 0:00:18\n", + " --------------------- ------------------ 72.6/136.5 MB 3.6 MB/s eta 0:00:18\n", + " --------------------- ------------------ 72.9/136.5 MB 3.6 MB/s eta 0:00:18\n", + " --------------------- ------------------ 73.1/136.5 MB 3.6 MB/s eta 0:00:18\n", + " --------------------- ------------------ 73.4/136.5 MB 3.7 MB/s eta 0:00:18\n", + " --------------------- ------------------ 73.6/136.5 MB 3.7 MB/s eta 0:00:18\n", + " --------------------- ------------------ 73.9/136.5 MB 3.7 MB/s eta 0:00:17\n", + " --------------------- ------------------ 74.0/136.5 MB 3.7 MB/s eta 0:00:17\n", + " --------------------- ------------------ 74.2/136.5 MB 3.7 MB/s eta 0:00:17\n", + " --------------------- ------------------ 74.4/136.5 MB 3.7 MB/s eta 0:00:17\n", + " --------------------- ------------------ 74.5/136.5 MB 3.7 MB/s eta 0:00:17\n", + " --------------------- ------------------ 74.6/136.5 MB 3.7 MB/s eta 0:00:17\n", + " --------------------- ------------------ 74.8/136.5 MB 3.7 MB/s eta 0:00:17\n", + " --------------------- ------------------ 75.0/136.5 MB 3.7 MB/s eta 0:00:17\n", + " ---------------------- ----------------- 75.2/136.5 MB 3.7 MB/s eta 0:00:17\n", + " ---------------------- ----------------- 75.3/136.5 MB 3.7 MB/s eta 0:00:17\n", + " ---------------------- ----------------- 75.6/136.5 MB 3.7 MB/s eta 0:00:17\n", + " ---------------------- ----------------- 75.8/136.5 MB 3.7 MB/s eta 0:00:17\n", + " ---------------------- ----------------- 76.0/136.5 MB 3.7 MB/s eta 0:00:17\n", + " ---------------------- ----------------- 76.3/136.5 MB 3.9 MB/s eta 0:00:16\n", + " ---------------------- ----------------- 76.6/136.5 MB 4.0 MB/s eta 0:00:16\n", + " ---------------------- ----------------- 76.7/136.5 MB 4.0 MB/s eta 0:00:16\n", + " ---------------------- ----------------- 76.9/136.5 MB 4.0 MB/s eta 0:00:16\n", + " ---------------------- ----------------- 77.2/136.5 MB 4.0 MB/s eta 0:00:15\n", + " ---------------------- ----------------- 77.5/136.5 MB 4.1 MB/s eta 0:00:15\n", + " ---------------------- ----------------- 77.8/136.5 MB 4.1 MB/s eta 
0:00:15\n", + " ---------------------- ----------------- 78.0/136.5 MB 4.1 MB/s eta 0:00:15\n", + " ---------------------- ----------------- 78.3/136.5 MB 4.1 MB/s eta 0:00:15\n", + " ----------------------- ---------------- 78.6/136.5 MB 4.2 MB/s eta 0:00:14\n", + " ----------------------- ---------------- 78.7/136.5 MB 4.1 MB/s eta 0:00:14\n", + " ----------------------- ---------------- 78.9/136.5 MB 4.1 MB/s eta 0:00:14\n", + " ----------------------- ---------------- 79.1/136.5 MB 4.2 MB/s eta 0:00:14\n", + " ----------------------- ---------------- 79.4/136.5 MB 4.3 MB/s eta 0:00:14\n", + " ----------------------- ---------------- 79.5/136.5 MB 4.3 MB/s eta 0:00:14\n", + " ----------------------- ---------------- 79.8/136.5 MB 4.3 MB/s eta 0:00:14\n", + " ----------------------- ---------------- 80.0/136.5 MB 4.3 MB/s eta 0:00:14\n", + " ----------------------- ---------------- 80.2/136.5 MB 4.4 MB/s eta 0:00:13\n", + " ----------------------- ---------------- 80.3/136.5 MB 4.4 MB/s eta 0:00:13\n", + " ----------------------- ---------------- 80.6/136.5 MB 4.4 MB/s eta 0:00:13\n", + " ----------------------- ---------------- 80.8/136.5 MB 4.4 MB/s eta 0:00:13\n", + " ----------------------- ---------------- 81.0/136.5 MB 4.4 MB/s eta 0:00:13\n", + " ----------------------- ---------------- 81.2/136.5 MB 4.4 MB/s eta 0:00:13\n", + " ----------------------- ---------------- 81.5/136.5 MB 4.5 MB/s eta 0:00:13\n", + " ----------------------- ---------------- 81.8/136.5 MB 4.5 MB/s eta 0:00:13\n", + " ------------------------ --------------- 82.0/136.5 MB 4.5 MB/s eta 0:00:12\n", + " ------------------------ --------------- 82.3/136.5 MB 4.6 MB/s eta 0:00:12\n", + " ------------------------ --------------- 82.6/136.5 MB 4.6 MB/s eta 0:00:12\n", + " ------------------------ --------------- 82.9/136.5 MB 4.7 MB/s eta 0:00:12\n", + " ------------------------ --------------- 83.1/136.5 MB 4.7 MB/s eta 0:00:12\n", + " ------------------------ --------------- 83.4/136.5 MB 4.7 MB/s eta 0:00:12\n", + " ------------------------ --------------- 83.5/136.5 MB 4.7 MB/s eta 0:00:12\n", + " ------------------------ --------------- 83.8/136.5 MB 4.7 MB/s eta 0:00:12\n", + " ------------------------ --------------- 84.0/136.5 MB 4.7 MB/s eta 0:00:12\n", + " ------------------------ --------------- 84.2/136.5 MB 4.7 MB/s eta 0:00:12\n", + " ------------------------ --------------- 84.4/136.5 MB 4.8 MB/s eta 0:00:11\n", + " ------------------------ --------------- 84.7/136.5 MB 4.8 MB/s eta 0:00:11\n", + " ------------------------ --------------- 84.9/136.5 MB 4.9 MB/s eta 0:00:11\n", + " ------------------------ --------------- 85.1/136.5 MB 4.9 MB/s eta 0:00:11\n", + " ------------------------- -------------- 85.4/136.5 MB 5.0 MB/s eta 0:00:11\n", + " ------------------------- -------------- 85.6/136.5 MB 5.0 MB/s eta 0:00:11\n", + " ------------------------- -------------- 85.7/136.5 MB 5.0 MB/s eta 0:00:11\n", + " ------------------------- -------------- 85.9/136.5 MB 4.9 MB/s eta 0:00:11\n", + " ------------------------- -------------- 86.1/136.5 MB 4.8 MB/s eta 0:00:11\n", + " ------------------------- -------------- 86.3/136.5 MB 4.9 MB/s eta 0:00:11\n", + " ------------------------- -------------- 86.4/136.5 MB 4.9 MB/s eta 0:00:11\n", + " ------------------------- -------------- 86.5/136.5 MB 4.7 MB/s eta 0:00:11\n", + " ------------------------- -------------- 86.8/136.5 MB 4.7 MB/s eta 0:00:11\n", + " ------------------------- -------------- 87.1/136.5 MB 4.8 MB/s eta 0:00:11\n", + " 
------------------------- -------------- 87.2/136.5 MB 4.8 MB/s eta 0:00:11\n", + " ------------------------- -------------- 87.3/136.5 MB 4.7 MB/s eta 0:00:11\n", + " ------------------------- -------------- 87.5/136.5 MB 4.6 MB/s eta 0:00:11\n", + " ------------------------- -------------- 87.6/136.5 MB 4.6 MB/s eta 0:00:11\n", + " ------------------------- -------------- 87.8/136.5 MB 4.6 MB/s eta 0:00:11\n", + " ------------------------- -------------- 88.1/136.5 MB 4.6 MB/s eta 0:00:11\n", + " ------------------------- -------------- 88.3/136.5 MB 4.6 MB/s eta 0:00:11\n", + " ------------------------- -------------- 88.6/136.5 MB 4.5 MB/s eta 0:00:11\n", + " -------------------------- ------------- 88.8/136.5 MB 4.5 MB/s eta 0:00:11\n", + " -------------------------- ------------- 88.9/136.5 MB 4.5 MB/s eta 0:00:11\n", + " -------------------------- ------------- 89.1/136.5 MB 4.5 MB/s eta 0:00:11\n", + " -------------------------- ------------- 89.4/136.5 MB 4.6 MB/s eta 0:00:11\n", + " -------------------------- ------------- 89.6/136.5 MB 4.5 MB/s eta 0:00:11\n", + " -------------------------- ------------- 89.9/136.5 MB 4.6 MB/s eta 0:00:11\n", + " -------------------------- ------------- 90.1/136.5 MB 4.6 MB/s eta 0:00:11\n", + " -------------------------- ------------- 90.2/136.5 MB 4.6 MB/s eta 0:00:11\n", + " -------------------------- ------------- 90.4/136.5 MB 4.5 MB/s eta 0:00:11\n", + " -------------------------- ------------- 90.6/136.5 MB 4.6 MB/s eta 0:00:11\n", + " -------------------------- ------------- 90.8/136.5 MB 4.5 MB/s eta 0:00:11\n", + " -------------------------- ------------- 91.1/136.5 MB 4.6 MB/s eta 0:00:10\n", + " -------------------------- ------------- 91.3/136.5 MB 4.5 MB/s eta 0:00:10\n", + " -------------------------- ------------- 91.5/136.5 MB 4.5 MB/s eta 0:00:10\n", + " -------------------------- ------------- 91.8/136.5 MB 4.6 MB/s eta 0:00:10\n", + " -------------------------- ------------- 92.1/136.5 MB 4.6 MB/s eta 0:00:10\n", + " --------------------------- ------------ 92.4/136.5 MB 4.6 MB/s eta 0:00:10\n", + " --------------------------- ------------ 92.5/136.5 MB 4.6 MB/s eta 0:00:10\n", + " --------------------------- ------------ 92.7/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 92.9/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 93.1/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 93.4/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 93.6/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 93.8/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 94.0/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 94.2/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 94.5/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 94.7/136.5 MB 4.5 MB/s eta 0:00:10\n", + " --------------------------- ------------ 94.9/136.5 MB 4.4 MB/s eta 0:00:10\n", + " --------------------------- ------------ 95.1/136.5 MB 4.4 MB/s eta 0:00:10\n", + " --------------------------- ------------ 95.3/136.5 MB 4.5 MB/s eta 0:00:10\n", + " ---------------------------- ----------- 95.6/136.5 MB 4.5 MB/s eta 0:00:10\n", + " ---------------------------- ----------- 95.9/136.5 MB 4.5 MB/s eta 0:00:10\n", + " ---------------------------- ----------- 96.1/136.5 MB 4.6 MB/s eta 0:00:09\n", + " 
---------------------------- ----------- 96.3/136.5 MB 4.5 MB/s eta 0:00:09\n", + " ---------------------------- ----------- 96.6/136.5 MB 4.6 MB/s eta 0:00:09\n", + " ---------------------------- ----------- 96.8/136.5 MB 4.7 MB/s eta 0:00:09\n", + " ---------------------------- ----------- 97.1/136.5 MB 4.7 MB/s eta 0:00:09\n", + " ---------------------------- ----------- 97.3/136.5 MB 4.6 MB/s eta 0:00:09\n", + " ---------------------------- ----------- 97.5/136.5 MB 4.7 MB/s eta 0:00:09\n", + " ---------------------------- ----------- 97.8/136.5 MB 4.8 MB/s eta 0:00:09\n", + " ---------------------------- ----------- 98.1/136.5 MB 4.8 MB/s eta 0:00:08\n", + " ---------------------------- ----------- 98.2/136.5 MB 4.9 MB/s eta 0:00:08\n", + " ---------------------------- ----------- 98.5/136.5 MB 4.9 MB/s eta 0:00:08\n", + " ---------------------------- ----------- 98.6/136.5 MB 4.8 MB/s eta 0:00:08\n", + " ---------------------------- ----------- 98.8/136.5 MB 4.7 MB/s eta 0:00:09\n", + " ----------------------------- ---------- 99.0/136.5 MB 4.8 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 99.2/136.5 MB 4.8 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 99.4/136.5 MB 4.7 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 99.5/136.5 MB 4.7 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 99.7/136.5 MB 4.7 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 100.0/136.5 MB 4.7 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 100.1/136.5 MB 4.6 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 100.2/136.5 MB 4.6 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 100.4/136.5 MB 4.6 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 100.6/136.5 MB 4.6 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 100.8/136.5 MB 4.6 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 101.0/136.5 MB 4.6 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 101.1/136.5 MB 4.6 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 101.3/136.5 MB 4.6 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 101.5/136.5 MB 4.5 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 101.8/136.5 MB 4.5 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 102.0/136.5 MB 4.5 MB/s eta 0:00:08\n", + " ----------------------------- ---------- 102.3/136.5 MB 4.5 MB/s eta 0:00:08\n", + " ------------------------------ --------- 102.5/136.5 MB 4.5 MB/s eta 0:00:08\n", + " ------------------------------ --------- 102.7/136.5 MB 4.5 MB/s eta 0:00:08\n", + " ------------------------------ --------- 102.8/136.5 MB 4.5 MB/s eta 0:00:08\n", + " ------------------------------ --------- 102.9/136.5 MB 4.4 MB/s eta 0:00:08\n", + " ------------------------------ --------- 103.0/136.5 MB 4.4 MB/s eta 0:00:08\n", + " ------------------------------ --------- 103.2/136.5 MB 4.4 MB/s eta 0:00:08\n", + " ------------------------------ --------- 103.5/136.5 MB 4.4 MB/s eta 0:00:08\n", + " ------------------------------ --------- 103.7/136.5 MB 4.4 MB/s eta 0:00:08\n", + " ------------------------------ --------- 103.9/136.5 MB 4.4 MB/s eta 0:00:08\n", + " ------------------------------ --------- 104.0/136.5 MB 4.4 MB/s eta 0:00:08\n", + " ------------------------------ --------- 104.1/136.5 MB 4.3 MB/s eta 0:00:08\n", + " ------------------------------ --------- 104.3/136.5 MB 4.3 MB/s eta 0:00:08\n", + " 
------------------------------ --------- 104.5/136.5 MB 4.3 MB/s eta 0:00:08\n", + " ------------------------------ --------- 104.7/136.5 MB 4.3 MB/s eta 0:00:08\n", + " ------------------------------ --------- 105.1/136.5 MB 4.3 MB/s eta 0:00:08\n", + " ------------------------------ --------- 105.3/136.5 MB 4.4 MB/s eta 0:00:08\n", + " ------------------------------ --------- 105.6/136.5 MB 4.4 MB/s eta 0:00:08\n", + " ------------------------------ --------- 105.7/136.5 MB 4.3 MB/s eta 0:00:08\n", + " ------------------------------- -------- 106.0/136.5 MB 4.3 MB/s eta 0:00:08\n", + " ------------------------------- -------- 106.3/136.5 MB 4.3 MB/s eta 0:00:07\n", + " ------------------------------- -------- 106.5/136.5 MB 4.3 MB/s eta 0:00:07\n", + " ------------------------------- -------- 106.8/136.5 MB 4.3 MB/s eta 0:00:07\n", + " ------------------------------- -------- 107.0/136.5 MB 4.3 MB/s eta 0:00:07\n", + " ------------------------------- -------- 107.0/136.5 MB 4.3 MB/s eta 0:00:07\n", + " ------------------------------- -------- 107.1/136.5 MB 4.3 MB/s eta 0:00:07\n", + " ------------------------------- -------- 107.2/136.5 MB 4.2 MB/s eta 0:00:08\n", + " ------------------------------- -------- 107.4/136.5 MB 4.1 MB/s eta 0:00:08\n", + " ------------------------------- -------- 107.6/136.5 MB 4.1 MB/s eta 0:00:07\n", + " ------------------------------- -------- 107.8/136.5 MB 4.1 MB/s eta 0:00:07\n", + " ------------------------------- -------- 108.1/136.5 MB 4.1 MB/s eta 0:00:07\n", + " ------------------------------- -------- 108.2/136.5 MB 4.1 MB/s eta 0:00:07\n", + " ------------------------------- -------- 108.4/136.5 MB 4.1 MB/s eta 0:00:07\n", + " ------------------------------- -------- 108.6/136.5 MB 4.1 MB/s eta 0:00:07\n", + " ------------------------------- -------- 108.7/136.5 MB 4.0 MB/s eta 0:00:07\n", + " ------------------------------- -------- 109.0/136.5 MB 4.1 MB/s eta 0:00:07\n", + " ------------------------------- -------- 109.1/136.5 MB 4.1 MB/s eta 0:00:07\n", + " -------------------------------- ------- 109.4/136.5 MB 4.1 MB/s eta 0:00:07\n", + " -------------------------------- ------- 109.6/136.5 MB 4.1 MB/s eta 0:00:07\n", + " -------------------------------- ------- 109.8/136.5 MB 4.1 MB/s eta 0:00:07\n", + " -------------------------------- ------- 110.0/136.5 MB 4.1 MB/s eta 0:00:07\n", + " -------------------------------- ------- 110.3/136.5 MB 4.2 MB/s eta 0:00:07\n", + " -------------------------------- ------- 110.4/136.5 MB 4.2 MB/s eta 0:00:07\n", + " -------------------------------- ------- 110.7/136.5 MB 4.2 MB/s eta 0:00:07\n", + " -------------------------------- ------- 110.9/136.5 MB 4.2 MB/s eta 0:00:07\n", + " -------------------------------- ------- 111.1/136.5 MB 4.3 MB/s eta 0:00:06\n", + " -------------------------------- ------- 111.4/136.5 MB 4.3 MB/s eta 0:00:06\n", + " -------------------------------- ------- 111.5/136.5 MB 4.3 MB/s eta 0:00:06\n", + " -------------------------------- ------- 111.7/136.5 MB 4.3 MB/s eta 0:00:06\n", + " -------------------------------- ------- 111.8/136.5 MB 4.2 MB/s eta 0:00:06\n", + " -------------------------------- ------- 112.0/136.5 MB 4.2 MB/s eta 0:00:06\n", + " -------------------------------- ------- 112.1/136.5 MB 4.2 MB/s eta 0:00:06\n", + " -------------------------------- ------- 112.3/136.5 MB 4.1 MB/s eta 0:00:06\n", + " -------------------------------- ------- 112.4/136.5 MB 4.1 MB/s eta 0:00:06\n", + " -------------------------------- ------- 112.5/136.5 MB 4.0 MB/s eta 
0:00:06\n",
+ " ---------------------------------------- 136.5/136.5 MB 4.2 MB/s eta 0:00:00\n",
+ "Downloading gradio-4.39.0-py3-none-any.whl (12.4 MB)\n",
+ " ---------------------------------------- 12.4/12.4 MB 3.3 MB/s eta 0:00:00\n",
+ "Downloading gradio_client-1.1.1-py3-none-any.whl (318 kB)\n",
+ "Downloading tomlkit-0.12.0-py3-none-any.whl (37 kB)\n",
+ "Downloading trl-0.9.6-py3-none-any.whl (245 kB)\n",
+ "Downloading uvicorn-0.30.3-py3-none-any.whl (62 kB)\n",
+ "Downloading fastapi-0.111.1-py3-none-any.whl (92 kB)\n",
+ "Downloading sse_starlette-2.1.2-py3-none-any.whl (9.3 kB)\n",
+ "Downloading aiofiles-23.2.1-py3-none-any.whl (15 kB)\n",
+ "Downloading email_validator-2.2.0-py3-none-any.whl (33 kB)\n",
+ "Downloading fastapi_cli-0.0.4-py3-none-any.whl (9.5 kB)\n",
+ "Downloading importlib_resources-6.4.0-py3-none-any.whl (38 kB)\n",
+ "Downloading python_multipart-0.0.9-py3-none-any.whl (22 kB)\n",
+ "Downloading ruff-0.5.4-py3-none-win_amd64.whl (8.6 MB)\n",
+ " ---------------------------------------- 8.6/8.6 MB 3.3 MB/s eta 0:00:00\n",
+ "Downloading semantic_version-2.10.0-py2.py3-none-any.whl (15 kB)\n",
+ "Downloading starlette-0.37.2-py3-none-any.whl (71 kB)\n",
+ "Downloading typer-0.12.3-py3-none-any.whl (47 kB)\n",
+ "Downloading tyro-0.8.5-py3-none-any.whl (103 kB)\n",
+ "Downloading pydub-0.25.1-py2.py3-none-any.whl (32 kB)\n",
+ "Downloading termcolor-2.4.0-py3-none-any.whl (7.7 kB)\n",
+ "Downloading dnspython-2.6.1-py3-none-any.whl (307 kB)\n",
+ "Downloading docstring_parser-0.16-py3-none-any.whl (36 kB)\n",
+ "Downloading httptools-0.6.1-cp311-cp311-win_amd64.whl (55 kB)\n",
+ "Downloading rich-13.7.1-py3-none-any.whl (240 kB)\n",
+ "Downloading shellingham-1.5.4-py2.py3-none-any.whl (9.8 kB)\n",
+ "Downloading shtab-1.7.1-py3-none-any.whl (14 kB)\n",
+ "Downloading watchfiles-0.22.0-cp311-none-win_amd64.whl (281 kB)\n",
+ "Downloading websockets-11.0.3-cp311-cp311-win_amd64.whl (124 kB)\n",
+ "Downloading markdown_it_py-3.0.0-py3-none-any.whl (87 kB)\n",
+ " ---------------------------------------- 0.0/87.5 kB ? 
eta -:--:--\n", + " ---------------------------------------- 87.5/87.5 kB 4.8 MB/s eta 0:00:00\n", + "Downloading mdurl-0.1.2-py3-none-any.whl (10.0 kB)\n", + "Checking if build backend supports build_editable: started\n", + "Checking if build backend supports build_editable: finished with status 'done'\n", + "Building wheels for collected packages: llamafactory, fire, ffmpy\n", + " Building editable for llamafactory (pyproject.toml): started\n", + " Building editable for llamafactory (pyproject.toml): finished with status 'done'\n", + " Created wheel for llamafactory: filename=llamafactory-0.8.4.dev0-0.editable-py3-none-any.whl size=16545 sha256=ebab968c3cb2a7fcc5b0e88024156b723b728fb1bcffe6bcac56c2d449d6c8c0\n", + " Stored in directory: C:\\Users\\HT\\AppData\\Local\\Temp\\pip-ephem-wheel-cache-5kyehr6r\\wheels\\30\\01\\4b\\06ea8a63b21ee32531dffb8b1d2401bff7514c60c22c232066\n", + " Building wheel for fire (setup.py): started\n", + " Building wheel for fire (setup.py): finished with status 'done'\n", + " Created wheel for fire: filename=fire-0.6.0-py2.py3-none-any.whl size=117047 sha256=b438186432a78be481df47da5435f06f77c1bb00e56e1c9aad8523827d5300e3\n", + " Stored in directory: c:\\users\\ht\\appdata\\local\\pip\\cache\\wheels\\6a\\f3\\0c\\fa347dfa663f573462c6533d259c2c859e97e103d1ce21538f\n", + " Building wheel for ffmpy (setup.py): started\n", + " Building wheel for ffmpy (setup.py): finished with status 'done'\n", + " Created wheel for ffmpy: filename=ffmpy-0.3.2-py3-none-any.whl size=5607 sha256=811eb032bcc6bcf1a508c8913e7bfcba11e2ffeaa902a3f92594876e2a853371\n", + " Stored in directory: c:\\users\\ht\\appdata\\local\\pip\\cache\\wheels\\55\\3c\\f2\\f6e34046bac0d57c13c7d08123b85872423b89c8f59bafda51\n", + "Successfully built llamafactory fire ffmpy\n", + "Installing collected packages: pydub, ffmpy, websockets, tomlkit, termcolor, shtab, shellingham, semantic-version, ruff, python-multipart, mdurl, importlib-resources, httptools, docstring-parser, dnspython, aiofiles, watchfiles, uvicorn, starlette, markdown-it-py, fire, email_validator, sse-starlette, rich, gradio-client, bitsandbytes, tyro, typer, trl, fastapi-cli, fastapi, gradio, llamafactory\n", + "Successfully installed aiofiles-23.2.1 bitsandbytes-0.43.2 dnspython-2.6.1 docstring-parser-0.16 email_validator-2.2.0 fastapi-0.111.1 fastapi-cli-0.0.4 ffmpy-0.3.2 fire-0.6.0 gradio-4.39.0 gradio-client-1.1.1 httptools-0.6.1 importlib-resources-6.4.0 llamafactory-0.8.4.dev0 markdown-it-py-3.0.0 mdurl-0.1.2 pydub-0.25.1 python-multipart-0.0.9 rich-13.7.1 ruff-0.5.4 semantic-version-2.10.0 shellingham-1.5.4 shtab-1.7.1 sse-starlette-2.1.2 starlette-0.37.2 termcolor-2.4.0 tomlkit-0.12.0 trl-0.9.6 typer-0.12.3 tyro-0.8.5 uvicorn-0.30.3 watchfiles-0.22.0 websockets-11.0.3\n" ] } ], "source": [ "if need_to_setup_env:\n", - " %pip install -r requirements.txt" + " %pip install -r requirements.txt\n", + " !cd llama-factory && pip install -e .[torch,bitsandbytes]" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 3, "metadata": { "colab": { "base_uri": "https://localhost:8080/" @@ -148,7 +3041,7 @@ "True" ] }, - "execution_count": 5, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -166,7 +3059,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 4, "metadata": { "colab": { "base_uri": "https://localhost:8080/" @@ -360,7 +3253,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 10, "metadata": { "colab": { "base_uri": "https://localhost:8080/" @@ 
-373,7 +3266,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "JSON file saved to LLaMA-Factory/data/mgtv_train.json\n" + "JSON file saved to llama-factory/data/mgtv_train.json\n" ] } ], @@ -382,7 +3275,7 @@ "import json\n", "\n", "# Define the directory where you want to save the JSON file\n", - "output_dir = \"LLaMA-Factory/data/\"\n", + "output_dir = \"llama-factory/data/\"\n", "\n", "# Ensure the directory exists\n", "os.makedirs(output_dir, exist_ok=True)\n", @@ -399,23 +3292,23 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Sat Jul 20 22:46:57 2024 \n", + "Wed Jul 24 22:53:38 2024 \n", "+---------------------------------------------------------------------------------------+\n", - "| NVIDIA-SMI 535.98 Driver Version: 535.98 CUDA Version: 12.2 |\n", + "| NVIDIA-SMI 536.25 Driver Version: 536.25 CUDA Version: 12.2 |\n", "|-----------------------------------------+----------------------+----------------------+\n", "| GPU Name TCC/WDDM | Bus-Id Disp.A | Volatile Uncorr. ECC |\n", "| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |\n", "| | | MIG M. |\n", "|=========================================+======================+======================|\n", "| 0 NVIDIA GeForce RTX 3070 ... WDDM | 00000000:01:00.0 On | N/A |\n", - "| N/A 41C P8 15W / 140W | 1383MiB / 8192MiB | 2% Default |\n", + "| N/A 40C P8 14W / 140W | 1695MiB / 8192MiB | 0% Default |\n", "| | | N/A |\n", "+-----------------------------------------+----------------------+----------------------+\n", " \n", @@ -424,21 +3317,20 @@ "| GPU GI CI PID Type Process name GPU Memory |\n", "| ID ID Usage |\n", "|=======================================================================================|\n", - "| 0 N/A N/A 908 C ...ta\\Local\\Programs\\Ollama\\ollama.exe N/A |\n", - "| 0 N/A N/A 3092 C+G ...n\\126.0.2592.102\\msedgewebview2.exe N/A |\n", - "| 0 N/A N/A 7888 C+G ...mpt_builder\\LogiAiPromptBuilder.exe N/A |\n", - "| 0 N/A N/A 10552 C+G ...tionsPlus\\logioptionsplus_agent.exe N/A |\n", - "| 0 N/A N/A 10772 C+G C:\\Windows\\explorer.exe N/A |\n", - "| 0 N/A N/A 12996 C+G ....Search_cw5n1h2txyewy\\SearchApp.exe N/A |\n", - "| 0 N/A N/A 15364 C+G ...CBS_cw5n1h2txyewy\\TextInputHost.exe N/A |\n", - "| 0 N/A N/A 15944 C+G ...oogle\\Chrome\\Application\\chrome.exe N/A |\n", - "| 0 N/A N/A 16984 C+G ...t.LockApp_cw5n1h2txyewy\\LockApp.exe N/A |\n", - "| 0 N/A N/A 19196 C+G ...5n1h2txyewy\\ShellExperienceHost.exe N/A |\n", - "| 0 N/A N/A 19352 C+G ...ft Office\\root\\Office16\\ONENOTE.EXE N/A |\n", - "| 0 N/A N/A 21732 C+G ...aming\\Telegram Desktop\\Telegram.exe N/A |\n", - "| 0 N/A N/A 22920 C+G ...Desktop\\app-3.4.2\\GitHubDesktop.exe N/A |\n", - "| 0 N/A N/A 27608 C+G ...Programs\\Microsoft VS Code\\Code.exe N/A |\n", - "| 0 N/A N/A 30412 C+G ...8.0_x64__cv1g1gvanyjgm\\WhatsApp.exe N/A |\n", + "| 0 N/A N/A 3304 C+G C:\\Windows\\explorer.exe N/A |\n", + "| 0 N/A N/A 4856 C+G ...CBS_cw5n1h2txyewy\\TextInputHost.exe N/A |\n", + "| 0 N/A N/A 5700 C+G ...aming\\Telegram Desktop\\Telegram.exe N/A |\n", + "| 0 N/A N/A 8500 C+G ...Programs\\Microsoft VS Code\\Code.exe N/A |\n", + "| 0 N/A N/A 8808 C+G ...mpt_builder\\LogiAiPromptBuilder.exe N/A |\n", + "| 0 N/A N/A 10000 C+G ...tionsPlus\\logioptionsplus_agent.exe N/A |\n", + "| 0 N/A N/A 12240 C+G ....Search_cw5n1h2txyewy\\SearchApp.exe N/A |\n", + "| 0 N/A N/A 16684 C ...ta\\Local\\Programs\\Ollama\\ollama.exe N/A |\n", + "| 0 N/A N/A 16820 C+G 
...8.0_x64__cv1g1gvanyjgm\\WhatsApp.exe N/A |\n", + "| 0 N/A N/A 16840 C+G ...oogle\\Chrome\\Application\\chrome.exe N/A |\n", + "| 0 N/A N/A 17556 C+G ...5n1h2txyewy\\ShellExperienceHost.exe N/A |\n", + "| 0 N/A N/A 18544 C+G ...t.LockApp_cw5n1h2txyewy\\LockApp.exe N/A |\n", + "| 0 N/A N/A 24744 C+G ...n\\126.0.2592.113\\msedgewebview2.exe N/A |\n", + "| 0 N/A N/A 27380 C+G ...ft Office\\root\\Office16\\ONENOTE.EXE N/A |\n", "+---------------------------------------------------------------------------------------+\n" ] } @@ -449,7 +3341,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -458,20 +3350,20 @@ "text": [ "Git Bash Path: C:/Program Files/Git/bin/bash.exe\n", "Script Path: scripts/tune-lf.sh\n", - "Config Path: config/qwen2_7b_lora_sft_4bit.yaml\n", - "Config file not found at config/qwen2_7b_lora_sft_4bit.yaml\n" + "Config Path: config/qwen2_0.5b_lora_sft_4bit.yaml\n", + "Config file not found at config/qwen2_0.5b_lora_sft_4bit.yaml\n" ] }, { "ename": "CalledProcessError", - "evalue": "Command '['bash', 'scripts/tune-lf.sh', 'config/qwen2_7b_lora_sft_4bit.yaml']' returned non-zero exit status 127.", + "evalue": "Command '['bash', 'scripts/tune-lf.sh', 'config/qwen2_0.5b_lora_sft_4bit.yaml']' returned non-zero exit status 127.", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mCalledProcessError\u001b[0m Traceback (most recent call last)", "File \u001b[1;32m:23\u001b[0m\n", - "File \u001b[1;32mc:\\Users\\HT\\AppData\\Local\\Programs\\Python\\Python312\\Lib\\subprocess.py:571\u001b[0m, in \u001b[0;36mrun\u001b[1;34m(input, capture_output, timeout, check, *popenargs, **kwargs)\u001b[0m\n\u001b[0;32m 569\u001b[0m retcode \u001b[38;5;241m=\u001b[39m process\u001b[38;5;241m.\u001b[39mpoll()\n\u001b[0;32m 570\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m check \u001b[38;5;129;01mand\u001b[39;00m retcode:\n\u001b[1;32m--> 571\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m CalledProcessError(retcode, process\u001b[38;5;241m.\u001b[39margs,\n\u001b[0;32m 572\u001b[0m output\u001b[38;5;241m=\u001b[39mstdout, stderr\u001b[38;5;241m=\u001b[39mstderr)\n\u001b[0;32m 573\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m CompletedProcess(process\u001b[38;5;241m.\u001b[39margs, retcode, stdout, stderr)\n", - "\u001b[1;31mCalledProcessError\u001b[0m: Command '['bash', 'scripts/tune-lf.sh', 'config/qwen2_7b_lora_sft_4bit.yaml']' returned non-zero exit status 127." + "File \u001b[1;32mc:\\Users\\HT\\anaconda3\\envs\\comp\\Lib\\subprocess.py:571\u001b[0m, in \u001b[0;36mrun\u001b[1;34m(input, capture_output, timeout, check, *popenargs, **kwargs)\u001b[0m\n\u001b[0;32m 569\u001b[0m retcode \u001b[38;5;241m=\u001b[39m process\u001b[38;5;241m.\u001b[39mpoll()\n\u001b[0;32m 570\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m check \u001b[38;5;129;01mand\u001b[39;00m retcode:\n\u001b[1;32m--> 571\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m CalledProcessError(retcode, process\u001b[38;5;241m.\u001b[39margs,\n\u001b[0;32m 572\u001b[0m output\u001b[38;5;241m=\u001b[39mstdout, stderr\u001b[38;5;241m=\u001b[39mstderr)\n\u001b[0;32m 573\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m CompletedProcess(process\u001b[38;5;241m.\u001b[39margs, retcode, stdout, stderr)\n", + "\u001b[1;31mCalledProcessError\u001b[0m: Command '['bash', 'scripts/tune-lf.sh', 'config/qwen2_0.5b_lora_sft_4bit.yaml']' returned non-zero exit status 127." 
] } ], @@ -483,7 +3375,7 @@ "git_bash_path = r\"C:/Program Files/Git/bin/bash.exe\"\n", "\n", "script_path = \"scripts/tune-lf.sh\"\n", - "config_path = \"config/qwen2_7b_lora_sft_4bit.yaml\"\n", + "config_path = \"config/qwen2_0.5b_lora_sft_4bit.yaml\"\n", "\n", "# Print paths for debugging\n", "print(f\"Git Bash Path: {git_bash_path}\")\n", @@ -504,7 +3396,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -513,50 +3405,8 @@ "text": [ "Git Bash Path: C:/Program Files/Git/bin/bash.exe\n", "Script Path: scripts/tune-lf.sh\n", - "Config Path: config/qwen2_7b_lora_sft_4bit.yaml\n", - "Config file not found at config/qwen2_7b_lora_sft_4bit.yaml\n", - "Command failed with error code: 127\n", - "Output: Current Directory:\n", - "/c/Users/HT/Documents/URP/logical-reasoning/llama-factory\n", - "config/qwen2_7b_lora_sft_4bit.yaml:\n", - " {\n", - " \"model_name_or_path\": \"Qwen/Qwen2-7B\",\n", - " \"stage\": \"sft\",\n", - " \"do_train\": true,\n", - " \"finetuning_type\": \"lora\",\n", - " \"lora_target\": \"all\",\n", - " \"quantization_bit\": 4,\n", - " \"loraplus_lr_ratio\": 16.0,\n", - " \"upcast_layernorm\": true,\n", - " \"dataset\": \"mgtv_train\",\n", - " \"template\": \"qwen\",\n", - " \"cutoff_len\": 4096,\n", - " \"max_samples\": 25000,\n", - " \"overwrite_cache\": true,\n", - " \"preprocessing_num_workers\": 16,\n", - " \"output_dir\": \"saves/qwen2_7b/lora/sft_4bit\",\n", - " \"logging_steps\": 562,\n", - " \"save_steps\": 562,\n", - " \"plot_loss\": true,\n", - " \"overwrite_output_dir\": true,\n", - " \"per_device_train_batch_size\": 32,\n", - " \"gradient_accumulation_steps\": 8,\n", - " \"learning_rate\": 0.0001,\n", - " \"num_train_epochs\": 3.0,\n", - " \"lr_scheduler_type\": \"cosine\",\n", - " \"warmup_ratio\": 0.1,\n", - " \"bf16\": true,\n", - " \"ddp_timeout\": 180000000,\n", - " \"val_size\": 0.1,\n", - " \"per_device_eval_batch_size\": 1,\n", - " \"eval_strategy\": \"steps\",\n", - " \"eval_steps\": 562,\n", - " \"report_to\": \"wandb\",\n", - " \"run_name\": \"qwen2_7b\"\n", - "}\n", - "\n", - "Error: scripts/tune-lf.sh: line 9: llamafactory-cli: command not found\n", - "\n" + "Config Path: config/qwen2_0.5b_lora_sft_4bit.yaml\n", + "Config file not found at config/qwen2_0.5b_lora_sft_4bit.yaml\n" ] } ], @@ -567,7 +3417,7 @@ "git_bash_path = r\"C:/Program Files/Git/bin/bash.exe\"\n", "\n", "script_path = \"scripts/tune-lf.sh\"\n", - "config_path = \"config/qwen2_7b_lora_sft_4bit.yaml\"\n", + "config_path = \"config/qwen2_0.5b_lora_sft_4bit.yaml\"\n", "\n", "# Print paths for debugging\n", "print(f\"Git Bash Path: {git_bash_path}\")\n", @@ -601,6069 +3451,60 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "zyxvE1nfX8cq", - "outputId": "1d3bddc5-289f-48b7-c2ce-5e9bd1684ea0" - }, + "execution_count": 4, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "/content/LLaMA-Factory\n" + "c:\\Users\\HT\\Documents\\URP\\logical-reasoning\\llama-factory\n" ] } ], "source": [ - "import json\n", - "%cd /content/LLaMA-Factory/\n", - "\n", - "args = dict(\n", - " model_name_or_path=\"Qwen/Qwen2-7B\", # use Qwen/Qwen2-7B-Instruct model\n", - "\n", - " stage=\"sft\", # do supervised fine-tuning\n", - " do_train=True,\n", - " finetuning_type=\"lora\", # use LoRA adapters to save memory\n", - " lora_target=\"all\", # attach LoRA adapters to all linear layers\n", - " quantization_bit=4,\n", - " 
loraplus_lr_ratio=16.0, # 16x base LoRA learning rate\n", - "\n", - " dataset=\"mgtv_train\",\n", - " template=\"qwen\",\n", - " cutoff_len=4096,\n", - " max_samples=5000,\n", - " overwrite_cache=\"true\",\n", - " preprocessing_num_workers=16,\n", - "\n", - " output_dir=\"/content/qwen2-7b\",\n", - " logging_steps=562,\n", - " save_steps=562,\n", - " plot_loss=\"true\",\n", - " overwrite_output_dir=\"true\",\n", - "\n", - " per_device_train_batch_size=1, # the batch size\n", - " gradient_accumulation_steps=8, # the gradient accumulation steps\n", - " learning_rate=0.001, # the learning rate\n", - " num_train_epochs=6.0, # the epochs of training\n", - " lr_scheduler_type=\"cosine\", # use cosine learning rate scheduler\n", - " warmup_ratio=0.1, # use warmup scheduler\n", - " bf16=True,\n", - " ddp_timeout=180000000, #5.71 years lol\n", - "\n", - " val_size=0.1,\n", - " per_device_eval_batch_size=1,\n", - " eval_strategy=\"steps\",\n", - " eval_steps=562,\n", - "\n", - " report_to=\"wandb\",\n", - ")\n", - "\n", - "with open(\"train_qwen2_7b.json\", \"w\", encoding=\"utf-8\") as f:\n", - " json.dump(args, f, indent=2)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "id": "QlYqm4TePib3" - }, - "outputs": [], - "source": [ - "with open(\"data/dataset_info.json\", 'r+') as file:\n", - " # First we load existing data into a dict.\n", - " file_data = json.load(file)\n", - " # Insert new_data at the beginning of the emp_details list.\n", - " qwen2_7b = {\"mgtv_train\": {\n", - " \"file_name\": \"mgtv_train.json\"\n", - " }\n", - " }\n", - "\n", - " qwen2_7b.update(file_data)\n", - " file.seek(0)\n", - " # convert back to json.\n", - " json.dump(qwen2_7b, file, indent=2)" + "%cd llama-factory" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "background_save": true, - "base_uri": "https://localhost:8080/" - }, - "id": "VEuCMjMpITg-", - "outputId": "76cf7882-3ae8-4c53-8d6c-c59b3557af0e" - }, + "execution_count": 5, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "2024-07-15 14:34:28.658348: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", - "2024-07-15 14:34:28.710574: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", - "2024-07-15 14:34:28.710630: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", - "2024-07-15 14:34:28.712064: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", - "2024-07-15 14:34:28.719927: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", - "To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", - "2024-07-15 14:34:29.954969: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", - "07/15/2024 14:34:36 - WARNING - llamafactory.hparams.parser - We recommend enable `upcast_layernorm` in quantized training.\n", - "07/15/2024 14:34:36 - INFO - llamafactory.hparams.parser - Process rank: 0, device: cuda:0, n_gpu: 1, distributed training: False, compute dtype: torch.bfloat16\n", - "tokenizer_config.json: 100% 1.29k/1.29k [00:00<00:00, 9.74MB/s]\n", - "vocab.json: 100% 2.78M/2.78M [00:00<00:00, 10.4MB/s]\n", - "merges.txt: 100% 1.67M/1.67M [00:00<00:00, 6.67MB/s]\n", - "tokenizer.json: 100% 7.03M/7.03M [00:00<00:00, 18.8MB/s]\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-15 14:34:38,471 >> loading file vocab.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/vocab.json\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-15 14:34:38,471 >> loading file merges.txt from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/merges.txt\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-15 14:34:38,471 >> loading file tokenizer.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/tokenizer.json\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-15 14:34:38,471 >> loading file added_tokens.json from cache at None\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-15 14:34:38,471 >> loading file special_tokens_map.json from cache at None\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-15 14:34:38,471 >> loading file tokenizer_config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/tokenizer_config.json\n", - "[WARNING|logging.py:314] 2024-07-15 14:34:38,733 >> Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n", - "07/15/2024 14:34:38 - INFO - llamafactory.data.template - Replace eos token: <|im_end|>\n", - "07/15/2024 14:34:38 - INFO - llamafactory.data.loader - Loading dataset mgtv_train.json...\n", - "Generating train split: 25000 examples [00:01, 18396.69 examples/s]\n", - "/usr/local/lib/python3.10/dist-packages/multiprocess/popen_fork.py:66: RuntimeWarning: os.fork() was called. 
os.fork() is incompatible with multithreaded code, and JAX is multithreaded, so this will likely lead to a deadlock.\n", - " self.pid = os.fork()\n", - "Converting format of dataset (num_proc=16): 100% 5000/5000 [00:00<00:00, 21217.02 examples/s]\n", - "Running tokenizer on dataset (num_proc=16): 100% 5000/5000 [00:02<00:00, 1705.27 examples/s]\n", - "training example:\n", - "input_ids:\n", - "[151644, 8948, 198, 2610, 525, 264, 10950, 17847, 13, 151645, 198, 151644, 872, 198, 56568, 101909, 104913, 99329, 9370, 106040, 1773, 99329, 104190, 104506, 5122, 16, 13, 26853, 224, 57218, 28946, 36993, 101051, 46944, 107969, 33872, 1773, 17, 13, 26853, 224, 57218, 28946, 105125, 107666, 36407, 45912, 105814, 3837, 104482, 117647, 107969, 33872, 1773, 18, 13, 69162, 34204, 103991, 86119, 3837, 106040, 44063, 100345, 107591, 102104, 87752, 105220, 109487, 100653, 5122, 20412, 5373, 99520, 5373, 16530, 99335, 5373, 102104, 88991, 5373, 56007, 24339, 32100, 1773, 19, 13, 49602, 252, 99590, 15946, 53153, 42855, 99885, 102158, 27369, 3837, 105827, 65770, 99475, 109487, 101047, 110281, 18600, 1773, 77557, 3837, 108620, 99360, 2073, 99520, 854, 65770, 99475, 12857, 2073, 16530, 55807, 20, 13, 26853, 224, 57218, 28946, 85106, 100345, 102104, 36407, 113272, 90395, 103941, 109363, 107969, 33872, 9370, 88991, 102349, 1773, 14880, 110439, 100001, 104190, 102104, 111842, 101080, 103936, 1773, 107969, 33872, 25, 4687, 107591, 25, 4687, 111842, 101080, 103936, 25, 5613, 107969, 33872, 25, 220, 100833, 53930, 107969, 5122, 102505, 9370, 115865, 73562, 109628, 45629, 105489, 3837, 104133, 111718, 106023, 5122, 101988, 115865, 110731, 9370, 105419, 3837, 115865, 99810, 69249, 59743, 104133, 104003, 115865, 36993, 16530, 101401, 68536, 99723, 3837, 115967, 104270, 102060, 110666, 112031, 1773, 14880, 109363, 115865, 110786, 101423, 104249, 1773, 107591, 25, 10236, 250, 253, 48921, 101221, 57218, 101961, 7948, 100894, 9370, 99288, 99818, 101063, 1773, 104269, 99288, 99818, 100774, 13343, 3837, 99798, 57218, 101961, 105664, 102373, 48921, 100271, 1773, 99650, 105616, 18493, 115865, 110731, 9370, 105419, 104388, 1773, 103968, 3837, 102606, 102115, 17340, 3837, 102373, 18493, 106340, 24562, 99774, 82224, 104424, 15946, 99372, 99244, 1773, 110597, 9370, 99288, 99818, 100012, 101416, 63109, 99242, 9370, 102373, 3837, 101988, 101938, 44063, 104003, 115865, 101329, 99314, 3837, 107974, 102373, 9370, 104575, 24562, 3837, 105699, 116418, 100005, 103000, 90663, 1773, 100147, 101070, 105443, 34187, 100097, 3837, 104989, 100833, 69249, 46944, 105190, 9370, 106023, 1773, 111842, 101080, 103936, 25, 4891, 223, 115, 100623, 21317, 99315, 101037, 151645, 198, 151644, 77091, 198, 99520, 151645]\n", - "inputs:\n", - "<|im_start|>system\n", - "You are a helpful assistant.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:1. 参与者会得到一个谜题。2. 参与者可以通过提问来获取线索,尝试解开谜题。3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。5. 
参与者需要根据回答来推理,并最终找出谜题的正确答案。请严格按照这些规则回答参与者提出的问题。谜题: {}实际情况: {}参与者提出的问题: {}\n", - "谜题: 乡村之谜:消失的南瓜 在甄家村里,有一个古老的传说:每年南瓜丰收的季节,南瓜田里总有一个最大的南瓜会不翼而飞,村民们对此现象困惑不解。请找出南瓜失踪背后的原因。实际情况: 真相原来与一位年迈的农夫有关。这位农夫年轻时,曾与一位美丽的姑娘相恋。他们约定在南瓜丰收的季节结婚。然而,命运弄人,姑娘在婚礼前的一场意外中离世。悲伤的农夫为了纪念心爱的姑娘,每年都会将最大的南瓜偷走,放到姑娘的墓前,以此寄托自己的哀思。这一行为延续了多年,成为了乡村里一个神秘的传说。参与者提出的问题: 偷的人信神吗<|im_end|>\n", - "<|im_start|>assistant\n", - "不是<|im_end|>\n", - "label_ids:\n", - "[-100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, -100, 99520, 151645]\n", - "labels:\n", - "不是<|im_end|>\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "config.json: 100% 664/664 [00:00<00:00, 5.23MB/s]\n", - "[INFO|configuration_utils.py:733] 2024-07-15 14:34:44,448 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-15 14:34:44,451 >> Model config Qwen2Config {\n", - " \"_name_or_path\": \"Qwen/Qwen2-7B\",\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "07/15/2024 14:34:44 - INFO - llamafactory.model.model_utils.quantization - Quantizing model to 4 bit with bitsandbytes.\n", - "model.safetensors.index.json: 100% 27.8k/27.8k [00:00<00:00, 99.4MB/s]\n", - "[INFO|modeling_utils.py:3474] 2024-07-15 14:34:44,977 >> loading weights file model.safetensors from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/model.safetensors.index.json\n", - "Downloading shards: 0% 0/4 [00:00> Instantiating Qwen2ForCausalLM model under default dtype torch.bfloat16.\n", - "[INFO|configuration_utils.py:962] 2024-07-15 14:36:16,412 >> Generate config GenerationConfig {\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643\n", - "}\n", - "\n", - "Loading checkpoint shards: 100% 4/4 [00:06<00:00, 1.55s/it]\n", - "[INFO|modeling_utils.py:4280] 2024-07-15 14:36:26,291 >> All model checkpoint weights were used when initializing Qwen2ForCausalLM.\n", - "\n", - "[INFO|modeling_utils.py:4288] 2024-07-15 14:36:26,291 >> All the weights of Qwen2ForCausalLM were initialized from the model checkpoint at Qwen/Qwen2-7B.\n", - "If your task is similar to the task the model of the checkpoint was trained on, you can already use Qwen2ForCausalLM for predictions without further training.\n", - "generation_config.json: 100% 138/138 [00:00<00:00, 1.11MB/s]\n", - "[INFO|configuration_utils.py:917] 2024-07-15 14:36:26,489 >> loading configuration file generation_config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/generation_config.json\n", - "[INFO|configuration_utils.py:962] 2024-07-15 14:36:26,489 >> Generate config GenerationConfig {\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"max_new_tokens\": 2048\n", - "}\n", - "\n", - "07/15/2024 14:36:27 - INFO - llamafactory.model.model_utils.checkpointing - Gradient checkpointing enabled.\n", - "07/15/2024 14:36:27 - INFO - llamafactory.model.model_utils.attention - Using torch SDPA for faster training and inference.\n", - "07/15/2024 14:36:27 - INFO - llamafactory.model.adapter 
- Upcasting trainable params to float32.\n", - "07/15/2024 14:36:27 - INFO - llamafactory.model.adapter - Fine-tuning method: LoRA\n", - "07/15/2024 14:36:27 - INFO - llamafactory.model.model_utils.misc - Found linear modules: q_proj,down_proj,o_proj,gate_proj,v_proj,up_proj,k_proj\n", - "07/15/2024 14:36:27 - INFO - llamafactory.model.loader - trainable params: 20,185,088 || all params: 7,635,801,600 || trainable%: 0.2643\n", - "[INFO|trainer.py:641] 2024-07-15 14:36:27,732 >> Using auto half precision backend\n", - "07/15/2024 14:36:28 - INFO - llamafactory.train.trainer_utils - Using LoRA+ optimizer with loraplus lr ratio 16.00.\n", - "[INFO|trainer.py:2078] 2024-07-15 14:36:28,977 >> ***** Running training *****\n", - "[INFO|trainer.py:2079] 2024-07-15 14:36:28,977 >> Num examples = 4,500\n", - "[INFO|trainer.py:2080] 2024-07-15 14:36:28,977 >> Num Epochs = 6\n", - "[INFO|trainer.py:2081] 2024-07-15 14:36:28,977 >> Instantaneous batch size per device = 1\n", - "[INFO|trainer.py:2084] 2024-07-15 14:36:28,977 >> Total train batch size (w. parallel, distributed & accumulation) = 8\n", - "[INFO|trainer.py:2085] 2024-07-15 14:36:28,977 >> Gradient Accumulation steps = 8\n", - "[INFO|trainer.py:2086] 2024-07-15 14:36:28,977 >> Total optimization steps = 3,372\n", - "[INFO|trainer.py:2087] 2024-07-15 14:36:28,981 >> Number of trainable parameters = 20,185,088\n", - "[INFO|integration_utils.py:723] 2024-07-15 14:36:28,986 >> Automatic Weights & Biases logging enabled, to disable set os.environ[\"WANDB_DISABLED\"] = \"true\"\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: \u001b[33mWARNING\u001b[0m The `run_name` is currently set to the same value as `TrainingArguments.output_dir`. If this was not intended, please specify a different run name by setting the `TrainingArguments.run_name` parameter.\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Currently logged in as: \u001b[33minflaton-sg\u001b[0m (\u001b[33minflaton-ai\u001b[0m). Use \u001b[1m`wandb login --relogin`\u001b[0m to force relogin\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Tracking run with wandb version 0.17.4\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Run data is saved locally in \u001b[35m\u001b[1m/content/LLaMA-Factory/wandb/run-20240715_143630-ancw8jgs\u001b[0m\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Run \u001b[1m`wandb offline`\u001b[0m to turn off syncing.\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Syncing run \u001b[33m/content/qwen2-7b\u001b[0m\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: ⭐️ View project at \u001b[34m\u001b[4mhttps://wandb.ai/inflaton-ai/huggingface\u001b[0m\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: 🚀 View run at \u001b[34m\u001b[4mhttps://wandb.ai/inflaton-ai/huggingface/runs/ancw8jgs\u001b[0m\n", - "{'loss': 1.9143, 'grad_norm': 2.1186106204986572, 'learning_rate': 0.000986610734407955, 'epoch': 1.0}\n", - " 17% 562/3372 [1:01:09<5:02:54, 6.47s/it][INFO|trainer.py:3719] 2024-07-15 15:37:39,784 >> ***** Running Evaluation *****\n", - "[INFO|trainer.py:3721] 2024-07-15 15:37:39,784 >> Num examples = 500\n", - "[INFO|trainer.py:3724] 2024-07-15 15:37:39,785 >> Batch size = 1\n", - "\n", - " 0% 0/500 [00:00> Saving model checkpoint to /content/qwen2-7b/checkpoint-562\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-15 15:39:50,101 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-15 15:39:50,102 >> Model config Qwen2Config {\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "[INFO|tokenization_utils_base.py:2513] 2024-07-15 15:39:50,298 >> tokenizer config file saved in /content/qwen2-7b/checkpoint-562/tokenizer_config.json\n", - "[INFO|tokenization_utils_base.py:2522] 2024-07-15 15:39:50,298 >> Special tokens file saved in /content/qwen2-7b/checkpoint-562/special_tokens_map.json\n", - "{'loss': 0.847, 'grad_norm': 0.33948227763175964, 'learning_rate': 0.0008433439152121052, 'epoch': 2.0}\n", - " 33% 1124/3372 [2:04:27<4:05:43, 6.56s/it][INFO|trainer.py:3719] 2024-07-15 16:40:58,115 >> ***** Running Evaluation *****\n", - "[INFO|trainer.py:3721] 2024-07-15 16:40:58,115 >> Num examples = 500\n", - "[INFO|trainer.py:3724] 2024-07-15 16:40:58,115 >> Batch size = 1\n", - "\n", - " 0% 0/500 [00:00> Saving model checkpoint to /content/qwen2-7b/checkpoint-1124\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-15 16:43:11,686 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-15 16:43:11,687 >> Model config Qwen2Config {\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "[INFO|tokenization_utils_base.py:2513] 2024-07-15 16:43:11,872 >> tokenizer config file saved in /content/qwen2-7b/checkpoint-1124/tokenizer_config.json\n", - "[INFO|tokenization_utils_base.py:2522] 2024-07-15 16:43:11,873 >> Special tokens file saved in /content/qwen2-7b/checkpoint-1124/special_tokens_map.json\n", - "{'loss': 0.5831, 'grad_norm': 0.08739642798900604, 'learning_rate': 0.0005870506865895984, 'epoch': 3.0}\n", - " 50% 1686/3372 [3:08:20<3:02:02, 6.48s/it][INFO|trainer.py:3719] 2024-07-15 17:44:50,834 >> ***** Running Evaluation *****\n", - "[INFO|trainer.py:3721] 2024-07-15 17:44:50,834 >> Num examples = 500\n", - "[INFO|trainer.py:3724] 2024-07-15 17:44:50,835 >> Batch size = 1\n", - "\n", - " 0% 0/500 [00:00> Saving model checkpoint to /content/qwen2-7b/checkpoint-1686\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-15 17:47:04,904 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-15 17:47:04,905 >> Model config Qwen2Config {\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "[INFO|tokenization_utils_base.py:2513] 2024-07-15 17:47:05,097 >> tokenizer config file saved in /content/qwen2-7b/checkpoint-1686/tokenizer_config.json\n", - "[INFO|tokenization_utils_base.py:2522] 2024-07-15 17:47:05,098 >> Special tokens file saved in /content/qwen2-7b/checkpoint-1686/special_tokens_map.json\n", - "{'loss': 0.5469, 'grad_norm': 0.5723391771316528, 'learning_rate': 0.00030210098232438424, 'epoch': 4.0}\n", - " 67% 2248/3372 [4:12:15<2:04:11, 6.63s/it][INFO|trainer.py:3719] 2024-07-15 18:48:45,813 >> ***** Running Evaluation *****\n", - "[INFO|trainer.py:3721] 2024-07-15 18:48:45,813 >> Num examples = 500\n", - "[INFO|trainer.py:3724] 2024-07-15 18:48:45,814 >> Batch size = 1\n", - "\n", - " 0% 0/500 [00:00> Saving model checkpoint to /content/qwen2-7b/checkpoint-2248\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-15 18:50:59,783 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-15 18:50:59,784 >> Model config Qwen2Config {\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "[INFO|tokenization_utils_base.py:2513] 2024-07-15 18:50:59,970 >> tokenizer config file saved in /content/qwen2-7b/checkpoint-2248/tokenizer_config.json\n", - "[INFO|tokenization_utils_base.py:2522] 2024-07-15 18:50:59,970 >> Special tokens file saved in /content/qwen2-7b/checkpoint-2248/special_tokens_map.json\n", - "{'loss': 0.5323, 'grad_norm': 0.26129332184791565, 'learning_rate': 8.229824704832284e-05, 'epoch': 5.0}\n", - " 83% 2810/3372 [5:16:10<1:01:38, 6.58s/it][INFO|trainer.py:3719] 2024-07-15 19:52:41,540 >> ***** Running Evaluation *****\n", - "[INFO|trainer.py:3721] 2024-07-15 19:52:41,541 >> Num examples = 500\n", - "[INFO|trainer.py:3724] 2024-07-15 19:52:41,541 >> Batch size = 1\n", - "\n", - " 0% 0/500 [00:00> Saving model checkpoint to /content/qwen2-7b/checkpoint-2810\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-15 19:54:55,909 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-15 19:54:55,910 >> Model config Qwen2Config {\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "[INFO|tokenization_utils_base.py:2513] 2024-07-15 19:54:56,097 >> tokenizer config file saved in /content/qwen2-7b/checkpoint-2810/tokenizer_config.json\n", - "[INFO|tokenization_utils_base.py:2522] 2024-07-15 19:54:56,098 >> Special tokens file saved in /content/qwen2-7b/checkpoint-2810/special_tokens_map.json\n", - "{'loss': 0.5229, 'grad_norm': 0.18251831829547882, 'learning_rate': 0.0, 'epoch': 5.99}\n", - "100% 3372/3372 [6:20:08<00:00, 6.71s/it][INFO|trainer.py:3719] 2024-07-15 20:56:39,578 >> ***** Running Evaluation *****\n", - "[INFO|trainer.py:3721] 2024-07-15 20:56:39,578 >> Num examples = 500\n", - "[INFO|trainer.py:3724] 2024-07-15 20:56:39,578 >> Batch size = 1\n", - "\n", - " 0% 0/500 [00:00> Saving model checkpoint to /content/qwen2-7b/checkpoint-3372\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-15 20:58:53,452 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-15 20:58:53,453 >> Model config Qwen2Config {\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "[INFO|tokenization_utils_base.py:2513] 2024-07-15 20:58:53,632 >> tokenizer config file saved in /content/qwen2-7b/checkpoint-3372/tokenizer_config.json\n", - "[INFO|tokenization_utils_base.py:2522] 2024-07-15 20:58:53,633 >> Special tokens file saved in /content/qwen2-7b/checkpoint-3372/special_tokens_map.json\n", - "[INFO|trainer.py:2329] 2024-07-15 20:58:54,110 >> \n", - "\n", - "Training completed. Do not forget to share your model on huggingface.co/models =)\n", - "\n", - "\n", - "{'train_runtime': 22945.1289, 'train_samples_per_second': 1.177, 'train_steps_per_second': 0.147, 'train_loss': 0.8244021631786125, 'epoch': 5.99}\n", - "100% 3372/3372 [6:22:23<00:00, 6.80s/it]\n", - "[INFO|trainer.py:3410] 2024-07-15 20:58:54,115 >> Saving model checkpoint to /content/qwen2-7b\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-15 20:58:54,406 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-15 20:58:54,407 >> Model config Qwen2Config {\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "[INFO|tokenization_utils_base.py:2513] 2024-07-15 20:58:54,600 >> tokenizer config file saved in /content/qwen2-7b/tokenizer_config.json\n", - "[INFO|tokenization_utils_base.py:2522] 2024-07-15 20:58:54,600 >> Special tokens file saved in /content/qwen2-7b/special_tokens_map.json\n", - "***** train metrics *****\n", - " epoch = 5.9947\n", - " total_flos = 396658758GF\n", - " train_loss = 0.8244\n", - " train_runtime = 6:22:25.12\n", - " train_samples_per_second = 1.177\n", - " train_steps_per_second = 0.147\n", - "Figure saved at: /content/qwen2-7b/training_loss.png\n", - "Figure saved at: /content/qwen2-7b/training_eval_loss.png\n", - "Figure saved at: /content/qwen2-7b/training_eval_accuracy.png\n", - "[INFO|trainer.py:3719] 2024-07-15 20:58:55,263 >> ***** Running Evaluation *****\n", - "[INFO|trainer.py:3721] 2024-07-15 20:58:55,264 >> Num examples = 500\n", - "[INFO|trainer.py:3724] 2024-07-15 20:58:55,264 >> Batch size = 1\n", - "100% 500/500 [02:13<00:00, 3.74it/s]\n", - "***** eval metrics *****\n", - " epoch = 5.9947\n", - " eval_accuracy = 0.7747\n", - " eval_loss = 0.513\n", - " eval_runtime = 0:02:13.97\n", - " eval_samples_per_second = 3.732\n", - " eval_steps_per_second = 3.732\n", - "[INFO|modelcard.py:450] 2024-07-15 21:01:09,246 >> Dropping the following result as it does not have all the necessary fields:\n", - "{'task': {'name': 'Causal Language Modeling', 'type': 'text-generation'}, 'metrics': [{'name': 'Accuracy', 'type': 'accuracy', 'value': 0.7746666666666665}]}\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: \n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Run history:\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/accuracy ▁▅█████\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/loss █▄▁▁▁▁▁\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/runtime ▁▇█▇█▇█\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/samples_per_second █▂▁▂▁▂▁\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/steps_per_second █▂▁▂▁▂▁\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train/epoch ▁▁▂▂▄▄▅▅▇▇████\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train/global_step ▁▁▂▂▄▄▅▅▇▇████\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train/grad_norm █▂▁▃▂▁\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train/learning_rate █▇▅▃▂▁\n", - 
"\u001b[34m\u001b[1mwandb\u001b[0m: train/loss █▃▁▁▁▁\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: \n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Run summary:\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/accuracy 0.77467\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/loss 0.51301\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/runtime 133.9784\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/samples_per_second 3.732\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: eval/steps_per_second 3.732\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: total_flos 4.259090989881262e+17\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train/epoch 5.99467\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train/global_step 3372\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train/grad_norm 0.18252\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train/learning_rate 0.0\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train/loss 0.5229\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train_loss 0.8244\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train_runtime 22945.1289\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train_samples_per_second 1.177\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: train_steps_per_second 0.147\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: \n", - "\u001b[34m\u001b[1mwandb\u001b[0m: 🚀 View run \u001b[33m/content/qwen2-7b\u001b[0m at: \u001b[34m\u001b[4mhttps://wandb.ai/inflaton-ai/huggingface/runs/ancw8jgs\u001b[0m\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: ⭐️ View project at: \u001b[34m\u001b[4mhttps://wandb.ai/inflaton-ai/huggingface\u001b[0m\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Synced 5 W&B file(s), 0 media file(s), 0 artifact file(s) and 0 other file(s)\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Find logs at: \u001b[35m\u001b[1m./wandb/run-20240715_143630-ancw8jgs/logs\u001b[0m\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: \u001b[33mWARNING\u001b[0m The new W&B backend becomes opt-out in version 0.18.0; try it out with `wandb.require(\"core\")`! 
See https://wandb.me/wandb-core for more information.\n" + "^C\n" ] } ], "source": [ - "!llamafactory-cli train train_qwen2_7b.json" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "background_save": true - }, - "id": "PPHT4JDoIvGk", - "outputId": "868b542e-3cf3-4f96-b3ff-944d48f66e9e" - }, - "outputs": [ - { - "data": { - "application/vnd.google.colaboratory.intrinsic+json": { - "type": "string" - }, - "text/plain": [ - "'/content/drive/MyDrive/runs/qwen2-7b'" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import shutil\n", - "shutil.move(\"/content/qwen2-7b\", \"/content/drive/MyDrive/runs\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 35 - }, - "id": "KQolpdAGpUqx", - "outputId": "23443a95-ec97-4633-87d6-18d2f27d979e" - }, - "outputs": [ - { - "data": { - "application/vnd.google.colaboratory.intrinsic+json": { - "type": "string" - }, - "text/plain": [ - "'/content/qwen2-7b'" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import shutil\n", - "shutil.move(\"/content/drive/MyDrive/runs\", \"/content/qwen2-7b\", )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "id": "mzmNMevzVer3" - }, - "outputs": [], - "source": [ - "def evaluate_model_all_epochs(model_name, adapter_path_base, num_train_epochs, start_epoch=0, load_in_4bit=True, num_of_entries=-1):\n", - " os.environ[\"MODEL_NAME\"] = model_name\n", - " os.environ[\"LOAD_IN_4BIT\"] = \"true\" if load_in_4bit else \"false\"\n", - " for i in range(start_epoch, num_train_epochs + 1):\n", - " print(f\"Epoch {i}\")\n", - " if i == 0:\n", - " os.unsetenv(\"ADAPTER_NAME_OR_PATH\")\n", - " else:\n", - " adapter_path = f\"{adapter_path_base}/checkpoint-{562 * i}\"\n", - " os.environ[\"ADAPTER_NAME_OR_PATH\"] = adapter_path\n", - "\n", - " !python llm_toolkit/eval_logical_reasoning.py {num_of_entries}" + "!llamafactory-cli train config/qwen2_0.5b_lora_sft_4bit.yaml" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "3THuVusvVtt8", - "outputId": "2095b621-3aff-4215-f61d-0711acd42e63" - }, + "execution_count": 1, + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch 0\n", - "loading env vars from: /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/.env\n", - "Adding /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning to sys.path\n", - "2024-07-16 03:59:05.588323: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", - "2024-07-16 03:59:05.639368: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", - "2024-07-16 03:59:05.639412: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", - "2024-07-16 03:59:05.640960: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", - "2024-07-16 03:59:05.648585: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", - "To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", - "2024-07-16 03:59:06.864846: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", - "loading /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\n", - "Qwen/Qwen2-7B None False datasets/mgtv results/mgtv-results_02_qwen2_7b_colab.csv\n", - "(1) GPU = NVIDIA L4. Max memory = 22.168 GB.\n", - "0.0 GB of memory reserved.\n", - "loading model: Qwen/Qwen2-7B\n", - "tokenizer_config.json: 100% 1.29k/1.29k [00:00<00:00, 8.65MB/s]\n", - "vocab.json: 100% 2.78M/2.78M [00:00<00:00, 8.33MB/s]\n", - "merges.txt: 100% 1.67M/1.67M [00:00<00:00, 6.20MB/s]\n", - "tokenizer.json: 100% 7.03M/7.03M [00:00<00:00, 15.8MB/s]\n", - "Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n", - "config.json: 100% 664/664 [00:00<00:00, 4.79MB/s]\n", - "model.safetensors.index.json: 100% 27.8k/27.8k [00:00<00:00, 74.4MB/s]\n", - "Downloading shards: 0% 0/4 [00:00system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "\n", - "实际情况: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "\n", - "参与者提出的问题: 甄加索是自杀吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "不是<|endoftext|>\n", - "--------------------------------------------------\n", - "prompt: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中���任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 
参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "\n", - "实际情况: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "\n", - "参与者提出的问题: 甄加索是自杀吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "\n", - "--------------------------------------------------\n", - "text: 死者受伤了吗\n", - "--------------------------------------------------\n", - "label: 不是\n", - "--------------------------------------------------\n", - "answer: nan\n", - "--------------------------------------------------\n", - "title: 甄庄哭声\n", - "--------------------------------------------------\n", - "puzzle: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "--------------------------------------------------\n", - "truth: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "--------------------------------------------------\n", - "train_text: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "\n", - "实际情况: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "\n", - "参与者提出的问题: 死者受伤了吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "不是<|endoftext|>\n", - "--------------------------------------------------\n", - "prompt: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 
参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "\n", - "实际情况: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "\n", - "参与者提出的问题: 死者受伤了吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "\n", - "Evaluating model: Qwen/Qwen2-7B\n", - " 0% 0/3000 [00:00\n", - " predictions = eval_model(model, tokenizer, datasets[\"test\"])\n", - " File \"/content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\", line 215, in eval_model\n", - " outputs = model.generate(**inputs, max_new_tokens=4096, use_cache=False)\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py\", line 115, in decorate_context\n", - " return func(*args, **kwargs)\n", - " File \"/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py\", line 1758, in generate\n", - " result = self._sample(\n", - " File \"/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py\", line 2397, in _sample\n", - " outputs = self(\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\", line 1532, in _wrapped_call_impl\n", - " return self._call_impl(*args, **kwargs)\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\", line 1541, in _call_impl\n", - " return forward_call(*args, **kwargs)\n", - " File \"/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py\", line 1163, in forward\n", - " logits = logits.float()\n", - "torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 GiB. GPU \n", - "Epoch 1\n", - "loading env vars from: /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/.env\n", - "Adding /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning to sys.path\n", - "2024-07-16 04:36:42.030763: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", - "2024-07-16 04:36:42.082994: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", - "2024-07-16 04:36:42.083052: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", - "2024-07-16 04:36:42.084468: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", - "2024-07-16 04:36:42.092383: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", - "To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", - "2024-07-16 04:36:43.353969: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", - "loading /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\n", - "Qwen/Qwen2-7B /content/qwen2-7b/qwen2-7b/checkpoint-562 False datasets/mgtv results/mgtv-results_02_qwen2_7b_colab.csv\n", - "(1) GPU = NVIDIA L4. Max memory = 22.168 GB.\n", - "0.0 GB of memory reserved.\n", - "loading model: Qwen/Qwen2-7B\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 04:36:49,648 >> loading file vocab.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/vocab.json\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 04:36:49,648 >> loading file merges.txt from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/merges.txt\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 04:36:49,648 >> loading file tokenizer.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/tokenizer.json\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 04:36:49,648 >> loading file added_tokens.json from cache at None\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 04:36:49,648 >> loading file special_tokens_map.json from cache at None\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 04:36:49,648 >> loading file tokenizer_config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/tokenizer_config.json\n", - "[WARNING|logging.py:314] 2024-07-16 04:36:49,914 >> Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n", - "07/16/2024 04:36:49 - INFO - llamafactory.data.template - Replace eos token: <|im_end|>\n", - "07/16/2024 04:36:49 - INFO - llamafactory.data.template - Add <|im_start|> to stop words.\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-16 04:36:50,018 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-16 04:36:50,019 >> Model config Qwen2Config {\n", - " \"_name_or_path\": \"Qwen/Qwen2-7B\",\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "07/16/2024 04:36:50 - INFO - llamafactory.model.patcher - Using KV cache for faster generation.\n", - "[INFO|modeling_utils.py:3474] 2024-07-16 04:36:50,051 >> loading weights file model.safetensors from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/model.safetensors.index.json\n", - "[INFO|modeling_utils.py:1519] 2024-07-16 04:36:50,054 >> Instantiating Qwen2ForCausalLM model under default dtype torch.bfloat16.\n", - "[INFO|configuration_utils.py:962] 2024-07-16 04:36:50,055 >> Generate config GenerationConfig {\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643\n", - "}\n", - "\n", - "Loading checkpoint shards: 100% 4/4 [00:06<00:00, 1.66s/it]\n", - "[INFO|modeling_utils.py:4280] 2024-07-16 04:36:59,526 >> All model checkpoint weights were used when initializing Qwen2ForCausalLM.\n", - "\n", - "[INFO|modeling_utils.py:4288] 2024-07-16 04:36:59,526 >> All the weights of Qwen2ForCausalLM were initialized from the model checkpoint at Qwen/Qwen2-7B.\n", - "If your task is similar to the task the model of the checkpoint was trained on, you can already use Qwen2ForCausalLM for predictions without further training.\n", - "[INFO|configuration_utils.py:917] 2024-07-16 04:36:59,673 >> loading configuration file generation_config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/generation_config.json\n", - "[INFO|configuration_utils.py:962] 2024-07-16 04:36:59,673 >> Generate config GenerationConfig {\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"max_new_tokens\": 2048\n", - "}\n", - "\n", - "07/16/2024 04:37:00 - INFO - llamafactory.model.model_utils.attention - Using torch SDPA for faster training and inference.\n", - "07/16/2024 04:37:01 - INFO - llamafactory.model.adapter - Merged 1 adapter(s).\n", - "07/16/2024 04:37:01 - INFO - llamafactory.model.adapter - Loaded adapter(s): /content/qwen2-7b/qwen2-7b/checkpoint-562\n", - "07/16/2024 04:37:01 - INFO - llamafactory.model.loader - all params: 7,615,616,512\n", - "(2) GPU = NVIDIA L4. 
Max memory = 22.168 GB.\n", - "16.521 GB of memory reserved.\n", - "loading train/test data files\n", - "Map: 100% 25000/25000 [00:01<00:00, 22266.31 examples/s]\n", - "Map: 100% 3000/3000 [00:00<00:00, 22229.64 examples/s]\n", - "DatasetDict({\n", - " train: Dataset({\n", - " features: ['text', 'label', 'answer', 'title', 'puzzle', 'truth', 'train_text', 'prompt'],\n", - " num_rows: 25000\n", - " })\n", - " test: Dataset({\n", - " features: ['text', 'label', 'answer', 'title', 'puzzle', 'truth', 'train_text', 'prompt'],\n", - " num_rows: 3000\n", - " })\n", - "})\n", - "--------------------------------------------------\n", - "text: 甄加索是自杀吗\n", - "--------------------------------------------------\n", - "label: 不是\n", - "--------------------------------------------------\n", - "answer: nan\n", - "--------------------------------------------------\n", - "title: 海岸之谜\n", - "--------------------------------------------------\n", - "puzzle: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "--------------------------------------------------\n", - "truth: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "--------------------------------------------------\n", - "train_text: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "\n", - "实际情况: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "\n", - "参与者提出的问题: 甄加索是自杀吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "不是<|im_end|>\n", - "--------------------------------------------------\n", - "prompt: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 
参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "\n", - "实际情况: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "\n", - "参与者提出的问题: 甄加索是自杀吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "\n", - "--------------------------------------------------\n", - "text: 死者受伤了吗\n", - "--------------------------------------------------\n", - "label: 不是\n", - "--------------------------------------------------\n", - "answer: nan\n", - "--------------------------------------------------\n", - "title: 甄庄哭声\n", - "--------------------------------------------------\n", - "puzzle: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "--------------------------------------------------\n", - "truth: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "--------------------------------------------------\n", - "train_text: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "\n", - "实际情况: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "\n", - "参与者提出的问题: 死者受伤了吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "不是<|im_end|>\n", - "--------------------------------------------------\n", - "prompt: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 
参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "\n", - "实际情况: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "\n", - "参与者提出的问题: 死者受伤了吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "\n", - "Evaluating model: Qwen/Qwen2-7B\n", - " 0% 0/3000 [00:00> Setting `pad_token_id` to `eos_token_id`:151643 for open-end generation.\n", - " 0% 0/3000 [30:43\n", - " predictions = eval_model(model, tokenizer, datasets[\"test\"])\n", - " File \"/content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\", line 215, in eval_model\n", - " outputs = model.generate(**inputs, max_new_tokens=4096, use_cache=False)\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py\", line 115, in decorate_context\n", - " return func(*args, **kwargs)\n", - " File \"/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py\", line 1758, in generate\n", - " result = self._sample(\n", - " File \"/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py\", line 2397, in _sample\n", - " outputs = self(\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\", line 1532, in _wrapped_call_impl\n", - " return self._call_impl(*args, **kwargs)\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\", line 1541, in _call_impl\n", - " return forward_call(*args, **kwargs)\n", - " File \"/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py\", line 1163, in forward\n", - " logits = logits.float()\n", - "torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.92 GiB. GPU \n", - "Epoch 2\n", - "loading env vars from: /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/.env\n", - "Adding /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning to sys.path\n", - "2024-07-16 05:07:51.574401: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", - "2024-07-16 05:07:51.624732: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", - "2024-07-16 05:07:51.624785: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", - "2024-07-16 05:07:51.626182: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", - "2024-07-16 05:07:51.633853: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", - "To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", - "2024-07-16 05:07:52.903770: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", - "loading /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\n", - "Qwen/Qwen2-7B /content/qwen2-7b/qwen2-7b/checkpoint-1124 False datasets/mgtv results/mgtv-results_02_qwen2_7b_colab.csv\n", - "(1) GPU = NVIDIA L4. Max memory = 22.168 GB.\n", - "0.0 GB of memory reserved.\n", - "loading model: Qwen/Qwen2-7B\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:07:59,358 >> loading file vocab.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/vocab.json\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:07:59,358 >> loading file merges.txt from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/merges.txt\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:07:59,358 >> loading file tokenizer.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/tokenizer.json\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:07:59,358 >> loading file added_tokens.json from cache at None\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:07:59,358 >> loading file special_tokens_map.json from cache at None\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:07:59,358 >> loading file tokenizer_config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/tokenizer_config.json\n", - "[WARNING|logging.py:314] 2024-07-16 05:07:59,635 >> Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n", - "07/16/2024 05:07:59 - INFO - llamafactory.data.template - Replace eos token: <|im_end|>\n", - "07/16/2024 05:07:59 - INFO - llamafactory.data.template - Add <|im_start|> to stop words.\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-16 05:07:59,725 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-16 05:07:59,727 >> Model config Qwen2Config {\n", - " \"_name_or_path\": \"Qwen/Qwen2-7B\",\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "07/16/2024 05:07:59 - INFO - llamafactory.model.patcher - Using KV cache for faster generation.\n", - "[INFO|modeling_utils.py:3474] 2024-07-16 05:07:59,758 >> loading weights file model.safetensors from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/model.safetensors.index.json\n", - "[INFO|modeling_utils.py:1519] 2024-07-16 05:07:59,761 >> Instantiating Qwen2ForCausalLM model under default dtype torch.bfloat16.\n", - "[INFO|configuration_utils.py:962] 2024-07-16 05:07:59,762 >> Generate config GenerationConfig {\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643\n", - "}\n", - "\n", - "Loading checkpoint shards: 100% 4/4 [00:05<00:00, 1.44s/it]\n", - "[INFO|modeling_utils.py:4280] 2024-07-16 05:08:08,371 >> All model checkpoint weights were used when initializing Qwen2ForCausalLM.\n", - "\n", - "[INFO|modeling_utils.py:4288] 2024-07-16 05:08:08,371 >> All the weights of Qwen2ForCausalLM were initialized from the model checkpoint at Qwen/Qwen2-7B.\n", - "If your task is similar to the task the model of the checkpoint was trained on, you can already use Qwen2ForCausalLM for predictions without further training.\n", - "[INFO|configuration_utils.py:917] 2024-07-16 05:08:08,465 >> loading configuration file generation_config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/generation_config.json\n", - "[INFO|configuration_utils.py:962] 2024-07-16 05:08:08,465 >> Generate config GenerationConfig {\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"max_new_tokens\": 2048\n", - "}\n", - "\n", - "07/16/2024 05:08:09 - INFO - llamafactory.model.model_utils.attention - Using torch SDPA for faster training and inference.\n", - "07/16/2024 05:08:09 - INFO - llamafactory.model.adapter - Merged 1 adapter(s).\n", - "07/16/2024 05:08:09 - INFO - llamafactory.model.adapter - Loaded adapter(s): /content/qwen2-7b/qwen2-7b/checkpoint-1124\n", - "07/16/2024 05:08:09 - INFO - llamafactory.model.loader - all params: 7,615,616,512\n", - "(2) GPU = NVIDIA L4. 
Max memory = 22.168 GB.\n", - "16.521 GB of memory reserved.\n", - "loading train/test data files\n", - "DatasetDict({\n", - " train: Dataset({\n", - " features: ['text', 'label', 'answer', 'title', 'puzzle', 'truth', 'train_text', 'prompt'],\n", - " num_rows: 25000\n", - " })\n", - " test: Dataset({\n", - " features: ['text', 'label', 'answer', 'title', 'puzzle', 'truth', 'train_text', 'prompt'],\n", - " num_rows: 3000\n", - " })\n", - "})\n", - "--------------------------------------------------\n", - "text: 甄加索是自杀吗\n", - "--------------------------------------------------\n", - "label: 不是\n", - "--------------------------------------------------\n", - "answer: nan\n", - "--------------------------------------------------\n", - "title: 海岸之谜\n", - "--------------------------------------------------\n", - "puzzle: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "--------------------------------------------------\n", - "truth: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "--------------------------------------------------\n", - "train_text: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "\n", - "实际情况: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "\n", - "参与者提出的问题: 甄加索是自杀吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "不是<|im_end|>\n", - "--------------------------------------------------\n", - "prompt: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 
参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "\n", - "实际情况: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "\n", - "参与者提出的问题: 甄加索是自杀吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "\n", - "--------------------------------------------------\n", - "text: 死者受伤了吗\n", - "--------------------------------------------------\n", - "label: 不是\n", - "--------------------------------------------------\n", - "answer: nan\n", - "--------------------------------------------------\n", - "title: 甄庄哭声\n", - "--------------------------------------------------\n", - "puzzle: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "--------------------------------------------------\n", - "truth: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "--------------------------------------------------\n", - "train_text: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "\n", - "实际情况: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "\n", - "参与者提出的问题: 死者受伤了吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "不是<|im_end|>\n", - "--------------------------------------------------\n", - "prompt: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 
参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但��有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "\n", - "实际情况: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "\n", - "参与者提出的问题: 死者受伤了吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "\n", - "Evaluating model: Qwen/Qwen2-7B\n", - " 0% 0/3000 [00:00> Setting `pad_token_id` to `eos_token_id`:151643 for open-end generation.\n", - " 0% 0/3000 [31:25\n", - " predictions = eval_model(model, tokenizer, datasets[\"test\"])\n", - " File \"/content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\", line 215, in eval_model\n", - " outputs = model.generate(**inputs, max_new_tokens=4096, use_cache=False)\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py\", line 115, in decorate_context\n", - " return func(*args, **kwargs)\n", - " File \"/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py\", line 1758, in generate\n", - " result = self._sample(\n", - " File \"/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py\", line 2397, in _sample\n", - " outputs = self(\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\", line 1532, in _wrapped_call_impl\n", - " return self._call_impl(*args, **kwargs)\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py\", line 1541, in _call_impl\n", - " return forward_call(*args, **kwargs)\n", - " File \"/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py\", line 1163, in forward\n", - " logits = logits.float()\n", - "torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.92 GiB. GPU \n", - "Epoch 3\n", - "loading env vars from: /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/.env\n", - "Adding /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning to sys.path\n", - "2024-07-16 05:39:41.116319: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", - "2024-07-16 05:39:41.166809: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", - "2024-07-16 05:39:41.166878: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", - "2024-07-16 05:39:41.168319: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", - "2024-07-16 05:39:41.175971: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n", - "To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n", - "2024-07-16 05:39:42.445909: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", - "loading /content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\n", - "Qwen/Qwen2-7B /content/qwen2-7b/qwen2-7b/checkpoint-1686 False datasets/mgtv results/mgtv-results_02_qwen2_7b_colab.csv\n", - "(1) GPU = NVIDIA L4. Max memory = 22.168 GB.\n", - "0.0 GB of memory reserved.\n", - "loading model: Qwen/Qwen2-7B\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:39:48,848 >> loading file vocab.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/vocab.json\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:39:48,849 >> loading file merges.txt from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/merges.txt\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:39:48,849 >> loading file tokenizer.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/tokenizer.json\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:39:48,849 >> loading file added_tokens.json from cache at None\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:39:48,849 >> loading file special_tokens_map.json from cache at None\n", - "[INFO|tokenization_utils_base.py:2108] 2024-07-16 05:39:48,849 >> loading file tokenizer_config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/tokenizer_config.json\n", - "[WARNING|logging.py:314] 2024-07-16 05:39:49,128 >> Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.\n", - "07/16/2024 05:39:49 - INFO - llamafactory.data.template - Replace eos token: <|im_end|>\n", - "07/16/2024 05:39:49 - INFO - llamafactory.data.template - Add <|im_start|> to stop words.\n", - "/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. 
If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n", - "[INFO|configuration_utils.py:733] 2024-07-16 05:39:49,227 >> loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/config.json\n", - "[INFO|configuration_utils.py:796] 2024-07-16 05:39:49,228 >> Model config Qwen2Config {\n", - " \"_name_or_path\": \"Qwen/Qwen2-7B\",\n", - " \"architectures\": [\n", - " \"Qwen2ForCausalLM\"\n", - " ],\n", - " \"attention_dropout\": 0.0,\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"hidden_act\": \"silu\",\n", - " \"hidden_size\": 3584,\n", - " \"initializer_range\": 0.02,\n", - " \"intermediate_size\": 18944,\n", - " \"max_position_embeddings\": 131072,\n", - " \"max_window_layers\": 28,\n", - " \"model_type\": \"qwen2\",\n", - " \"num_attention_heads\": 28,\n", - " \"num_hidden_layers\": 28,\n", - " \"num_key_value_heads\": 4,\n", - " \"rms_norm_eps\": 1e-06,\n", - " \"rope_theta\": 1000000.0,\n", - " \"sliding_window\": 131072,\n", - " \"tie_word_embeddings\": false,\n", - " \"torch_dtype\": \"bfloat16\",\n", - " \"transformers_version\": \"4.41.2\",\n", - " \"use_cache\": true,\n", - " \"use_sliding_window\": false,\n", - " \"vocab_size\": 152064\n", - "}\n", - "\n", - "07/16/2024 05:39:49 - INFO - llamafactory.model.patcher - Using KV cache for faster generation.\n", - "[INFO|modeling_utils.py:3474] 2024-07-16 05:39:49,260 >> loading weights file model.safetensors from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/model.safetensors.index.json\n", - "[INFO|modeling_utils.py:1519] 2024-07-16 05:39:49,263 >> Instantiating Qwen2ForCausalLM model under default dtype torch.bfloat16.\n", - "[INFO|configuration_utils.py:962] 2024-07-16 05:39:49,264 >> Generate config GenerationConfig {\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643\n", - "}\n", - "\n", - "Loading checkpoint shards: 100% 4/4 [00:05<00:00, 1.43s/it]\n", - "[INFO|modeling_utils.py:4280] 2024-07-16 05:39:57,929 >> All model checkpoint weights were used when initializing Qwen2ForCausalLM.\n", - "\n", - "[INFO|modeling_utils.py:4288] 2024-07-16 05:39:57,929 >> All the weights of Qwen2ForCausalLM were initialized from the model checkpoint at Qwen/Qwen2-7B.\n", - "If your task is similar to the task the model of the checkpoint was trained on, you can already use Qwen2ForCausalLM for predictions without further training.\n", - "[INFO|configuration_utils.py:917] 2024-07-16 05:39:58,030 >> loading configuration file generation_config.json from cache at /root/.cache/huggingface/hub/models--Qwen--Qwen2-7B/snapshots/453ed1575b739b5b03ce3758b23befdb0967f40e/generation_config.json\n", - "[INFO|configuration_utils.py:962] 2024-07-16 05:39:58,030 >> Generate config GenerationConfig {\n", - " \"bos_token_id\": 151643,\n", - " \"eos_token_id\": 151643,\n", - " \"max_new_tokens\": 2048\n", - "}\n", - "\n", - "07/16/2024 05:39:58 - INFO - llamafactory.model.model_utils.attention - Using torch SDPA for faster training and inference.\n", - "07/16/2024 05:39:59 - INFO - llamafactory.model.adapter - Merged 1 adapter(s).\n", - "07/16/2024 05:39:59 - INFO - llamafactory.model.adapter - Loaded adapter(s): /content/qwen2-7b/qwen2-7b/checkpoint-1686\n", - "07/16/2024 05:39:59 - INFO - llamafactory.model.loader - all params: 7,615,616,512\n", - "(2) GPU = NVIDIA L4. 
Max memory = 22.168 GB.\n", - "16.521 GB of memory reserved.\n", - "loading train/test data files\n", - "DatasetDict({\n", - " train: Dataset({\n", - " features: ['text', 'label', 'answer', 'title', 'puzzle', 'truth', 'train_text', 'prompt'],\n", - " num_rows: 25000\n", - " })\n", - " test: Dataset({\n", - " features: ['text', 'label', 'answer', 'title', 'puzzle', 'truth', 'train_text', 'prompt'],\n", - " num_rows: 3000\n", - " })\n", - "})\n", - "--------------------------------------------------\n", - "text: 甄加索是自杀吗\n", - "--------------------------------------------------\n", - "label: 不是\n", - "--------------------------------------------------\n", - "answer: nan\n", - "--------------------------------------------------\n", - "title: 海岸之谜\n", - "--------------------------------------------------\n", - "puzzle: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "--------------------------------------------------\n", - "truth: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "--------------------------------------------------\n", - "train_text: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "\n", - "实际情况: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "\n", - "参与者提出的问题: 甄加索是自杀吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "不是<|im_end|>\n", - "--------------------------------------------------\n", - "prompt: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 
参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在远离城市喧嚣的海边小屋,一天清晨,邻居发现甄加索僵卧在沙滩上,已无生命迹象。现场没有发现任何打斗的迹象。请问甄加索的死因是什么?\n", - "\n", - "实际情况: 甄加索是一位热爱自然的画家,他每年都会来到这个海边小屋寻找灵感。在他生命的最后几天,他一直在创作一幅描绘海洋生物的画作。在画即将完成的前一天晚上,他骑着自行车外出,打算在海边观赏夜景。然而,他在沙滩上意外发现了一只搁浅的海豚,为了救助这只海豚,他耗费了极大的体力,最终成功将其送回海中。筋疲力尽的甄加索在沙滩上睡着了,由于他患有严重的心脏病,却未告知旁人,在寒冷的海风中,他的心脏停止了跳动。因此,警方在现场只发现了车轮痕迹和未完成的画作,而没有发现任何他杀的迹象。\n", - "\n", - "参与者提出的问题: 甄加索是自杀吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "\n", - "--------------------------------------------------\n", - "text: 死者受伤了吗\n", - "--------------------------------------------------\n", - "label: 不是\n", - "--------------------------------------------------\n", - "answer: nan\n", - "--------------------------------------------------\n", - "title: 甄庄哭声\n", - "--------------------------------------------------\n", - "puzzle: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "--------------------------------------------------\n", - "truth: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "--------------------------------------------------\n", - "train_text: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "\n", - "实际情况: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "\n", - "参与者提出的问题: 死者受伤了吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "不是<|im_end|>\n", - "--------------------------------------------------\n", - "prompt: <|im_start|>system\n", - "You are an expert in logical reasoning.<|im_end|>\n", - "<|im_start|>user\n", - "你是一个逻辑游戏的主持人。游戏规则如下:\n", - "\n", - "1. 参与者会得到一个谜题。\n", - "2. 参与者可以通过提问来获取线索,尝试解开谜题。\n", - "3. 对于每个问题,主持人将根据实际情况回答以下五个选项之一:是、不是、不重要、回答正确、问法错误。\n", - "4. 回答中不能添加任何其它信息,也不能省略选项中的任何一个字。例如,不可以把“不是”省略成“不”。\n", - "5. 
参与者需要根据回答来推理,并最终找出谜题的正确答案。\n", - "\n", - "请严格按照这些规则回答参与者提出的问题。\n", - "\n", - "谜题: 在一个安静的夜晚,小村庄的湖边突然传来了阵阵哭泣声。第二天早晨,村长甄锐发现湖边的石头上放着一顶破旧的帽子,但没有人知道这顶帽子是从哪里来的,哭泣声又是为何。请还原故事真相。\n", - "\n", - "实际情况: 原来,这顶破旧的帽子属于一个小男孩,他小时候与爷爷在湖边生活。爷爷教他钓鱼、游泳,还告诉他湖中的海龟是他们的朋友。后来,小男孩随父母去了城市生活,但每年夏天都会回到村子探望爷爷。然而,去年夏天,爷爷因病去世,小男孩伤心欲绝。今年夏天,他回到村子,来到湖边,想起和爷爷的美好回忆,忍不住哭泣。他将爷爷的帽子放在湖边的石头上,希望能让爷爷的在天之灵得到安慰。那晚的哭泣声正是小男孩在祭莫他亲爱的爷爷。\n", - "\n", - "参与者提出的问题: 死者受伤了吗\n", - "<|im_end|>\n", - "<|im_start|>assistant\n", - "\n", - "Evaluating model: Qwen/Qwen2-7B\n", - " 0% 0/3000 [00:00> Setting `pad_token_id` to `eos_token_id`:151643 for open-end generation.\n", - " 0% 0/3000 [01:33\n", - " predictions = eval_model(model, tokenizer, datasets[\"test\"])\n", - " File \"/content/drive/.shortcut-targets-by-id/1E09lTnfbsjtTgQg65dQ3y9D2R6l8waxR/logical-reasoning/llm_toolkit/logical_reasoning_utils.py\", line 215, in eval_model\n", - " outputs = model.generate(**inputs, max_new_tokens=4096, use_cache=False)\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py\", line 114, in decorate_context\n", - " with ctx_factory():\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/autograd/grad_mode.py\", line 84, in __exit__\n", - " torch.set_grad_enabled(self.prev)\n", - " File \"/usr/local/lib/python3.10/dist-packages/torch/autograd/grad_mode.py\", line 183, in __init__\n", - " def __init__(self, mode: bool) -> None:\n", - "KeyboardInterrupt\n", - "Epoch 4\n" + "2.4.0\n", + "True\n", + "0\n", + "NVIDIA GeForce RTX 3070 Laptop GPU\n" ] } ], "source": [ - "%%time\n", - "\n", - "evaluate_model_all_epochs(\"Qwen/Qwen2-7B\", \"/content/qwen2-7b/qwen2-7b\", 4, start_epoch=0, load_in_4bit=False, num_of_entries=-1)" + "import torch\n", + "print(torch.__version__) # Should show the PyTorch version\n", + "print(torch.cuda.is_available()) # Should be True if CUDA is available\n", + "print(torch.cuda.current_device()) # Should not raise an exception\n", + "print(torch.cuda.get_device_name(0)) # Should print the GPU name" ] } ], @@ -6688,7 +3529,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.4" + "version": "undefined.undefined.undefined" } }, "nbformat": 4,