diff --git a/.cache/pip/http-v2/0/0/e/9/d/00e9d7b0e1cda3bfdf322be5b79cb9c4a6e6ef6159d3de3d7d3abe87 b/.cache/pip/http-v2/0/0/e/9/d/00e9d7b0e1cda3bfdf322be5b79cb9c4a6e6ef6159d3de3d7d3abe87 new file mode 100644 index 0000000000000000000000000000000000000000..28ea21acafec44e6c94f49d44b5b3f8d592c6712 Binary files /dev/null and b/.cache/pip/http-v2/0/0/e/9/d/00e9d7b0e1cda3bfdf322be5b79cb9c4a6e6ef6159d3de3d7d3abe87 differ diff --git a/.cache/pip/http-v2/0/0/e/9/d/00e9d7b0e1cda3bfdf322be5b79cb9c4a6e6ef6159d3de3d7d3abe87.body b/.cache/pip/http-v2/0/0/e/9/d/00e9d7b0e1cda3bfdf322be5b79cb9c4a6e6ef6159d3de3d7d3abe87.body new file mode 100644 index 0000000000000000000000000000000000000000..534aa4400883ef98a63430e296d6c63bad589fb8 Binary files /dev/null and b/.cache/pip/http-v2/0/0/e/9/d/00e9d7b0e1cda3bfdf322be5b79cb9c4a6e6ef6159d3de3d7d3abe87.body differ diff --git a/.cache/pip/http-v2/0/1/f/2/0/01f2082df50502ba9492d64e69db99d1fdb5730707a16c6264b355b8 b/.cache/pip/http-v2/0/1/f/2/0/01f2082df50502ba9492d64e69db99d1fdb5730707a16c6264b355b8 new file mode 100644 index 0000000000000000000000000000000000000000..fd8a21e98f3df8fc5b60fd34a7f3f2d3560c10a7 Binary files /dev/null and b/.cache/pip/http-v2/0/1/f/2/0/01f2082df50502ba9492d64e69db99d1fdb5730707a16c6264b355b8 differ diff --git a/.cache/pip/http-v2/0/2/d/4/2/02d4221e858694abc22129c65515f1df2c4c326330eb1a34ceb0b382 b/.cache/pip/http-v2/0/2/d/4/2/02d4221e858694abc22129c65515f1df2c4c326330eb1a34ceb0b382 new file mode 100644 index 0000000000000000000000000000000000000000..83b20e887daa4d33091b7d49633ba28e1d2fd881 Binary files /dev/null and b/.cache/pip/http-v2/0/2/d/4/2/02d4221e858694abc22129c65515f1df2c4c326330eb1a34ceb0b382 differ diff --git a/.cache/pip/http-v2/0/2/f/8/e/02f8e820ca8231526982c4a2b93baef519d0948ff85c925acd226f06 b/.cache/pip/http-v2/0/2/f/8/e/02f8e820ca8231526982c4a2b93baef519d0948ff85c925acd226f06 new file mode 100644 index 0000000000000000000000000000000000000000..bf1f58537c73c5141723fd93473409391efeb159 Binary files /dev/null and b/.cache/pip/http-v2/0/2/f/8/e/02f8e820ca8231526982c4a2b93baef519d0948ff85c925acd226f06 differ diff --git a/.cache/pip/http-v2/0/2/f/8/e/02f8e820ca8231526982c4a2b93baef519d0948ff85c925acd226f06.body b/.cache/pip/http-v2/0/2/f/8/e/02f8e820ca8231526982c4a2b93baef519d0948ff85c925acd226f06.body new file mode 100644 index 0000000000000000000000000000000000000000..33b35eef3aeca2b356b8ef41838156ffbbc0f628 Binary files /dev/null and b/.cache/pip/http-v2/0/2/f/8/e/02f8e820ca8231526982c4a2b93baef519d0948ff85c925acd226f06.body differ diff --git a/.cache/pip/http-v2/0/5/8/9/6/0589682f53f4c502330bc0fa01138806ce0467c549c2af469b6afb31 b/.cache/pip/http-v2/0/5/8/9/6/0589682f53f4c502330bc0fa01138806ce0467c549c2af469b6afb31 new file mode 100644 index 0000000000000000000000000000000000000000..6873a4a89d07a4b65f1f7d885750cf91e9d51880 Binary files /dev/null and b/.cache/pip/http-v2/0/5/8/9/6/0589682f53f4c502330bc0fa01138806ce0467c549c2af469b6afb31 differ diff --git a/.cache/pip/http-v2/0/5/8/9/6/0589682f53f4c502330bc0fa01138806ce0467c549c2af469b6afb31.body b/.cache/pip/http-v2/0/5/8/9/6/0589682f53f4c502330bc0fa01138806ce0467c549c2af469b6afb31.body new file mode 100644 index 0000000000000000000000000000000000000000..23662ce7ee5c52367ac94a50e4d94c6b39645938 Binary files /dev/null and b/.cache/pip/http-v2/0/5/8/9/6/0589682f53f4c502330bc0fa01138806ce0467c549c2af469b6afb31.body differ diff --git a/.cache/pip/http-v2/0/5/e/2/2/05e22b8b3169eed822187b7e670f3dc47b0666b777d95f87de8fb5e9 
b/.cache/pip/http-v2/0/5/e/2/2/05e22b8b3169eed822187b7e670f3dc47b0666b777d95f87de8fb5e9 new file mode 100644 index 0000000000000000000000000000000000000000..bc333cf392843167a8e7f7b79a8ea5c0830d3717 Binary files /dev/null and b/.cache/pip/http-v2/0/5/e/2/2/05e22b8b3169eed822187b7e670f3dc47b0666b777d95f87de8fb5e9 differ diff --git a/.cache/pip/http-v2/0/5/e/2/2/05e22b8b3169eed822187b7e670f3dc47b0666b777d95f87de8fb5e9.body b/.cache/pip/http-v2/0/5/e/2/2/05e22b8b3169eed822187b7e670f3dc47b0666b777d95f87de8fb5e9.body new file mode 100644 index 0000000000000000000000000000000000000000..2d625b1103171bf5397fd7ba37963210a8e6f6e9 Binary files /dev/null and b/.cache/pip/http-v2/0/5/e/2/2/05e22b8b3169eed822187b7e670f3dc47b0666b777d95f87de8fb5e9.body differ diff --git a/.cache/pip/http-v2/0/6/7/2/6/06726d442b7e33afe35f1740674b6dee72357a95eef3aca0ef7abf21 b/.cache/pip/http-v2/0/6/7/2/6/06726d442b7e33afe35f1740674b6dee72357a95eef3aca0ef7abf21 new file mode 100644 index 0000000000000000000000000000000000000000..c703fe868ee150948c2bcb44726e577449527249 Binary files /dev/null and b/.cache/pip/http-v2/0/6/7/2/6/06726d442b7e33afe35f1740674b6dee72357a95eef3aca0ef7abf21 differ diff --git a/.cache/pip/http-v2/0/6/7/2/6/06726d442b7e33afe35f1740674b6dee72357a95eef3aca0ef7abf21.body b/.cache/pip/http-v2/0/6/7/2/6/06726d442b7e33afe35f1740674b6dee72357a95eef3aca0ef7abf21.body new file mode 100644 index 0000000000000000000000000000000000000000..9f0e284a0eccae8fb6dc5ab447fb27e0e8e93f73 Binary files /dev/null and b/.cache/pip/http-v2/0/6/7/2/6/06726d442b7e33afe35f1740674b6dee72357a95eef3aca0ef7abf21.body differ diff --git a/.cache/pip/http-v2/0/7/5/9/3/07593bb905dded4b84aacb1d96c1e64704669d6bab658dcaeab79c36.body b/.cache/pip/http-v2/0/7/5/9/3/07593bb905dded4b84aacb1d96c1e64704669d6bab658dcaeab79c36.body new file mode 100644 index 0000000000000000000000000000000000000000..9af5f813f65b21398d2b76b66ad1c3c1bb5464e4 Binary files /dev/null and b/.cache/pip/http-v2/0/7/5/9/3/07593bb905dded4b84aacb1d96c1e64704669d6bab658dcaeab79c36.body differ diff --git a/.cache/pip/http-v2/0/9/4/f/4/094f40b0ad6510929406d8b53f0945758a4aefd5c45857fe8adf0b03 b/.cache/pip/http-v2/0/9/4/f/4/094f40b0ad6510929406d8b53f0945758a4aefd5c45857fe8adf0b03 new file mode 100644 index 0000000000000000000000000000000000000000..a7eb19904f8c328f49f850cb42f9ea6fe082c60c Binary files /dev/null and b/.cache/pip/http-v2/0/9/4/f/4/094f40b0ad6510929406d8b53f0945758a4aefd5c45857fe8adf0b03 differ diff --git a/.cache/pip/http-v2/0/9/4/f/4/094f40b0ad6510929406d8b53f0945758a4aefd5c45857fe8adf0b03.body b/.cache/pip/http-v2/0/9/4/f/4/094f40b0ad6510929406d8b53f0945758a4aefd5c45857fe8adf0b03.body new file mode 100644 index 0000000000000000000000000000000000000000..cc1b05c7091f761bfecbd1c72cfecdd4c81a4a6e Binary files /dev/null and b/.cache/pip/http-v2/0/9/4/f/4/094f40b0ad6510929406d8b53f0945758a4aefd5c45857fe8adf0b03.body differ diff --git a/.cache/pip/http-v2/0/9/a/c/f/09acf4ef8859e657a77d0079dea556ad6f8dc87031ab1c54b1a9cbfe b/.cache/pip/http-v2/0/9/a/c/f/09acf4ef8859e657a77d0079dea556ad6f8dc87031ab1c54b1a9cbfe new file mode 100644 index 0000000000000000000000000000000000000000..5f13b4db0bf70a74143dee6f7eb9fbd01d6b92d9 Binary files /dev/null and b/.cache/pip/http-v2/0/9/a/c/f/09acf4ef8859e657a77d0079dea556ad6f8dc87031ab1c54b1a9cbfe differ diff --git a/.cache/pip/http-v2/0/9/a/c/f/09acf4ef8859e657a77d0079dea556ad6f8dc87031ab1c54b1a9cbfe.body b/.cache/pip/http-v2/0/9/a/c/f/09acf4ef8859e657a77d0079dea556ad6f8dc87031ab1c54b1a9cbfe.body new file mode 100644 index 
0000000000000000000000000000000000000000..1b49e3701ce69a5e4371f8ec8a968f48a2dfde3d Binary files /dev/null and b/.cache/pip/http-v2/0/9/a/c/f/09acf4ef8859e657a77d0079dea556ad6f8dc87031ab1c54b1a9cbfe.body differ diff --git a/.cache/pip/http-v2/0/e/a/4/f/0ea4f1b2570ca3d64073f1277e4faa435bd7c7cbadafd80723949de4 b/.cache/pip/http-v2/0/e/a/4/f/0ea4f1b2570ca3d64073f1277e4faa435bd7c7cbadafd80723949de4 new file mode 100644 index 0000000000000000000000000000000000000000..6dc6c6b3445995a5034d5462d0552c091e815e4f Binary files /dev/null and b/.cache/pip/http-v2/0/e/a/4/f/0ea4f1b2570ca3d64073f1277e4faa435bd7c7cbadafd80723949de4 differ diff --git a/.cache/pip/http-v2/0/e/a/4/f/0ea4f1b2570ca3d64073f1277e4faa435bd7c7cbadafd80723949de4.body b/.cache/pip/http-v2/0/e/a/4/f/0ea4f1b2570ca3d64073f1277e4faa435bd7c7cbadafd80723949de4.body new file mode 100644 index 0000000000000000000000000000000000000000..364212e23299a0667b2b7aa4d55882c745f0835b Binary files /dev/null and b/.cache/pip/http-v2/0/e/a/4/f/0ea4f1b2570ca3d64073f1277e4faa435bd7c7cbadafd80723949de4.body differ diff --git a/.cache/pip/http-v2/1/2/e/6/c/12e6c0100f8dcc54f1445b9845cfdcecf519882e2fe2767cf43bd439 b/.cache/pip/http-v2/1/2/e/6/c/12e6c0100f8dcc54f1445b9845cfdcecf519882e2fe2767cf43bd439 new file mode 100644 index 0000000000000000000000000000000000000000..856fe7fbaf29862adf7947d80997b1269f267fec Binary files /dev/null and b/.cache/pip/http-v2/1/2/e/6/c/12e6c0100f8dcc54f1445b9845cfdcecf519882e2fe2767cf43bd439 differ diff --git a/.cache/pip/http-v2/1/2/e/6/c/12e6c0100f8dcc54f1445b9845cfdcecf519882e2fe2767cf43bd439.body b/.cache/pip/http-v2/1/2/e/6/c/12e6c0100f8dcc54f1445b9845cfdcecf519882e2fe2767cf43bd439.body new file mode 100644 index 0000000000000000000000000000000000000000..4a14fa93e31d754d39f5833c265857abfef58b4b Binary files /dev/null and b/.cache/pip/http-v2/1/2/e/6/c/12e6c0100f8dcc54f1445b9845cfdcecf519882e2fe2767cf43bd439.body differ diff --git a/.cache/pip/http-v2/1/8/e/e/a/18eea207de73c88bb45229bed4bcc74fbcbddadf2aa9f49e4df1f66a b/.cache/pip/http-v2/1/8/e/e/a/18eea207de73c88bb45229bed4bcc74fbcbddadf2aa9f49e4df1f66a new file mode 100644 index 0000000000000000000000000000000000000000..1677ef4808d820976e251bbca3c765f631bc3a2f Binary files /dev/null and b/.cache/pip/http-v2/1/8/e/e/a/18eea207de73c88bb45229bed4bcc74fbcbddadf2aa9f49e4df1f66a differ diff --git a/.cache/pip/http-v2/1/8/e/e/a/18eea207de73c88bb45229bed4bcc74fbcbddadf2aa9f49e4df1f66a.body b/.cache/pip/http-v2/1/8/e/e/a/18eea207de73c88bb45229bed4bcc74fbcbddadf2aa9f49e4df1f66a.body new file mode 100644 index 0000000000000000000000000000000000000000..e5b45bdd68f8e6310663b6782665b50818fff579 --- /dev/null +++ b/.cache/pip/http-v2/1/8/e/e/a/18eea207de73c88bb45229bed4bcc74fbcbddadf2aa9f49e4df1f66a.body @@ -0,0 +1,88 @@ +Metadata-Version: 2.1 +Name: pip +Version: 24.0 +Summary: The PyPA recommended tool for installing Python packages. 
+Author-email: The pip developers +License: MIT +Project-URL: Homepage, https://pip.pypa.io/ +Project-URL: Documentation, https://pip.pypa.io +Project-URL: Source, https://github.com/pypa/pip +Project-URL: Changelog, https://pip.pypa.io/en/stable/news/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.txt +License-File: AUTHORS.txt + +pip - The Python Package Installer +================================== + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.org/project/pip/ + :alt: PyPI + +.. image:: https://img.shields.io/pypi/pyversions/pip + :target: https://pypi.org/project/pip + :alt: PyPI - Python Version + +.. image:: https://readthedocs.org/projects/pip/badge/?version=latest + :target: https://pip.pypa.io/en/latest + :alt: Documentation + +pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes. + +Please take a look at our documentation for how to install and use pip: + +* `Installation`_ +* `Usage`_ + +We release updates regularly, with a new version every 3 months. Find more details in our documentation: + +* `Release notes`_ +* `Release process`_ + +If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: + +* `Issue tracking`_ +* `Discourse channel`_ +* `User IRC`_ + +If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: + +* `GitHub page`_ +* `Development documentation`_ +* `Development IRC`_ + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _package installer: https://packaging.python.org/guides/tool-recommendations/ +.. _Python Package Index: https://pypi.org +.. _Installation: https://pip.pypa.io/en/stable/installation/ +.. _Usage: https://pip.pypa.io/en/stable/ +.. _Release notes: https://pip.pypa.io/en/stable/news.html +.. _Release process: https://pip.pypa.io/en/latest/development/release-process/ +.. _GitHub page: https://github.com/pypa/pip +.. _Development documentation: https://pip.pypa.io/en/latest/development +.. _Issue tracking: https://github.com/pypa/pip/issues +.. _Discourse channel: https://discuss.python.org/c/packaging +.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa +.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev +.. 
_PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md diff --git a/.cache/pip/http-v2/1/d/8/a/2/1d8a24cdff71edbc3f733b6b4d52640c1c1129289938aa4d1c7adcf5 b/.cache/pip/http-v2/1/d/8/a/2/1d8a24cdff71edbc3f733b6b4d52640c1c1129289938aa4d1c7adcf5 new file mode 100644 index 0000000000000000000000000000000000000000..beace0305cc336d8d7b7492fa69b3748f1b6d0c6 Binary files /dev/null and b/.cache/pip/http-v2/1/d/8/a/2/1d8a24cdff71edbc3f733b6b4d52640c1c1129289938aa4d1c7adcf5 differ diff --git a/.cache/pip/http-v2/1/d/8/a/2/1d8a24cdff71edbc3f733b6b4d52640c1c1129289938aa4d1c7adcf5.body b/.cache/pip/http-v2/1/d/8/a/2/1d8a24cdff71edbc3f733b6b4d52640c1c1129289938aa4d1c7adcf5.body new file mode 100644 index 0000000000000000000000000000000000000000..e8f4f4aab05329449347595b8440d8571e33d02d Binary files /dev/null and b/.cache/pip/http-v2/1/d/8/a/2/1d8a24cdff71edbc3f733b6b4d52640c1c1129289938aa4d1c7adcf5.body differ diff --git a/.cache/pip/http-v2/1/e/2/b/1/1e2b1734fc3c57a4733131da81a6167573fc77057172172ceee83a22 b/.cache/pip/http-v2/1/e/2/b/1/1e2b1734fc3c57a4733131da81a6167573fc77057172172ceee83a22 new file mode 100644 index 0000000000000000000000000000000000000000..f6e987e40804a3c3cc761af2a47987477e3b2dab Binary files /dev/null and b/.cache/pip/http-v2/1/e/2/b/1/1e2b1734fc3c57a4733131da81a6167573fc77057172172ceee83a22 differ diff --git a/.cache/pip/http-v2/2/e/2/d/8/2e2d841f57ca6a43dfc96af93374304dda5b757176df2cce7ca2b18d b/.cache/pip/http-v2/2/e/2/d/8/2e2d841f57ca6a43dfc96af93374304dda5b757176df2cce7ca2b18d new file mode 100644 index 0000000000000000000000000000000000000000..b4b50b954d3a643e883506566b0bca3827122b7a Binary files /dev/null and b/.cache/pip/http-v2/2/e/2/d/8/2e2d841f57ca6a43dfc96af93374304dda5b757176df2cce7ca2b18d differ diff --git a/.cache/pip/http-v2/4/0/2/3/b/4023be7b5b37a7a4144c804ce69828082d4fb2a124d9d8aabc855da8 b/.cache/pip/http-v2/4/0/2/3/b/4023be7b5b37a7a4144c804ce69828082d4fb2a124d9d8aabc855da8 new file mode 100644 index 0000000000000000000000000000000000000000..188c63aad18f029f21a6183439bb17af01ccb26f Binary files /dev/null and b/.cache/pip/http-v2/4/0/2/3/b/4023be7b5b37a7a4144c804ce69828082d4fb2a124d9d8aabc855da8 differ diff --git a/.cache/pip/http-v2/4/5/9/c/7/459c78bacdedb04c7e03d152081522ecf0ff46e1d14e7503997ea6c8 b/.cache/pip/http-v2/4/5/9/c/7/459c78bacdedb04c7e03d152081522ecf0ff46e1d14e7503997ea6c8 new file mode 100644 index 0000000000000000000000000000000000000000..04b4144fad4b56a24e82bc11adedcc5f0f852af8 Binary files /dev/null and b/.cache/pip/http-v2/4/5/9/c/7/459c78bacdedb04c7e03d152081522ecf0ff46e1d14e7503997ea6c8 differ diff --git a/.cache/pip/http-v2/7/1/9/e/0/719e0feaf94b669c315daed4ef76501cc929d482e4315b25fd3b0082.body b/.cache/pip/http-v2/7/1/9/e/0/719e0feaf94b669c315daed4ef76501cc929d482e4315b25fd3b0082.body new file mode 100644 index 0000000000000000000000000000000000000000..79bc6023146c58ed88843c17cdd9fb1772e6a0d2 --- /dev/null +++ b/.cache/pip/http-v2/7/1/9/e/0/719e0feaf94b669c315daed4ef76501cc929d482e4315b25fd3b0082.body @@ -0,0 +1,303 @@ +Metadata-Version: 2.1 +Name: huggingface-hub +Version: 0.25.1 +Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub +Home-page: https://github.com/huggingface/huggingface_hub +Author: Hugging Face, Inc. 
+Author-email: julien@huggingface.co +License: Apache +Keywords: model-hub machine-learning models natural-language-processing deep-learning pytorch pretrained-models +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Python: >=3.8.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: filelock +Requires-Dist: fsspec>=2023.5.0 +Requires-Dist: packaging>=20.9 +Requires-Dist: pyyaml>=5.1 +Requires-Dist: requests +Requires-Dist: tqdm>=4.42.1 +Requires-Dist: typing-extensions>=3.7.4.3 +Provides-Extra: all +Requires-Dist: InquirerPy==0.3.4; extra == "all" +Requires-Dist: aiohttp; extra == "all" +Requires-Dist: minijinja>=1.0; extra == "all" +Requires-Dist: jedi; extra == "all" +Requires-Dist: Jinja2; extra == "all" +Requires-Dist: pytest<8.2.2,>=8.1.1; extra == "all" +Requires-Dist: pytest-cov; extra == "all" +Requires-Dist: pytest-env; extra == "all" +Requires-Dist: pytest-xdist; extra == "all" +Requires-Dist: pytest-vcr; extra == "all" +Requires-Dist: pytest-asyncio; extra == "all" +Requires-Dist: pytest-rerunfailures; extra == "all" +Requires-Dist: pytest-mock; extra == "all" +Requires-Dist: urllib3<2.0; extra == "all" +Requires-Dist: soundfile; extra == "all" +Requires-Dist: Pillow; extra == "all" +Requires-Dist: gradio; extra == "all" +Requires-Dist: numpy; extra == "all" +Requires-Dist: fastapi; extra == "all" +Requires-Dist: ruff>=0.5.0; extra == "all" +Requires-Dist: mypy==1.5.1; extra == "all" +Requires-Dist: typing-extensions>=4.8.0; extra == "all" +Requires-Dist: types-PyYAML; extra == "all" +Requires-Dist: types-requests; extra == "all" +Requires-Dist: types-simplejson; extra == "all" +Requires-Dist: types-toml; extra == "all" +Requires-Dist: types-tqdm; extra == "all" +Requires-Dist: types-urllib3; extra == "all" +Provides-Extra: cli +Requires-Dist: InquirerPy==0.3.4; extra == "cli" +Provides-Extra: dev +Requires-Dist: InquirerPy==0.3.4; extra == "dev" +Requires-Dist: aiohttp; extra == "dev" +Requires-Dist: minijinja>=1.0; extra == "dev" +Requires-Dist: jedi; extra == "dev" +Requires-Dist: Jinja2; extra == "dev" +Requires-Dist: pytest<8.2.2,>=8.1.1; extra == "dev" +Requires-Dist: pytest-cov; extra == "dev" +Requires-Dist: pytest-env; extra == "dev" +Requires-Dist: pytest-xdist; extra == "dev" +Requires-Dist: pytest-vcr; extra == "dev" +Requires-Dist: pytest-asyncio; extra == "dev" +Requires-Dist: pytest-rerunfailures; extra == "dev" +Requires-Dist: pytest-mock; extra == "dev" +Requires-Dist: urllib3<2.0; extra == "dev" +Requires-Dist: soundfile; extra == "dev" +Requires-Dist: Pillow; extra == "dev" +Requires-Dist: gradio; extra == "dev" +Requires-Dist: numpy; extra == "dev" +Requires-Dist: fastapi; extra == "dev" +Requires-Dist: ruff>=0.5.0; extra == "dev" +Requires-Dist: mypy==1.5.1; extra == "dev" +Requires-Dist: typing-extensions>=4.8.0; extra == "dev" +Requires-Dist: types-PyYAML; extra == "dev" +Requires-Dist: types-requests; extra == "dev" 
+Requires-Dist: types-simplejson; extra == "dev" +Requires-Dist: types-toml; extra == "dev" +Requires-Dist: types-tqdm; extra == "dev" +Requires-Dist: types-urllib3; extra == "dev" +Provides-Extra: fastai +Requires-Dist: toml; extra == "fastai" +Requires-Dist: fastai>=2.4; extra == "fastai" +Requires-Dist: fastcore>=1.3.27; extra == "fastai" +Provides-Extra: hf_transfer +Requires-Dist: hf-transfer>=0.1.4; extra == "hf-transfer" +Provides-Extra: inference +Requires-Dist: aiohttp; extra == "inference" +Requires-Dist: minijinja>=1.0; extra == "inference" +Provides-Extra: quality +Requires-Dist: ruff>=0.5.0; extra == "quality" +Requires-Dist: mypy==1.5.1; extra == "quality" +Provides-Extra: tensorflow +Requires-Dist: tensorflow; extra == "tensorflow" +Requires-Dist: pydot; extra == "tensorflow" +Requires-Dist: graphviz; extra == "tensorflow" +Provides-Extra: tensorflow-testing +Requires-Dist: tensorflow; extra == "tensorflow-testing" +Requires-Dist: keras<3.0; extra == "tensorflow-testing" +Provides-Extra: testing +Requires-Dist: InquirerPy==0.3.4; extra == "testing" +Requires-Dist: aiohttp; extra == "testing" +Requires-Dist: minijinja>=1.0; extra == "testing" +Requires-Dist: jedi; extra == "testing" +Requires-Dist: Jinja2; extra == "testing" +Requires-Dist: pytest<8.2.2,>=8.1.1; extra == "testing" +Requires-Dist: pytest-cov; extra == "testing" +Requires-Dist: pytest-env; extra == "testing" +Requires-Dist: pytest-xdist; extra == "testing" +Requires-Dist: pytest-vcr; extra == "testing" +Requires-Dist: pytest-asyncio; extra == "testing" +Requires-Dist: pytest-rerunfailures; extra == "testing" +Requires-Dist: pytest-mock; extra == "testing" +Requires-Dist: urllib3<2.0; extra == "testing" +Requires-Dist: soundfile; extra == "testing" +Requires-Dist: Pillow; extra == "testing" +Requires-Dist: gradio; extra == "testing" +Requires-Dist: numpy; extra == "testing" +Requires-Dist: fastapi; extra == "testing" +Provides-Extra: torch +Requires-Dist: torch; extra == "torch" +Requires-Dist: safetensors[torch]; extra == "torch" +Provides-Extra: typing +Requires-Dist: typing-extensions>=4.8.0; extra == "typing" +Requires-Dist: types-PyYAML; extra == "typing" +Requires-Dist: types-requests; extra == "typing" +Requires-Dist: types-simplejson; extra == "typing" +Requires-Dist: types-toml; extra == "typing" +Requires-Dist: types-tqdm; extra == "typing" +Requires-Dist: types-urllib3; extra == "typing" + +
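+The `Provides-Extra` / `Requires-Dist` pairs above are the machine-readable form of the optional dependency groups; a minimal sketch (assuming `huggingface_hub` is installed in the current environment) of inspecting them with the standard library:
+
+```py
+from importlib.metadata import metadata
+
+# Read the installed package's METADATA, i.e. the same fields listed above.
+meta = metadata("huggingface_hub")
+print(meta["Version"])                 # e.g. "0.25.1"
+print(meta.get_all("Provides-Extra"))  # optional groups: all, cli, dev, inference, ...
+# Requirements that only apply to the "inference" extra:
+print([r for r in meta.get_all("Requires-Dist") if 'extra == "inference"' in r])
+```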

+[README header: huggingface_hub library logo; badges: Documentation, GitHub release, PyPi version, PyPI - Downloads, Code coverage]
+
+The official Python client for the Huggingface Hub.
+
+English | Deutsch | हिंदी | 한국어 | 中文(简体)
+
+---
+
+**Documentation**: https://hf.co/docs/huggingface_hub
+
+**Source Code**: https://github.com/huggingface/huggingface_hub
+
+---
+
+## Welcome to the huggingface_hub library
+
+The `huggingface_hub` library allows you to interact with the [Hugging Face Hub](https://huggingface.co/), a platform democratizing open-source Machine Learning for creators and collaborators. Discover pre-trained models and datasets for your projects or play with the thousands of machine learning apps hosted on the Hub. You can also create and share your own models, datasets and demos with the community. The `huggingface_hub` library provides a simple way to do all these things with Python.
+
+## Key features
+
+- [Download files](https://huggingface.co/docs/huggingface_hub/en/guides/download) from the Hub.
+- [Upload files](https://huggingface.co/docs/huggingface_hub/en/guides/upload) to the Hub.
+- [Manage your repositories](https://huggingface.co/docs/huggingface_hub/en/guides/repository).
+- [Run Inference](https://huggingface.co/docs/huggingface_hub/en/guides/inference) on deployed models.
+- [Search](https://huggingface.co/docs/huggingface_hub/en/guides/search) for models, datasets and Spaces.
+- [Share Model Cards](https://huggingface.co/docs/huggingface_hub/en/guides/model-cards) to document your models.
+- [Engage with the community](https://huggingface.co/docs/huggingface_hub/en/guides/community) through PRs and comments.
+
+## Installation
+
+Install the `huggingface_hub` package with [pip](https://pypi.org/project/huggingface-hub/):
+
+```bash
+pip install huggingface_hub
+```
+
+If you prefer, you can also install it with [conda](https://huggingface.co/docs/huggingface_hub/en/installation#install-with-conda).
+
+In order to keep the package minimal by default, `huggingface_hub` comes with optional dependencies useful for some use cases. For example, if you want to have a complete experience for Inference, run:
+
+```bash
+pip install huggingface_hub[inference]
+```
+
+To learn more about installation and optional dependencies, check out the [installation guide](https://huggingface.co/docs/huggingface_hub/en/installation).
+
+## Quick start
+
+### Download files
+
+Download a single file:
+
+```py
+from huggingface_hub import hf_hub_download
+
+hf_hub_download(repo_id="tiiuae/falcon-7b-instruct", filename="config.json")
+```
+
+Or an entire repository:
+
+```py
+from huggingface_hub import snapshot_download
+
+snapshot_download("stabilityai/stable-diffusion-2-1")
+```
+
+Files will be downloaded to a local cache folder. More details in [this guide](https://huggingface.co/docs/huggingface_hub/en/guides/manage-cache).
+
+### Login
+
+The Hugging Face Hub uses tokens to authenticate applications (see [docs](https://huggingface.co/docs/hub/security-tokens)). To log in on your machine, run the following CLI command:
+
+```bash
+huggingface-cli login
+# or using an environment variable
+huggingface-cli login --token $HUGGINGFACE_TOKEN
+```
+
+### Create a repository
+
+```py
+from huggingface_hub import create_repo
+
+create_repo(repo_id="super-cool-model")
+```
+
+### Upload files
+
+Upload a single file:
+
+```py
+from huggingface_hub import upload_file
+
+upload_file(
+    path_or_fileobj="/home/lysandre/dummy-test/README.md",
+    path_in_repo="README.md",
+    repo_id="lysandre/test-model",
+)
+```
+
+Or an entire folder:
+
+```py
+from huggingface_hub import upload_folder
+
+upload_folder(
+    folder_path="/path/to/local/space",
+    repo_id="username/my-cool-space",
+    repo_type="space",
+)
+```
+
+For more details, see the [upload guide](https://huggingface.co/docs/huggingface_hub/en/guides/upload). (A short end-to-end sketch combining these steps appears at the end of this README.)
+
+## Integrating with the Hub
+
+We're partnering with cool open source ML libraries to provide free model hosting and versioning. You can find the existing integrations [here](https://huggingface.co/docs/hub/libraries).
+
+The advantages are:
+
+- Free model or dataset hosting for libraries and their users.
+- Built-in file versioning, even with very large files, thanks to a git-based approach.
+- Serverless inference API for all models publicly available.
+- In-browser widgets to play with the uploaded models.
+- Anyone can upload a new model for your library; they just need to add the corresponding tag for the model to be discoverable.
+- Fast downloads! We use Cloudfront (a CDN) to geo-replicate downloads so they're blazing fast from anywhere on the globe.
+- Usage stats and more features to come.
+
+If you would like to integrate your library, feel free to open an issue to begin the discussion. We wrote a [step-by-step guide](https://huggingface.co/docs/hub/adding-a-library) with ❤️ showing how to do this integration.
+
+## Contributions (feature requests, bugs, etc.) are super welcome 💙💚💛💜🧡❤️
+
+Everyone is welcome to contribute, and we value everybody's contribution. Code is not the only way to help the community.
+Answering questions, helping others, reaching out and improving the documentation are immensely valuable to the community.
+We wrote a [contribution guide](https://github.com/huggingface/huggingface_hub/blob/main/CONTRIBUTING.md) to summarize
+how to get started to contribute to this repository.
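+As a recap of the quick start above, a minimal end-to-end sketch (the token is read from the `HF_TOKEN` environment variable, and the repo id below is a hypothetical placeholder rather than one of the repos shown earlier):
+
+```py
+import os
+
+from huggingface_hub import create_repo, hf_hub_download, login, upload_file
+
+# Programmatic equivalent of `huggingface-cli login --token ...`.
+login(token=os.environ["HF_TOKEN"])
+
+# Create a (hypothetical) model repo, upload a small file, then download it back
+# through the local cache.
+repo_id = create_repo(repo_id="my-username/super-cool-model", exist_ok=True).repo_id
+upload_file(path_or_fileobj=b"hello from huggingface_hub", path_in_repo="hello.txt", repo_id=repo_id)
+print(hf_hub_download(repo_id=repo_id, filename="hello.txt"))
+```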
+ + diff --git a/.cache/pip/http-v2/7/1/b/9/d/71b9df22187d5c54f1147d4ac0849d1438ec19aedc20363a3478b854.body b/.cache/pip/http-v2/7/1/b/9/d/71b9df22187d5c54f1147d4ac0849d1438ec19aedc20363a3478b854.body new file mode 100644 index 0000000000000000000000000000000000000000..f242f009c225b7773415bd4c4fd07d6ab8c4588d --- /dev/null +++ b/.cache/pip/http-v2/7/1/b/9/d/71b9df22187d5c54f1147d4ac0849d1438ec19aedc20363a3478b854.body @@ -0,0 +1,62 @@ +Metadata-Version: 2.1 +Name: docker-pycreds +Version: 0.4.0 +Summary: Python bindings for the docker credentials store API +Home-page: https://github.com/shin-/dockerpy-creds +Author: UNKNOWN +Author-email: UNKNOWN +License: Apache License 2.0 +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Other Environment +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Topic :: Utilities +Classifier: License :: OSI Approved :: Apache Software License +Requires-Dist: six (>=1.4.0) + +# docker-pycreds + +[![CircleCI](https://circleci.com/gh/shin-/dockerpy-creds/tree/master.svg?style=svg)](https://circleci.com/gh/shin-/dockerpy-creds/tree/master) + +Python bindings for the docker credentials store API + +## Credentials store info + +[Docker documentation page](https://docs.docker.com/engine/reference/commandline/login/#/credentials-store) + +## Requirements + +On top of the dependencies in `requirements.txt`, the `docker-credential` +executable for the platform must be installed on the user's system. 
+ +## API usage + +```python + +import dockerpycreds + +store = dockerpycreds.Store('secretservice') +store.store( + server='https://index.docker.io/v1/', username='johndoe', + secret='hunter2' +) + +print(store.list()) + +print(store.get('https://index.docker.io/v1/')) + + +store.erase('https://index.docker.io/v1/') +``` + + diff --git a/.cache/pip/http-v2/7/2/2/9/f/7229fb50bdca3f16cb03ca953b540cb67fb07fb971b675db32ae3239 b/.cache/pip/http-v2/7/2/2/9/f/7229fb50bdca3f16cb03ca953b540cb67fb07fb971b675db32ae3239 new file mode 100644 index 0000000000000000000000000000000000000000..c4c6124412fe3ff906a4541ec7b34d1401407a22 Binary files /dev/null and b/.cache/pip/http-v2/7/2/2/9/f/7229fb50bdca3f16cb03ca953b540cb67fb07fb971b675db32ae3239 differ diff --git a/.cache/pip/http-v2/7/2/2/9/f/7229fb50bdca3f16cb03ca953b540cb67fb07fb971b675db32ae3239.body b/.cache/pip/http-v2/7/2/2/9/f/7229fb50bdca3f16cb03ca953b540cb67fb07fb971b675db32ae3239.body new file mode 100644 index 0000000000000000000000000000000000000000..986b3b46b7c465f39edfcfefb921430ae9a58082 --- /dev/null +++ b/.cache/pip/http-v2/7/2/2/9/f/7229fb50bdca3f16cb03ca953b540cb67fb07fb971b675db32ae3239.body @@ -0,0 +1,522 @@ +Metadata-Version: 2.1 +Name: xxhash +Version: 3.5.0 +Summary: Python binding for xxHash +Home-page: https://github.com/ifduyue/python-xxhash +Author: Yue Du +Author-email: ifduyue@gmail.com +License: BSD +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: BSD License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE + +python-xxhash +============= + +.. image:: https://github.com/ifduyue/python-xxhash/actions/workflows/test.yml/badge.svg + :target: https://github.com/ifduyue/python-xxhash/actions/workflows/test.yml + :alt: Github Actions Status + +.. image:: https://img.shields.io/pypi/v/xxhash.svg + :target: https://pypi.org/project/xxhash/ + :alt: Latest Version + +.. image:: https://img.shields.io/pypi/pyversions/xxhash.svg + :target: https://pypi.org/project/xxhash/ + :alt: Supported Python versions + +.. image:: https://img.shields.io/pypi/l/xxhash.svg + :target: https://pypi.org/project/xxhash/ + :alt: License + + +.. _HMAC: http://en.wikipedia.org/wiki/Hash-based_message_authentication_code +.. _xxHash: https://github.com/Cyan4973/xxHash +.. _Cyan4973: https://github.com/Cyan4973 + + +xxhash is a Python binding for the xxHash_ library by `Yann Collet`__. + +__ Cyan4973_ + +Installation +------------ + +.. code-block:: bash + + $ pip install xxhash + +You can also install using conda: + +.. code-block:: bash + + $ conda install -c conda-forge python-xxhash + + +Installing From Source +~~~~~~~~~~~~~~~~~~~~~~~ + +.. code-block:: bash + + $ pip install --no-binary xxhash xxhash + +Prerequisites +++++++++++++++ + +On Debian/Ubuntu: + +.. code-block:: bash + + $ apt-get install python-dev gcc + +On CentOS/Fedora: + +.. 
code-block:: bash + + $ yum install python-devel gcc redhat-rpm-config + +Linking to libxxhash.so +~~~~~~~~~~~~~~~~~~~~~~~~ + +By default python-xxhash will use bundled xxHash, +we can change this by specifying ENV var ``XXHASH_LINK_SO``: + +.. code-block:: bash + + $ XXHASH_LINK_SO=1 pip install --no-binary xxhash xxhash + +Usage +-------- + +Module version and its backend xxHash library version can be retrieved using +the module properties ``VERSION`` AND ``XXHASH_VERSION`` respectively. + +.. code-block:: python + + >>> import xxhash + >>> xxhash.VERSION + '2.0.0' + >>> xxhash.XXHASH_VERSION + '0.8.0' + +This module is hashlib-compliant, which means you can use it in the same way as ``hashlib.md5``. + + | update() -- update the current digest with an additional string + | digest() -- return the current digest value + | hexdigest() -- return the current digest as a string of hexadecimal digits + | intdigest() -- return the current digest as an integer + | copy() -- return a copy of the current xxhash object + | reset() -- reset state + +md5 digest returns bytes, but the original xxh32 and xxh64 C APIs return integers. +While this module is made hashlib-compliant, ``intdigest()`` is also provided to +get the integer digest. + +Constructors for hash algorithms provided by this module are ``xxh32()`` and ``xxh64()``. + +For example, to obtain the digest of the byte string ``b'Nobody inspects the spammish repetition'``: + +.. code-block:: python + + >>> import xxhash + >>> x = xxhash.xxh32() + >>> x.update(b'Nobody inspects') + >>> x.update(b' the spammish repetition') + >>> x.digest() + b'\xe2);/' + >>> x.digest_size + 4 + >>> x.block_size + 16 + +More condensed: + +.. code-block:: python + + >>> xxhash.xxh32(b'Nobody inspects the spammish repetition').hexdigest() + 'e2293b2f' + >>> xxhash.xxh32(b'Nobody inspects the spammish repetition').digest() == x.digest() + True + +An optional seed (default is 0) can be used to alter the result predictably: + +.. code-block:: python + + >>> import xxhash + >>> xxhash.xxh64('xxhash').hexdigest() + '32dd38952c4bc720' + >>> xxhash.xxh64('xxhash', seed=20141025).hexdigest() + 'b559b98d844e0635' + >>> x = xxhash.xxh64(seed=20141025) + >>> x.update('xxhash') + >>> x.hexdigest() + 'b559b98d844e0635' + >>> x.intdigest() + 13067679811253438005 + +Be careful that xxh32 takes an unsigned 32-bit integer as seed, while xxh64 +takes an unsigned 64-bit integer. Although unsigned integer overflow is +defined behavior, it's better not to make it happen: + +.. code-block:: python + + >>> xxhash.xxh32('I want an unsigned 32-bit seed!', seed=0).hexdigest() + 'f7a35af8' + >>> xxhash.xxh32('I want an unsigned 32-bit seed!', seed=2**32).hexdigest() + 'f7a35af8' + >>> xxhash.xxh32('I want an unsigned 32-bit seed!', seed=1).hexdigest() + 'd8d4b4ba' + >>> xxhash.xxh32('I want an unsigned 32-bit seed!', seed=2**32+1).hexdigest() + 'd8d4b4ba' + >>> + >>> xxhash.xxh64('I want an unsigned 64-bit seed!', seed=0).hexdigest() + 'd4cb0a70a2b8c7c1' + >>> xxhash.xxh64('I want an unsigned 64-bit seed!', seed=2**64).hexdigest() + 'd4cb0a70a2b8c7c1' + >>> xxhash.xxh64('I want an unsigned 64-bit seed!', seed=1).hexdigest() + 'ce5087f12470d961' + >>> xxhash.xxh64('I want an unsigned 64-bit seed!', seed=2**64+1).hexdigest() + 'ce5087f12470d961' + + +``digest()`` returns bytes of the **big-endian** representation of the integer +digest: + +.. 
code-block:: python + + >>> import xxhash + >>> h = xxhash.xxh64() + >>> h.digest() + b'\xefF\xdb7Q\xd8\xe9\x99' + >>> h.intdigest().to_bytes(8, 'big') + b'\xefF\xdb7Q\xd8\xe9\x99' + >>> h.hexdigest() + 'ef46db3751d8e999' + >>> format(h.intdigest(), '016x') + 'ef46db3751d8e999' + >>> h.intdigest() + 17241709254077376921 + >>> int(h.hexdigest(), 16) + 17241709254077376921 + +Besides xxh32/xxh64 mentioned above, oneshot functions are also provided, +so we can avoid allocating XXH32/64 state on heap: + + | xxh32_digest(bytes, seed=0) + | xxh32_intdigest(bytes, seed=0) + | xxh32_hexdigest(bytes, seed=0) + | xxh64_digest(bytes, seed=0) + | xxh64_intdigest(bytes, seed=0) + | xxh64_hexdigest(bytes, seed=0) + +.. code-block:: python + + >>> import xxhash + >>> xxhash.xxh64('a').digest() == xxhash.xxh64_digest('a') + True + >>> xxhash.xxh64('a').intdigest() == xxhash.xxh64_intdigest('a') + True + >>> xxhash.xxh64('a').hexdigest() == xxhash.xxh64_hexdigest('a') + True + >>> xxhash.xxh64_hexdigest('xxhash', seed=20141025) + 'b559b98d844e0635' + >>> xxhash.xxh64_intdigest('xxhash', seed=20141025) + 13067679811253438005L + >>> xxhash.xxh64_digest('xxhash', seed=20141025) + '\xb5Y\xb9\x8d\x84N\x065' + +.. code-block:: python + + In [1]: import xxhash + + In [2]: %timeit xxhash.xxh64_hexdigest('xxhash') + 268 ns ± 24.1 ns per loop (mean ± std. dev. of 7 runs, 1000000 loops each) + + In [3]: %timeit xxhash.xxh64('xxhash').hexdigest() + 416 ns ± 17.3 ns per loop (mean ± std. dev. of 7 runs, 1000000 loops each) + + +XXH3 hashes are available since v2.0.0 (xxHash v0.8.0), they are: + +Streaming classes: + + | xxh3_64 + | xxh3_128 + +Oneshot functions: + + | xxh3_64_digest(bytes, seed=0) + | xxh3_64_intdigest(bytes, seed=0) + | xxh3_64_hexdigest(bytes, seed=0) + | xxh3_128_digest(bytes, seed=0) + | xxh3_128_intdigest(bytes, seed=0) + | xxh3_128_hexdigest(bytes, seed=0) + +And aliases: + + | xxh128 = xxh3_128 + | xxh128_digest = xxh3_128_digest + | xxh128_intdigest = xxh3_128_intdigest + | xxh128_hexdigest = xxh3_128_hexdigest + +Caveats +------- + +SEED OVERFLOW +~~~~~~~~~~~~~~ + +xxh32 takes an unsigned 32-bit integer as seed, and xxh64 takes +an unsigned 64-bit integer as seed. Make sure that the seed is greater than +or equal to ``0``. + +ENDIANNESS +~~~~~~~~~~~ + +As of python-xxhash 0.3.0, ``digest()`` returns bytes of the +**big-endian** representation of the integer digest. It used +to be little-endian. + +DONT USE XXHASH IN HMAC +~~~~~~~~~~~~~~~~~~~~~~~ +Though you can use xxhash as an HMAC_ hash function, but it's +highly recommended not to. + +xxhash is **NOT** a cryptographic hash function, it is a +non-cryptographic hash algorithm aimed at speed and quality. +Do not put xxhash in any position where cryptographic hash +functions are required. + + +Copyright and License +--------------------- + +Copyright (c) 2014-2024 Yue Du - https://github.com/ifduyue + +Licensed under `BSD 2-Clause License `_ + +CHANGELOG +----------- + +v3.5.0 2024-08-17 +~~~~~~~~~~~~~~~~~ + +- Build wheels for Python 3.13 + +v3.4.1 2023-10-05 +~~~~~~~~~~~~~~~~~ + +- Build wheels for Python 3.12 +- Remove setuptools_scm + +v3.4.0 2023-10-05 +~~~~~~~~~~~~~~~~~ + +*Yanked* due to wheels building problem. + +v3.3.0 2023-07-29 +~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to v0.8.2 +- Drop support for Python 3.6 + +v3.2.0 2022-12-28 +~~~~~~~~~~~~~~~~~ + +This is the last version to support Python 3.6 + +- Build Python 3.11 wheels. 
+- Remove setup.py test_suites, call unittest directly + +v3.1.0 2022-10-19 +~~~~~~~~~~~~~~~~~ + +- Type annotations. +- Enabled muslinux wheels building. + +v3.0.0 2022-02-25 +~~~~~~~~~~~~~~~~~ + +- New set `algorithms_available` lists all implemented algorithms in `xxhash` + package. +- Upgrade xxHash to v0.8.1. +- Drop support for EOL Python versions, require python >= 3.6 from now on. +- Migrate to github actions and build arm64 wheels for macOS. +- Always release GIL. + + +v2.0.2 2021-04-15 +~~~~~~~~~~~~~~~~~ + +- Fix Travis CI OSX dpl python2.7 get-pip.py error + +v2.0.1 2021-04-15 +~~~~~~~~~~~~~~~~~ + +- Only to trigger Python 3.9 wheels building. + +v2.0.0 2020-08-03 +~~~~~~~~~~~~~~~~~ + +- **Require xxHash version >= v0.8.0** +- Upgrade xxHash to v0.8.0 +- XXH3 hashes: `xxh3_64`, `xxh3_128`, and their oneshot functions + +v1.4.4 2020-06-20 +~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to v0.7.3 +- Stop using PEP393 deprecated APIs +- Use XXH(32|64)_canonicalFromHash to replace u2bytes and ull2bytes + +v1.4.3 2019-11-12 +~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to v0.7.2 +- Python 3.8 wheels + +v1.4.2 2019-10-13 +~~~~~~~~~~~~~~~~~ + +- Fixed: setup.py fails when reading README.rst and the default encoding is not UTF-8 + +v1.4.1 2019-08-27 +~~~~~~~~~~~~~~~~~ + +- Fixed: xxh3.h in missing from source tarball + +v1.4.0 2019-08-25 +~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to v0.7.1 + +v1.3.0 2018-10-21 +~~~~~~~~~~~~~~~~~ + +- Wheels are now built automatically +- Split CFFI variant into a separate package `ifduyue/python-xxhash-cffi `_ + +v1.2.0 2018-07-13 +~~~~~~~~~~~~~~~~~ + +- Add oneshot functions xxh{32,64}_{,int,hex}digest + +v1.1.0 2018-07-05 +~~~~~~~~~~~~~~~~~ + +- Allow input larger than 2GB +- Release the GIL on sufficiently large input +- Drop support for Python 3.2 + +v1.0.1 2017-03-02 +~~~~~~~~~~~~~~~~~~ + +- Free state actively, instead of delegating it to ffi.gc + +v1.0.0 2017-02-10 +~~~~~~~~~~~~~~~~~~ + +- Fixed copy() segfault +- Added CFFI variant + +v0.6.3 2017-02-10 +~~~~~~~~~~~~~~~~~~ + +- Fixed copy() segfault + +v0.6.2 2017-02-10 +~~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to v0.6.2 + +v0.6.1 2016-06-26 +~~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to v0.6.1 + +v0.5.0 2016-03-02 +~~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to v0.5.0 + +v0.4.3 2015-08-21 +~~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to r42 + +v0.4.1 2015-08-16 +~~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to r41 + +v0.4.0 2015-08-05 +~~~~~~~~~~~~~~~~~~ + +- Added method reset +- Upgrade xxHash to r40 + +v0.3.2 2015-01-27 +~~~~~~~~~~~~~~~~~~ + +- Fixed some typos in docstrings + +v0.3.1 2015-01-24 +~~~~~~~~~~~~~~~~~~ + +- Upgrade xxHash to r39 + +v0.3.0 2014-11-11 +~~~~~~~~~~~~~~~~~~ + +- Change digest() from little-endian representation to big-endian representation of the integer digest. + This change breaks compatibility (digest() results are different). + +v0.2.0 2014-10-25 +~~~~~~~~~~~~~~~~~~ + +- Make this package hashlib-compliant + +v0.1.3 2014-10-23 +~~~~~~~~~~~~~~~~~~ + +- Update xxHash to r37 + +v0.1.2 2014-10-19 +~~~~~~~~~~~~~~~~~~ + +- Improve: Check XXHnn_init() return value. +- Update xxHash to r36 + +v0.1.1 2014-08-07 +~~~~~~~~~~~~~~~~~~ + +- Improve: Can now be built with Visual C++ Compiler. + +v0.1.0 2014-08-05 +~~~~~~~~~~~~~~~~~~ + +- New: XXH32 and XXH64 type, which support partially update. 
+- Fix: build under Python 3.4 + +v0.0.2 2014-08-03 +~~~~~~~~~~~~~~~~~~ + +- NEW: Support Python 3 + +v0.0.1 2014-07-30 +~~~~~~~~~~~~~~~~~~ + +- NEW: xxh32 and xxh64 diff --git a/.cache/pip/http-v2/7/3/5/4/e/7354e15b7a2b590d713d4782bd16917fac9db3c087fa80d4d8dc0db5 b/.cache/pip/http-v2/7/3/5/4/e/7354e15b7a2b590d713d4782bd16917fac9db3c087fa80d4d8dc0db5 new file mode 100644 index 0000000000000000000000000000000000000000..1fdd6054fecbe50c342712dd007b5b9588fd9c6b Binary files /dev/null and b/.cache/pip/http-v2/7/3/5/4/e/7354e15b7a2b590d713d4782bd16917fac9db3c087fa80d4d8dc0db5 differ diff --git a/.cache/pip/http-v2/7/3/5/4/e/7354e15b7a2b590d713d4782bd16917fac9db3c087fa80d4d8dc0db5.body b/.cache/pip/http-v2/7/3/5/4/e/7354e15b7a2b590d713d4782bd16917fac9db3c087fa80d4d8dc0db5.body new file mode 100644 index 0000000000000000000000000000000000000000..b693d7fbe074534658f95f66b015b75b785023c9 --- /dev/null +++ b/.cache/pip/http-v2/7/3/5/4/e/7354e15b7a2b590d713d4782bd16917fac9db3c087fa80d4d8dc0db5.body @@ -0,0 +1,212 @@ +Metadata-Version: 2.1 +Name: pytest +Version: 8.3.3 +Summary: pytest: simple powerful testing with Python +Author: Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin, Others (See AUTHORS) +License: MIT +Project-URL: Changelog, https://docs.pytest.org/en/stable/changelog.html +Project-URL: Homepage, https://docs.pytest.org/en/latest/ +Project-URL: Source, https://github.com/pytest-dev/pytest +Project-URL: Tracker, https://github.com/pytest-dev/pytest/issues +Project-URL: Twitter, https://twitter.com/pytestdotorg +Keywords: test,unittest +Classifier: Development Status :: 6 - Mature +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Operating System :: Unix +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Testing +Classifier: Topic :: Utilities +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: AUTHORS +Requires-Dist: iniconfig +Requires-Dist: packaging +Requires-Dist: pluggy <2,>=1.5 +Requires-Dist: exceptiongroup >=1.0.0rc8 ; python_version < "3.11" +Requires-Dist: tomli >=1 ; python_version < "3.11" +Requires-Dist: colorama ; sys_platform == "win32" +Provides-Extra: dev +Requires-Dist: argcomplete ; extra == 'dev' +Requires-Dist: attrs >=19.2 ; extra == 'dev' +Requires-Dist: hypothesis >=3.56 ; extra == 'dev' +Requires-Dist: mock ; extra == 'dev' +Requires-Dist: pygments >=2.7.2 ; extra == 'dev' +Requires-Dist: requests ; extra == 'dev' +Requires-Dist: setuptools ; extra == 'dev' +Requires-Dist: xmlschema ; extra == 'dev' + +.. image:: https://github.com/pytest-dev/pytest/raw/main/doc/en/img/pytest_logo_curves.svg + :target: https://docs.pytest.org/en/stable/ + :align: center + :height: 200 + :alt: pytest + + +------ + +.. image:: https://img.shields.io/pypi/v/pytest.svg + :target: https://pypi.org/project/pytest/ + +.. image:: https://img.shields.io/conda/vn/conda-forge/pytest.svg + :target: https://anaconda.org/conda-forge/pytest + +.. 
image:: https://img.shields.io/pypi/pyversions/pytest.svg + :target: https://pypi.org/project/pytest/ + +.. image:: https://codecov.io/gh/pytest-dev/pytest/branch/main/graph/badge.svg + :target: https://codecov.io/gh/pytest-dev/pytest + :alt: Code coverage Status + +.. image:: https://github.com/pytest-dev/pytest/actions/workflows/test.yml/badge.svg + :target: https://github.com/pytest-dev/pytest/actions?query=workflow%3Atest + +.. image:: https://results.pre-commit.ci/badge/github/pytest-dev/pytest/main.svg + :target: https://results.pre-commit.ci/latest/github/pytest-dev/pytest/main + :alt: pre-commit.ci status + +.. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg + :target: https://www.codetriage.com/pytest-dev/pytest + +.. image:: https://readthedocs.org/projects/pytest/badge/?version=latest + :target: https://pytest.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +.. image:: https://img.shields.io/badge/Discord-pytest--dev-blue + :target: https://discord.com/invite/pytest-dev + :alt: Discord + +.. image:: https://img.shields.io/badge/Libera%20chat-%23pytest-orange + :target: https://web.libera.chat/#pytest + :alt: Libera chat + + +The ``pytest`` framework makes it easy to write small tests, yet +scales to support complex functional testing for applications and libraries. + +An example of a simple test: + +.. code-block:: python + + # content of test_sample.py + def inc(x): + return x + 1 + + + def test_answer(): + assert inc(3) == 5 + + +To execute it:: + + $ pytest + ============================= test session starts ============================= + collected 1 items + + test_sample.py F + + ================================== FAILURES =================================== + _________________________________ test_answer _________________________________ + + def test_answer(): + > assert inc(3) == 5 + E assert 4 == 5 + E + where 4 = inc(3) + + test_sample.py:5: AssertionError + ========================== 1 failed in 0.04 seconds =========================== + + +Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started `_ for more examples. + + +Features +-------- + +- Detailed info on failing `assert statements `_ (no need to remember ``self.assert*`` names) + +- `Auto-discovery + `_ + of test modules and functions + +- `Modular fixtures `_ for + managing small or parametrized long-lived test resources + +- Can run `unittest `_ (or trial) + test suites out of the box + +- Python 3.8+ or PyPy3 + +- Rich plugin architecture, with over 1300+ `external plugins `_ and thriving community + + +Documentation +------------- + +For full documentation, including installation, tutorials and PDF documents, please see https://docs.pytest.org/en/stable/. + + +Bugs/Requests +------------- + +Please use the `GitHub issue tracker `_ to submit bugs or request features. + + +Changelog +--------- + +Consult the `Changelog `__ page for fixes and enhancements of each version. + + +Support pytest +-------------- + +`Open Collective`_ is an online funding platform for open and transparent communities. +It provides tools to raise money and share your finances in full transparency. + +It is the platform of choice for individuals and companies that want to make one-time or +monthly donations directly to the project. + +See more details in the `pytest collective`_. + +.. _Open Collective: https://opencollective.com +.. 
_pytest collective: https://opencollective.com/pytest + + +pytest for enterprise +--------------------- + +Available as part of the Tidelift Subscription. + +The maintainers of pytest and thousands of other packages are working with Tidelift to deliver commercial support and +maintenance for the open source dependencies you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. + +`Learn more. `_ + +Security +^^^^^^^^ + +pytest has never been associated with a security vulnerability, but in any case, to report a +security vulnerability please use the `Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. + + +License +------- + +Copyright Holger Krekel and others, 2004. + +Distributed under the terms of the `MIT`_ license, pytest is free and open source software. + +.. _`MIT`: https://github.com/pytest-dev/pytest/blob/main/LICENSE diff --git a/.cache/pip/http-v2/7/7/6/7/2/7767287c95ec4491394e4204f2ba3be9eb9e3ed9ca0ffd45d421b772 b/.cache/pip/http-v2/7/7/6/7/2/7767287c95ec4491394e4204f2ba3be9eb9e3ed9ca0ffd45d421b772 new file mode 100644 index 0000000000000000000000000000000000000000..16632e465483fae9ce90cb959bf7a668924667b0 Binary files /dev/null and b/.cache/pip/http-v2/7/7/6/7/2/7767287c95ec4491394e4204f2ba3be9eb9e3ed9ca0ffd45d421b772 differ diff --git a/.cache/pip/http-v2/7/7/6/7/2/7767287c95ec4491394e4204f2ba3be9eb9e3ed9ca0ffd45d421b772.body b/.cache/pip/http-v2/7/7/6/7/2/7767287c95ec4491394e4204f2ba3be9eb9e3ed9ca0ffd45d421b772.body new file mode 100644 index 0000000000000000000000000000000000000000..38ed885c6d92ad5564a56fa0612b41be6897009e Binary files /dev/null and b/.cache/pip/http-v2/7/7/6/7/2/7767287c95ec4491394e4204f2ba3be9eb9e3ed9ca0ffd45d421b772.body differ diff --git a/.cache/pip/http-v2/7/c/c/b/9/7ccb994e5cb08d1aee93acb83ae5881083c4a349402f61c9f7d304f8 b/.cache/pip/http-v2/7/c/c/b/9/7ccb994e5cb08d1aee93acb83ae5881083c4a349402f61c9f7d304f8 new file mode 100644 index 0000000000000000000000000000000000000000..775353995ec755f8ae79a20c6a41a87606187da9 Binary files /dev/null and b/.cache/pip/http-v2/7/c/c/b/9/7ccb994e5cb08d1aee93acb83ae5881083c4a349402f61c9f7d304f8 differ diff --git a/.cache/pip/http-v2/7/c/c/b/9/7ccb994e5cb08d1aee93acb83ae5881083c4a349402f61c9f7d304f8.body b/.cache/pip/http-v2/7/c/c/b/9/7ccb994e5cb08d1aee93acb83ae5881083c4a349402f61c9f7d304f8.body new file mode 100644 index 0000000000000000000000000000000000000000..f0cb60b4db354adaab867dcf72cd432a765c7a80 Binary files /dev/null and b/.cache/pip/http-v2/7/c/c/b/9/7ccb994e5cb08d1aee93acb83ae5881083c4a349402f61c9f7d304f8.body differ diff --git a/.cache/pip/http-v2/a/1/5/a/3/a15a3a227f3412287f03121d8de35a7e4b8889a19d0448af719d96c1 b/.cache/pip/http-v2/a/1/5/a/3/a15a3a227f3412287f03121d8de35a7e4b8889a19d0448af719d96c1 new file mode 100644 index 0000000000000000000000000000000000000000..276a223f72132467c9809df9c648839bc2e10344 Binary files /dev/null and b/.cache/pip/http-v2/a/1/5/a/3/a15a3a227f3412287f03121d8de35a7e4b8889a19d0448af719d96c1 differ diff --git a/.cache/pip/http-v2/a/1/5/a/3/a15a3a227f3412287f03121d8de35a7e4b8889a19d0448af719d96c1.body b/.cache/pip/http-v2/a/1/5/a/3/a15a3a227f3412287f03121d8de35a7e4b8889a19d0448af719d96c1.body new file mode 100644 index 0000000000000000000000000000000000000000..7b6a4a953c6ca27715f705685c2f8c9fbb00ea03 Binary files /dev/null and b/.cache/pip/http-v2/a/1/5/a/3/a15a3a227f3412287f03121d8de35a7e4b8889a19d0448af719d96c1.body differ diff --git 
a/.cache/pip/http-v2/a/1/9/5/3/a19537d3cf37c122db841d6fe4cd322bc10d1a558bb00d146b85cb9a.body b/.cache/pip/http-v2/a/1/9/5/3/a19537d3cf37c122db841d6fe4cd322bc10d1a558bb00d146b85cb9a.body new file mode 100644 index 0000000000000000000000000000000000000000..0f7a2056e909d98daa6cf0afb9c8fdc41e825c74 Binary files /dev/null and b/.cache/pip/http-v2/a/1/9/5/3/a19537d3cf37c122db841d6fe4cd322bc10d1a558bb00d146b85cb9a.body differ diff --git a/.cache/pip/http-v2/a/6/6/7/4/a6674e44f8dbb270324765d1fb568b86858877aed299a2428f81e802.body b/.cache/pip/http-v2/a/6/6/7/4/a6674e44f8dbb270324765d1fb568b86858877aed299a2428f81e802.body new file mode 100644 index 0000000000000000000000000000000000000000..dc57539c41851ef838541e586f029dc345374abb Binary files /dev/null and b/.cache/pip/http-v2/a/6/6/7/4/a6674e44f8dbb270324765d1fb568b86858877aed299a2428f81e802.body differ diff --git a/.cache/pip/http-v2/a/8/1/3/d/a813d38208b26d1643cfecf26bd5ddeb869c95933e4bf304b8f6f1bb.body b/.cache/pip/http-v2/a/8/1/3/d/a813d38208b26d1643cfecf26bd5ddeb869c95933e4bf304b8f6f1bb.body new file mode 100644 index 0000000000000000000000000000000000000000..344345f4c22cf91766ee26bc3bf91d625e7330f7 --- /dev/null +++ b/.cache/pip/http-v2/a/8/1/3/d/a813d38208b26d1643cfecf26bd5ddeb869c95933e4bf304b8f6f1bb.body @@ -0,0 +1,1573 @@ +Metadata-Version: 2.1 +Name: pandas +Version: 2.2.3 +Summary: Powerful data structures for data analysis, time series, and statistics +Home-page: https://pandas.pydata.org +Author-Email: The Pandas Development Team +License: BSD 3-Clause License + + Copyright (c) 2008-2011, AQR Capital Management, LLC, Lambda Foundry, Inc. and PyData Development Team + All rights reserved. + + Copyright (c) 2011-2023, Open source contributors. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + Copyright (c) 2010-2019 Keith Goodman + Copyright (c) 2019 Bottleneck Developers + All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE.Copyright 2017- Paul Ganssle + Copyright 2017- dateutil contributors (see AUTHORS file) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + The above license applies to all contributions after 2017-12-01, as well as + all contributions that have been re-licensed (see AUTHORS file for the list of + contributors who have re-licensed their code). + -------------------------------------------------------------------------------- + dateutil - Extensions to the standard Python datetime module. + + Copyright (c) 2003-2011 - Gustavo Niemeyer + Copyright (c) 2012-2014 - Tomi Pieviläinen + Copyright (c) 2014-2016 - Yaron de Leeuw + Copyright (c) 2015- - Paul Ganssle + Copyright (c) 2015- - dateutil contributors (see AUTHORS file) + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + The above BSD License Applies to all code, even that also covered by Apache 2.0.# MIT License + + Copyright (c) 2019 Hadley Wickham; RStudio; and Evan Miller + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + Based on http://opensource.org/licenses/MIT + + This is a template. Complete and ship as file LICENSE the following 2 + lines (only) + + YEAR: + COPYRIGHT HOLDER: + + and specify as + + License: MIT + file LICENSE + + Copyright (c) , + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ The MIT License + + Copyright (c) 2008- Attractive Chaos + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN + ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE.musl as a whole is licensed under the following standard MIT license: + + ---------------------------------------------------------------------- + Copyright © 2005-2020 Rich Felker, et al. + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + ---------------------------------------------------------------------- + + Authors/contributors include: + + A. Wilcox + Ada Worcester + Alex Dowad + Alex Suykov + Alexander Monakov + Andre McCurdy + Andrew Kelley + Anthony G. Basile + Aric Belsito + Arvid Picciani + Bartosz Brachaczek + Benjamin Peterson + Bobby Bingham + Boris Brezillon + Brent Cook + Chris Spiegel + Clément Vasseur + Daniel Micay + Daniel Sabogal + Daurnimator + David Carlier + David Edelsohn + Denys Vlasenko + Dmitry Ivanov + Dmitry V. Levin + Drew DeVault + Emil Renner Berthing + Fangrui Song + Felix Fietkau + Felix Janda + Gianluca Anzolin + Hauke Mehrtens + He X + Hiltjo Posthuma + Isaac Dunham + Jaydeep Patil + Jens Gustedt + Jeremy Huntwork + Jo-Philipp Wich + Joakim Sindholt + John Spencer + Julien Ramseier + Justin Cormack + Kaarle Ritvanen + Khem Raj + Kylie McClain + Leah Neukirchen + Luca Barbato + Luka Perkov + M Farkas-Dyck (Strake) + Mahesh Bodapati + Markus Wichmann + Masanori Ogino + Michael Clark + Michael Forney + Mikhail Kremnyov + Natanael Copa + Nicholas J. 
Kain + orc + Pascal Cuoq + Patrick Oppenlander + Petr Hosek + Petr Skocik + Pierre Carrier + Reini Urban + Rich Felker + Richard Pennington + Ryan Fairfax + Samuel Holland + Segev Finer + Shiz + sin + Solar Designer + Stefan Kristiansson + Stefan O'Rear + Szabolcs Nagy + Timo Teräs + Trutz Behn + Valentin Ochs + Will Dietz + William Haddon + William Pitcock + + Portions of this software are derived from third-party works licensed + under terms compatible with the above MIT license: + + The TRE regular expression implementation (src/regex/reg* and + src/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed + under a 2-clause BSD license (license text in the source files). The + included version has been heavily modified by Rich Felker in 2012, in + the interests of size, simplicity, and namespace cleanliness. + + Much of the math library code (src/math/* and src/complex/*) is + Copyright © 1993,2004 Sun Microsystems or + Copyright © 2003-2011 David Schultz or + Copyright © 2003-2009 Steven G. Kargl or + Copyright © 2003-2009 Bruce D. Evans or + Copyright © 2008 Stephen L. Moshier or + Copyright © 2017-2018 Arm Limited + and labelled as such in comments in the individual source files. All + have been licensed under extremely permissive terms. + + The ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008 + The Android Open Source Project and is licensed under a two-clause BSD + license. It was taken from Bionic libc, used on Android. + + The AArch64 memcpy and memset code (src/string/aarch64/*) are + Copyright © 1999-2019, Arm Limited. + + The implementation of DES for crypt (src/crypt/crypt_des.c) is + Copyright © 1994 David Burren. It is licensed under a BSD license. + + The implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was + originally written by Solar Designer and placed into the public + domain. The code also comes with a fallback permissive license for use + in jurisdictions that may not recognize the public domain. + + The smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011 + Valentin Ochs and is licensed under an MIT-style license. + + The x86_64 port was written by Nicholas J. Kain and is licensed under + the standard MIT terms. + + The mips and microblaze ports were originally written by Richard + Pennington for use in the ellcc project. The original code was adapted + by Rich Felker for build system and code conventions during upstream + integration. It is licensed under the standard MIT terms. + + The mips64 port was contributed by Imagination Technologies and is + licensed under the standard MIT terms. + + The powerpc port was also originally written by Richard Pennington, + and later supplemented and integrated by John Spencer. It is licensed + under the standard MIT terms. + + All other files which have no copyright comments are original works + produced specifically for use as part of this library, written either + by Rich Felker, the main author of the library, or by one or more + contibutors listed above. Details on authorship of individual files + can be found in the git version control history of the project. The + omission of copyright and license comments in each file is in the + interest of source tree size. 
+ + In addition, permission is hereby granted for all public header files + (include/* and arch/*/bits/*) and crt files intended to be linked into + applications (crt/*, ldso/dlstart.c, and arch/*/crt_arch.h) to omit + the copyright notice and permission notice otherwise required by the + license, and to use these files without any requirement of + attribution. These files include substantial contributions from: + + Bobby Bingham + John Spencer + Nicholas J. Kain + Rich Felker + Richard Pennington + Stefan Kristiansson + Szabolcs Nagy + + all of whom have explicitly granted such permission. + + This file previously contained text expressing a belief that most of + the files covered by the above exception were sufficiently trivial not + to be subject to copyright, resulting in confusion over whether it + negated the permissions granted in the license. In the spirit of + permissive licensing, and of not having licensing issues being an + obstacle to adoption, that text has been removed.Copyright (c) 2005-2023, NumPy Developers. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the NumPy Developers nor the names of any + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + + Copyright (c) Donald Stufft and individual contributors. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.A. 
HISTORY OF THE SOFTWARE + ========================== + + Python was created in the early 1990s by Guido van Rossum at Stichting + Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands + as a successor of a language called ABC. Guido remains Python's + principal author, although it includes many contributions from others. + + In 1995, Guido continued his work on Python at the Corporation for + National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) + in Reston, Virginia where he released several versions of the + software. + + In May 2000, Guido and the Python core development team moved to + BeOpen.com to form the BeOpen PythonLabs team. In October of the same + year, the PythonLabs team moved to Digital Creations, which became + Zope Corporation. In 2001, the Python Software Foundation (PSF, see + https://www.python.org/psf/) was formed, a non-profit organization + created specifically to own Python-related Intellectual Property. + Zope Corporation was a sponsoring member of the PSF. + + All Python releases are Open Source (see https://opensource.org for + the Open Source Definition). Historically, most, but not all, Python + releases have also been GPL-compatible; the table below summarizes + the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + + Footnotes: + + (1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + + (2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + + Thanks to the many outside volunteers who have worked under Guido's + direction to make these releases possible. + + + B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON + =============================================================== + + Python software and documentation are licensed under the + Python Software Foundation License Version 2. + + Starting with Python 3.8.6, examples, recipes, and other code in + the documentation are dual licensed under the PSF License Version 2 + and the Zero-Clause BSD license. + + Some software incorporated into Python is under different licenses. + The licenses are listed with code falling under that license. + + + PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 + -------------------------------------------- + + 1. This LICENSE AGREEMENT is between the Python Software Foundation + ("PSF"), and the Individual or Organization ("Licensee") accessing and + otherwise using this software ("Python") in source or binary form and + its associated documentation. + + 2. 
Subject to the terms and conditions of this License Agreement, PSF hereby + grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, + analyze, test, perform and/or display publicly, prepare derivative works, + distribute, and otherwise use Python alone or in any derivative version, + provided, however, that PSF's License Agreement and PSF's notice of copyright, + i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, + 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; + All Rights Reserved" are retained in Python alone or in any derivative version + prepared by Licensee. + + 3. In the event Licensee prepares a derivative work that is based on + or incorporates Python or any part thereof, and wants to make + the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary of + the changes made to Python. + + 4. PSF is making Python available to Licensee on an "AS IS" + basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT + INFRINGE ANY THIRD PARTY RIGHTS. + + 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS + A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, + OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between PSF and + Licensee. This License Agreement does not grant permission to use PSF + trademarks or trade name in a trademark sense to endorse or promote + products or services of Licensee, or any third party. + + 8. By copying, installing or otherwise using Python, Licensee + agrees to be bound by the terms and conditions of this License + Agreement. + + + BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 + ------------------------------------------- + + BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + + 1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an + office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the + Individual or Organization ("Licensee") accessing and otherwise using + this software in source or binary form and its associated + documentation ("the Software"). + + 2. Subject to the terms and conditions of this BeOpen Python License + Agreement, BeOpen hereby grants Licensee a non-exclusive, + royalty-free, world-wide license to reproduce, analyze, test, perform + and/or display publicly, prepare derivative works, distribute, and + otherwise use the Software alone or in any derivative version, + provided, however, that the BeOpen Python License is retained in the + Software, alone or in any derivative version prepared by Licensee. + + 3. BeOpen is making the Software available to Licensee on an "AS IS" + basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT + INFRINGE ANY THIRD PARTY RIGHTS. + + 4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE + SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS + AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY + DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 5. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 6. This License Agreement shall be governed by and interpreted in all + respects by the law of the State of California, excluding conflict of + law provisions. Nothing in this License Agreement shall be deemed to + create any relationship of agency, partnership, or joint venture + between BeOpen and Licensee. This License Agreement does not grant + permission to use BeOpen trademarks or trade names in a trademark + sense to endorse or promote products or services of Licensee, or any + third party. As an exception, the "BeOpen Python" logos available at + http://www.pythonlabs.com/logos.html may be used according to the + permissions granted on that web page. + + 7. By copying, installing or otherwise using the software, Licensee + agrees to be bound by the terms and conditions of this License + Agreement. + + + CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 + --------------------------------------- + + 1. This LICENSE AGREEMENT is between the Corporation for National + Research Initiatives, having an office at 1895 Preston White Drive, + Reston, VA 20191 ("CNRI"), and the Individual or Organization + ("Licensee") accessing and otherwise using Python 1.6.1 software in + source or binary form and its associated documentation. + + 2. Subject to the terms and conditions of this License Agreement, CNRI + hereby grants Licensee a nonexclusive, royalty-free, world-wide + license to reproduce, analyze, test, perform and/or display publicly, + prepare derivative works, distribute, and otherwise use Python 1.6.1 + alone or in any derivative version, provided, however, that CNRI's + License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) + 1995-2001 Corporation for National Research Initiatives; All Rights + Reserved" are retained in Python 1.6.1 alone or in any derivative + version prepared by Licensee. Alternately, in lieu of CNRI's License + Agreement, Licensee may substitute the following text (omitting the + quotes): "Python 1.6.1 is made available subject to the terms and + conditions in CNRI's License Agreement. This Agreement together with + Python 1.6.1 may be located on the internet using the following + unique, persistent identifier (known as a handle): 1895.22/1013. This + Agreement may also be obtained from a proxy server on the internet + using the following URL: http://hdl.handle.net/1895.22/1013". + + 3. In the event Licensee prepares a derivative work that is based on + or incorporates Python 1.6.1 or any part thereof, and wants to make + the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary of + the changes made to Python 1.6.1. + + 4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" + basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT + INFRINGE ANY THIRD PARTY RIGHTS. + + 5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON + 1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS + A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, + OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. This License Agreement shall be governed by the federal + intellectual property law of the United States, including without + limitation the federal copyright law, and, to the extent such + U.S. federal law does not apply, by the law of the Commonwealth of + Virginia, excluding Virginia's conflict of law provisions. + Notwithstanding the foregoing, with regard to derivative works based + on Python 1.6.1 that incorporate non-separable material that was + previously distributed under the GNU General Public License (GPL), the + law of the Commonwealth of Virginia shall govern this License + Agreement only as to issues arising under or with respect to + Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this + License Agreement shall be deemed to create any relationship of + agency, partnership, or joint venture between CNRI and Licensee. This + License Agreement does not grant permission to use CNRI trademarks or + trade name in a trademark sense to endorse or promote products or + services of Licensee, or any third party. + + 8. By clicking on the "ACCEPT" button where indicated, or by copying, + installing or otherwise using Python 1.6.1, Licensee agrees to be + bound by the terms and conditions of this License Agreement. + + ACCEPT + + + CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 + -------------------------------------------------- + + Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, + The Netherlands. All rights reserved. + + Permission to use, copy, modify, and distribute this software and its + documentation for any purpose and without fee is hereby granted, + provided that the above copyright notice appear in all copies and that + both that copyright notice and this permission notice appear in + supporting documentation, and that the name of Stichting Mathematisch + Centrum or CWI not be used in advertising or publicity pertaining to + distribution of the software without specific, written prior + permission. + + STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO + THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND + FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE + FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT + OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION + ---------------------------------------------------------------------- + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted. 
+ + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. + Copyright (c) 2014, Al Sweigart + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.Copyright (c) 2017 Anthony Sottile + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE.Copyright (c) 2015-2019 Jared Hobbs + + Permission is hereby granted, free of charge, to any person obtaining a copy of + this software and associated documentation files (the "Software"), to deal in + the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies + of the Software, and to permit persons to whom the Software is furnished to do + so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE.Developed by ESN, an Electronic Arts Inc. studio. + Copyright (c) 2014, Electronic Arts Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of ESN, Electronic Arts Inc. nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL ELECTRONIC ARTS INC. BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + ---- + + Portions of code from MODP_ASCII - Ascii transformations (upper/lower, etc) + https://github.com/client9/stringencoders + + Copyright 2005, 2006, 2007 + Nick Galbreath -- nickg [at] modp [dot] com + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + Neither the name of the modp.com nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + This is the standard "new" BSD license: + http://www.opensource.org/licenses/bsd-license.php + + https://github.com/client9/stringencoders/blob/cfd5c1507325ae497ea9bacdacba12c0ffd79d30/COPYING + + ---- + + Numeric decoder derived from from TCL library + https://opensource.apple.com/source/tcl/tcl-14/tcl/license.terms + * Copyright (c) 1988-1993 The Regents of the University of California. + * Copyright (c) 1994 Sun Microsystems, Inc. + + This software is copyrighted by the Regents of the University of + California, Sun Microsystems, Inc., Scriptics Corporation, ActiveState + Corporation and other parties. The following terms apply to all files + associated with the software unless explicitly disclaimed in + individual files. + + The authors hereby grant permission to use, copy, modify, distribute, + and license this software and its documentation for any purpose, provided + that existing copyright notices are retained in all copies and that this + notice is included verbatim in any distributions. No written agreement, + license, or royalty fee is required for any of the authorized uses. + Modifications to this software may be copyrighted by their authors + and need not follow the licensing terms described here, provided that + the new terms are clearly indicated on the first page of each file where + they apply. + + IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY + FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY + DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES, + INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE + IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE + NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR + MODIFICATIONS. + + GOVERNMENT USE: If you are acquiring this software on behalf of the + U.S. government, the Government shall have only "Restricted Rights" + in the software and related documentation as defined in the Federal + Acquisition Regulations (FARs) in Clause 52.227.19 (c) (2). 
If you + are acquiring the software on behalf of the Department of Defense, the + software shall be classified as "Commercial Computer Software" and the + Government shall have only "Restricted Rights" as defined in Clause + 252.227-7013 (c) (1) of DFARs. Notwithstanding the foregoing, the + authors grant the U.S. Government and others acting in its behalf + permission to use and distribute the software in accordance with the + terms specified in this license.Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, and + distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by the copyright + owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all other entities + that control, are controlled by, or are under common control with that entity. + For the purposes of this definition, "control" means (i) the power, direct or + indirect, to cause the direction or management of such entity, whether by + contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity exercising + permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, including + but not limited to software source code, documentation source, and configuration + files. + + "Object" form shall mean any form resulting from mechanical transformation or + translation of a Source form, including but not limited to compiled object code, + generated documentation, and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or Object form, made + available under the License, as indicated by a copyright notice that is included + in or attached to the work (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object form, that + is based on (or derived from) the Work and for which the editorial revisions, + annotations, elaborations, or other modifications represent, as a whole, an + original work of authorship. For the purposes of this License, Derivative Works + shall not include works that remain separable from, or merely link (or bind by + name) to the interfaces of, the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including the original version + of the Work and any modifications or additions to that Work or Derivative Works + thereof, that is intentionally submitted to Licensor for inclusion in the Work + by the copyright owner or by an individual or Legal Entity authorized to submit + on behalf of the copyright owner. For the purposes of this definition, + "submitted" means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, and + issue tracking systems that are managed by, or on behalf of, the Licensor for + the purpose of discussing and improving the Work, but excluding communication + that is conspicuously marked or otherwise designated in writing by the copyright + owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity on behalf + of whom a Contribution has been received by Licensor and subsequently + incorporated within the Work. + + 2. Grant of Copyright License. + + Subject to the terms and conditions of this License, each Contributor hereby + grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, + irrevocable copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the Work and such + Derivative Works in Source or Object form. + + 3. Grant of Patent License. + + Subject to the terms and conditions of this License, each Contributor hereby + grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, + irrevocable (except as stated in this section) patent license to make, have + made, use, offer to sell, sell, import, and otherwise transfer the Work, where + such license applies only to those patent claims licensable by such Contributor + that are necessarily infringed by their Contribution(s) alone or by combination + of their Contribution(s) with the Work to which such Contribution(s) was + submitted. If You institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work or a + Contribution incorporated within the Work constitutes direct or contributory + patent infringement, then any patent licenses granted to You under this License + for that Work shall terminate as of the date such litigation is filed. + + 4. Redistribution. + + You may reproduce and distribute copies of the Work or Derivative Works thereof + in any medium, with or without modifications, and in Source or Object form, + provided that You meet the following conditions: + + You must give any other recipients of the Work or Derivative Works a copy of + this License; and + You must cause any modified files to carry prominent notices stating that You + changed the files; and + You must retain, in the Source form of any Derivative Works that You distribute, + all copyright, patent, trademark, and attribution notices from the Source form + of the Work, excluding those notices that do not pertain to any part of the + Derivative Works; and + If the Work includes a "NOTICE" text file as part of its distribution, then any + Derivative Works that You distribute must include a readable copy of the + attribution notices contained within such NOTICE file, excluding those notices + that do not pertain to any part of the Derivative Works, in at least one of the + following places: within a NOTICE text file distributed as part of the + Derivative Works; within the Source form or documentation, if provided along + with the Derivative Works; or, within a display generated by the Derivative + Works, if and wherever such third-party notices normally appear. The contents of + the NOTICE file are for informational purposes only and do not modify the + License. You may add Your own attribution notices within Derivative Works that + You distribute, alongside or as an addendum to the NOTICE text from the Work, + provided that such additional attribution notices cannot be construed as + modifying the License. 
+ You may add Your own copyright statement to Your modifications and may provide + additional or different license terms and conditions for use, reproduction, or + distribution of Your modifications, or for any such Derivative Works as a whole, + provided Your use, reproduction, and distribution of the Work otherwise complies + with the conditions stated in this License. + + 5. Submission of Contributions. + + Unless You explicitly state otherwise, any Contribution intentionally submitted + for inclusion in the Work by You to the Licensor shall be under the terms and + conditions of this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify the terms of + any separate license agreement you may have executed with Licensor regarding + such Contributions. + + 6. Trademarks. + + This License does not grant permission to use the trade names, trademarks, + service marks, or product names of the Licensor, except as required for + reasonable and customary use in describing the origin of the Work and + reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. + + Unless required by applicable law or agreed to in writing, Licensor provides the + Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, + including, without limitation, any warranties or conditions of TITLE, + NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are + solely responsible for determining the appropriateness of using or + redistributing the Work and assume any risks associated with Your exercise of + permissions under this License. + + 8. Limitation of Liability. + + In no event and under no legal theory, whether in tort (including negligence), + contract, or otherwise, unless required by applicable law (such as deliberate + and grossly negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, incidental, + or consequential damages of any character arising as a result of this License or + out of the use or inability to use the Work (including but not limited to + damages for loss of goodwill, work stoppage, computer failure or malfunction, or + any and all other commercial damages or losses), even if such Contributor has + been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. + + While redistributing the Work or Derivative Works thereof, You may choose to + offer, and charge a fee for, acceptance of support, warranty, indemnity, or + other liability obligations and/or rights consistent with this License. However, + in accepting such obligations, You may act only on Your own behalf and on Your + sole responsibility, not on behalf of any other Contributor, and only if You + agree to indemnify, defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason of your + accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work + + To apply the Apache License to your work, attach the following boilerplate + notice, with the fields enclosed by brackets "[]" replaced with your own + identifying information. (Don't include the brackets!) The text should be + enclosed in the appropriate comment syntax for the file format. 
We also + recommend that a file or class name and description of purpose be included on + the same "printed page" as the copyright notice for easier identification within + third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Scientific/Engineering +Project-URL: Homepage, https://pandas.pydata.org +Project-URL: Documentation, https://pandas.pydata.org/docs/ +Project-URL: Repository, https://github.com/pandas-dev/pandas +Requires-Python: >=3.9 +Requires-Dist: numpy>=1.22.4; python_version < "3.11" +Requires-Dist: numpy>=1.23.2; python_version == "3.11" +Requires-Dist: numpy>=1.26.0; python_version >= "3.12" +Requires-Dist: python-dateutil>=2.8.2 +Requires-Dist: pytz>=2020.1 +Requires-Dist: tzdata>=2022.7 +Requires-Dist: hypothesis>=6.46.1; extra == "test" +Requires-Dist: pytest>=7.3.2; extra == "test" +Requires-Dist: pytest-xdist>=2.2.0; extra == "test" +Requires-Dist: pyarrow>=10.0.1; extra == "pyarrow" +Requires-Dist: bottleneck>=1.3.6; extra == "performance" +Requires-Dist: numba>=0.56.4; extra == "performance" +Requires-Dist: numexpr>=2.8.4; extra == "performance" +Requires-Dist: scipy>=1.10.0; extra == "computation" +Requires-Dist: xarray>=2022.12.0; extra == "computation" +Requires-Dist: fsspec>=2022.11.0; extra == "fss" +Requires-Dist: s3fs>=2022.11.0; extra == "aws" +Requires-Dist: gcsfs>=2022.11.0; extra == "gcp" +Requires-Dist: pandas-gbq>=0.19.0; extra == "gcp" +Requires-Dist: odfpy>=1.4.1; extra == "excel" +Requires-Dist: openpyxl>=3.1.0; extra == "excel" +Requires-Dist: python-calamine>=0.1.7; extra == "excel" +Requires-Dist: pyxlsb>=1.0.10; extra == "excel" +Requires-Dist: xlrd>=2.0.1; extra == "excel" +Requires-Dist: xlsxwriter>=3.0.5; extra == "excel" +Requires-Dist: pyarrow>=10.0.1; extra == "parquet" +Requires-Dist: pyarrow>=10.0.1; extra == "feather" +Requires-Dist: tables>=3.8.0; extra == "hdf5" +Requires-Dist: pyreadstat>=1.2.0; extra == "spss" +Requires-Dist: SQLAlchemy>=2.0.0; extra == "postgresql" +Requires-Dist: psycopg2>=2.9.6; extra == "postgresql" +Requires-Dist: adbc-driver-postgresql>=0.8.0; extra == "postgresql" +Requires-Dist: SQLAlchemy>=2.0.0; extra == "mysql" +Requires-Dist: pymysql>=1.0.2; extra == "mysql" +Requires-Dist: SQLAlchemy>=2.0.0; extra == "sql-other" +Requires-Dist: adbc-driver-postgresql>=0.8.0; extra == "sql-other" +Requires-Dist: 
adbc-driver-sqlite>=0.8.0; extra == "sql-other" +Requires-Dist: beautifulsoup4>=4.11.2; extra == "html" +Requires-Dist: html5lib>=1.1; extra == "html" +Requires-Dist: lxml>=4.9.2; extra == "html" +Requires-Dist: lxml>=4.9.2; extra == "xml" +Requires-Dist: matplotlib>=3.6.3; extra == "plot" +Requires-Dist: jinja2>=3.1.2; extra == "output-formatting" +Requires-Dist: tabulate>=0.9.0; extra == "output-formatting" +Requires-Dist: PyQt5>=5.15.9; extra == "clipboard" +Requires-Dist: qtpy>=2.3.0; extra == "clipboard" +Requires-Dist: zstandard>=0.19.0; extra == "compression" +Requires-Dist: dataframe-api-compat>=0.1.7; extra == "consortium-standard" +Requires-Dist: adbc-driver-postgresql>=0.8.0; extra == "all" +Requires-Dist: adbc-driver-sqlite>=0.8.0; extra == "all" +Requires-Dist: beautifulsoup4>=4.11.2; extra == "all" +Requires-Dist: bottleneck>=1.3.6; extra == "all" +Requires-Dist: dataframe-api-compat>=0.1.7; extra == "all" +Requires-Dist: fastparquet>=2022.12.0; extra == "all" +Requires-Dist: fsspec>=2022.11.0; extra == "all" +Requires-Dist: gcsfs>=2022.11.0; extra == "all" +Requires-Dist: html5lib>=1.1; extra == "all" +Requires-Dist: hypothesis>=6.46.1; extra == "all" +Requires-Dist: jinja2>=3.1.2; extra == "all" +Requires-Dist: lxml>=4.9.2; extra == "all" +Requires-Dist: matplotlib>=3.6.3; extra == "all" +Requires-Dist: numba>=0.56.4; extra == "all" +Requires-Dist: numexpr>=2.8.4; extra == "all" +Requires-Dist: odfpy>=1.4.1; extra == "all" +Requires-Dist: openpyxl>=3.1.0; extra == "all" +Requires-Dist: pandas-gbq>=0.19.0; extra == "all" +Requires-Dist: psycopg2>=2.9.6; extra == "all" +Requires-Dist: pyarrow>=10.0.1; extra == "all" +Requires-Dist: pymysql>=1.0.2; extra == "all" +Requires-Dist: PyQt5>=5.15.9; extra == "all" +Requires-Dist: pyreadstat>=1.2.0; extra == "all" +Requires-Dist: pytest>=7.3.2; extra == "all" +Requires-Dist: pytest-xdist>=2.2.0; extra == "all" +Requires-Dist: python-calamine>=0.1.7; extra == "all" +Requires-Dist: pyxlsb>=1.0.10; extra == "all" +Requires-Dist: qtpy>=2.3.0; extra == "all" +Requires-Dist: scipy>=1.10.0; extra == "all" +Requires-Dist: s3fs>=2022.11.0; extra == "all" +Requires-Dist: SQLAlchemy>=2.0.0; extra == "all" +Requires-Dist: tables>=3.8.0; extra == "all" +Requires-Dist: tabulate>=0.9.0; extra == "all" +Requires-Dist: xarray>=2022.12.0; extra == "all" +Requires-Dist: xlrd>=2.0.1; extra == "all" +Requires-Dist: xlsxwriter>=3.0.5; extra == "all" +Requires-Dist: zstandard>=0.19.0; extra == "all" +Provides-Extra: test +Provides-Extra: pyarrow +Provides-Extra: performance +Provides-Extra: computation +Provides-Extra: fss +Provides-Extra: aws +Provides-Extra: gcp +Provides-Extra: excel +Provides-Extra: parquet +Provides-Extra: feather +Provides-Extra: hdf5 +Provides-Extra: spss +Provides-Extra: postgresql +Provides-Extra: mysql +Provides-Extra: sql-other +Provides-Extra: html +Provides-Extra: xml +Provides-Extra: plot +Provides-Extra: output-formatting +Provides-Extra: clipboard +Provides-Extra: compression +Provides-Extra: consortium-standard +Provides-Extra: all +Description-Content-Type: text/markdown + +
+
+
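The `Provides-Extra` names and their `extra == "..."` markers above define pandas' optional dependency groups. As a usage sketch (the extra names are taken from the metadata above; the exact version pins are resolved by pip rather than typed by hand), an extra is requested in brackets at install time:

```sh
# Install pandas together with the optional "performance" and "excel" groups
# declared above via Provides-Extra; quoting stops the shell from expanding the brackets.
pip install "pandas[performance,excel]"
```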
+ +----------------- + +# pandas: powerful Python data analysis toolkit + +| | | +| --- | --- | +| Testing | [![CI - Test](https://github.com/pandas-dev/pandas/actions/workflows/unit-tests.yml/badge.svg)](https://github.com/pandas-dev/pandas/actions/workflows/unit-tests.yml) [![Coverage](https://codecov.io/github/pandas-dev/pandas/coverage.svg?branch=main)](https://codecov.io/gh/pandas-dev/pandas) | +| Package | [![PyPI Latest Release](https://img.shields.io/pypi/v/pandas.svg)](https://pypi.org/project/pandas/) [![PyPI Downloads](https://img.shields.io/pypi/dm/pandas.svg?label=PyPI%20downloads)](https://pypi.org/project/pandas/) [![Conda Latest Release](https://anaconda.org/conda-forge/pandas/badges/version.svg)](https://anaconda.org/conda-forge/pandas) [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pandas.svg?label=Conda%20downloads)](https://anaconda.org/conda-forge/pandas) | +| Meta | [![Powered by NumFOCUS](https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A)](https://numfocus.org) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3509134.svg)](https://doi.org/10.5281/zenodo.3509134) [![License - BSD 3-Clause](https://img.shields.io/pypi/l/pandas.svg)](https://github.com/pandas-dev/pandas/blob/main/LICENSE) [![Slack](https://img.shields.io/badge/join_Slack-information-brightgreen.svg?logo=slack)](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack) | + + +## What is it? + +**pandas** is a Python package that provides fast, flexible, and expressive data +structures designed to make working with "relational" or "labeled" data both +easy and intuitive. It aims to be the fundamental high-level building block for +doing practical, **real world** data analysis in Python. Additionally, it has +the broader goal of becoming **the most powerful and flexible open source data +analysis / manipulation tool available in any language**. It is already well on +its way towards this goal. + +## Table of Contents + +- [Main Features](#main-features) +- [Where to get it](#where-to-get-it) +- [Dependencies](#dependencies) +- [Installation from sources](#installation-from-sources) +- [License](#license) +- [Documentation](#documentation) +- [Background](#background) +- [Getting Help](#getting-help) +- [Discussion and Development](#discussion-and-development) +- [Contributing to pandas](#contributing-to-pandas) + +## Main Features +Here are just a few of the things that pandas does well: + + - Easy handling of [**missing data**][missing-data] (represented as + `NaN`, `NA`, or `NaT`) in floating point as well as non-floating point data + - Size mutability: columns can be [**inserted and + deleted**][insertion-deletion] from DataFrame and higher dimensional + objects + - Automatic and explicit [**data alignment**][alignment]: objects can + be explicitly aligned to a set of labels, or the user can simply + ignore the labels and let `Series`, `DataFrame`, etc. 
automatically + align the data for you in computations + - Powerful, flexible [**group by**][groupby] functionality to perform + split-apply-combine operations on data sets, for both aggregating + and transforming data + - Make it [**easy to convert**][conversion] ragged, + differently-indexed data in other Python and NumPy data structures + into DataFrame objects + - Intelligent label-based [**slicing**][slicing], [**fancy + indexing**][fancy-indexing], and [**subsetting**][subsetting] of + large data sets + - Intuitive [**merging**][merging] and [**joining**][joining] data + sets + - Flexible [**reshaping**][reshape] and [**pivoting**][pivot-table] of + data sets + - [**Hierarchical**][mi] labeling of axes (possible to have multiple + labels per tick) + - Robust IO tools for loading data from [**flat files**][flat-files] + (CSV and delimited), [**Excel files**][excel], [**databases**][db], + and saving/loading data from the ultrafast [**HDF5 format**][hdfstore] + - [**Time series**][timeseries]-specific functionality: date range + generation and frequency conversion, moving window statistics, + date shifting and lagging + + + [missing-data]: https://pandas.pydata.org/pandas-docs/stable/user_guide/missing_data.html + [insertion-deletion]: https://pandas.pydata.org/pandas-docs/stable/user_guide/dsintro.html#column-selection-addition-deletion + [alignment]: https://pandas.pydata.org/pandas-docs/stable/user_guide/dsintro.html?highlight=alignment#intro-to-data-structures + [groupby]: https://pandas.pydata.org/pandas-docs/stable/user_guide/groupby.html#group-by-split-apply-combine + [conversion]: https://pandas.pydata.org/pandas-docs/stable/user_guide/dsintro.html#dataframe + [slicing]: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#slicing-ranges + [fancy-indexing]: https://pandas.pydata.org/pandas-docs/stable/user_guide/advanced.html#advanced + [subsetting]: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#boolean-indexing + [merging]: https://pandas.pydata.org/pandas-docs/stable/user_guide/merging.html#database-style-dataframe-or-named-series-joining-merging + [joining]: https://pandas.pydata.org/pandas-docs/stable/user_guide/merging.html#joining-on-index + [reshape]: https://pandas.pydata.org/pandas-docs/stable/user_guide/reshaping.html + [pivot-table]: https://pandas.pydata.org/pandas-docs/stable/user_guide/reshaping.html + [mi]: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#hierarchical-indexing-multiindex + [flat-files]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#csv-text-files + [excel]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#excel-files + [db]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#sql-queries + [hdfstore]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#hdf5-pytables + [timeseries]: https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#time-series-date-functionality + +## Where to get it +The source code is currently hosted on GitHub at: +https://github.com/pandas-dev/pandas + +Binary installers for the latest released version are available at the [Python +Package Index (PyPI)](https://pypi.org/project/pandas) and on [Conda](https://docs.conda.io/en/latest/). + +```sh +# conda +conda install -c conda-forge pandas +``` + +```sh +# or PyPI +pip install pandas +``` + +The list of changes to pandas between each release can be found +[here](https://pandas.pydata.org/pandas-docs/stable/whatsnew/index.html). 
For full +details, see the commit logs at https://github.com/pandas-dev/pandas. + +## Dependencies +- [NumPy - Adds support for large, multi-dimensional arrays, matrices and high-level mathematical functions to operate on these arrays](https://www.numpy.org) +- [python-dateutil - Provides powerful extensions to the standard datetime module](https://dateutil.readthedocs.io/en/stable/index.html) +- [pytz - Brings the Olson tz database into Python which allows accurate and cross platform timezone calculations](https://github.com/stub42/pytz) + +See the [full installation instructions](https://pandas.pydata.org/pandas-docs/stable/install.html#dependencies) for minimum supported versions of required, recommended and optional dependencies. + +## Installation from sources +To install pandas from source you need [Cython](https://cython.org/) in addition to the normal +dependencies above. Cython can be installed from PyPI: + +```sh +pip install cython +``` + +In the `pandas` directory (same one where you found this file after +cloning the git repo), execute: + +```sh +pip install . +``` + +or for installing in [development mode](https://pip.pypa.io/en/latest/cli/pip_install/#install-editable): + + +```sh +python -m pip install -ve . --no-build-isolation --config-settings=editable-verbose=true +``` + +See the full instructions for [installing from source](https://pandas.pydata.org/docs/dev/development/contributing_environment.html). + +## License +[BSD 3](LICENSE) + +## Documentation +The official documentation is hosted on [PyData.org](https://pandas.pydata.org/pandas-docs/stable/). + +## Background +Work on ``pandas`` started at [AQR](https://www.aqr.com/) (a quantitative hedge fund) in 2008 and +has been under active development since then. + +## Getting Help + +For usage questions, the best place to go to is [StackOverflow](https://stackoverflow.com/questions/tagged/pandas). +Further, general questions and discussions can also take place on the [pydata mailing list](https://groups.google.com/forum/?fromgroups#!forum/pydata). + +## Discussion and Development +Most development discussions take place on GitHub in this repo, via the [GitHub issue tracker](https://github.com/pandas-dev/pandas/issues). + +Further, the [pandas-dev mailing list](https://mail.python.org/mailman/listinfo/pandas-dev) can also be used for specialized discussions or design issues, and a [Slack channel](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack) is available for quick development related questions. + +There are also frequent [community meetings](https://pandas.pydata.org/docs/dev/development/community.html#community-meeting) for project maintainers open to the community as well as monthly [new contributor meetings](https://pandas.pydata.org/docs/dev/development/community.html#new-contributor-meeting) to help support new contributors. + +Additional information on the communication channels can be found on the [contributor community](https://pandas.pydata.org/docs/development/community.html) page. + +## Contributing to pandas + +[![Open Source Helpers](https://www.codetriage.com/pandas-dev/pandas/badges/users.svg)](https://www.codetriage.com/pandas-dev/pandas) + +All contributions, bug reports, bug fixes, documentation improvements, enhancements, and ideas are welcome. + +A detailed overview on how to contribute can be found in the **[contributing guide](https://pandas.pydata.org/docs/dev/development/contributing.html)**. 
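As a quick, unofficial sanity check (a sketch, not part of the pandas documentation), the following one-liner exercises an installed copy of pandas, whether it came from PyPI, conda-forge, or the source build described above:

```sh
# Print the installed pandas version, then run a tiny groupby as a smoke test
# that the package imports cleanly and its core machinery works.
python -c "import pandas as pd; print(pd.__version__); print(pd.DataFrame({'g': ['a', 'a', 'b'], 'v': [1, 2, 3]}).groupby('g')['v'].sum())"
```

If the import fails, revisit the dependency versions listed above before filing an issue.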
+ +If you are simply looking to start working with the pandas codebase, navigate to the [GitHub "issues" tab](https://github.com/pandas-dev/pandas/issues) and start looking through interesting issues. There are a number of issues listed under [Docs](https://github.com/pandas-dev/pandas/issues?labels=Docs&sort=updated&state=open) and [good first issue](https://github.com/pandas-dev/pandas/issues?labels=good+first+issue&sort=updated&state=open) where you could start out. + +You can also triage issues, which may include reproducing bug reports or asking for vital information such as version numbers or reproduction instructions. If you would like to start triaging issues, one easy way to get started is to [subscribe to pandas on CodeTriage](https://www.codetriage.com/pandas-dev/pandas). + +Or maybe you have been using pandas, have an idea of your own, or have spotted something in the documentation and thought ‘this can be improved’: you can do something about it! + +Feel free to ask questions on the [mailing list](https://groups.google.com/forum/?fromgroups#!forum/pydata) or on [Slack](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack). + +As contributors and maintainers to this project, you are expected to abide by pandas' code of conduct. More information can be found at: [Contributor Code of Conduct](https://github.com/pandas-dev/.github/blob/master/CODE_OF_CONDUCT.md) + +
+ +[Go to Top](#table-of-contents) diff --git a/.cache/pip/http-v2/a/e/1/3/6/ae1360498e2a7e78c3695d71a4132c94b805a9c41d71ad8f62ad998f.body b/.cache/pip/http-v2/a/e/1/3/6/ae1360498e2a7e78c3695d71a4132c94b805a9c41d71ad8f62ad998f.body new file mode 100644 index 0000000000000000000000000000000000000000..d802d01b1149e5a0b8ed94e5a5ef323219fff260 Binary files /dev/null and b/.cache/pip/http-v2/a/e/1/3/6/ae1360498e2a7e78c3695d71a4132c94b805a9c41d71ad8f62ad998f.body differ diff --git a/.cache/pip/http-v2/a/e/7/a/2/ae7a241673cf118ca18eca030dc29d2715b1980127dd0e2949514433 b/.cache/pip/http-v2/a/e/7/a/2/ae7a241673cf118ca18eca030dc29d2715b1980127dd0e2949514433 new file mode 100644 index 0000000000000000000000000000000000000000..5477435d801bff24fcc19fa69dcfcae7d50e7e1d Binary files /dev/null and b/.cache/pip/http-v2/a/e/7/a/2/ae7a241673cf118ca18eca030dc29d2715b1980127dd0e2949514433 differ diff --git a/.cache/pip/http-v2/b/6/8/7/d/b687d90e2a44328db9c9ecd9af0a9c577a4e68f9d239bbb73aebc319.body b/.cache/pip/http-v2/b/6/8/7/d/b687d90e2a44328db9c9ecd9af0a9c577a4e68f9d239bbb73aebc319.body new file mode 100644 index 0000000000000000000000000000000000000000..d73e4958d741d6d243f093e3ad16927530159d03 --- /dev/null +++ b/.cache/pip/http-v2/b/6/8/7/d/b687d90e2a44328db9c9ecd9af0a9c577a4e68f9d239bbb73aebc319.body @@ -0,0 +1,62 @@ +Metadata-Version: 2.1 +Name: wheel +Version: 0.42.0 +Summary: A built-package format for Python +Keywords: wheel,packaging +Author-email: Daniel Holth +Maintainer-email: Alex Grönholm +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Topic :: System :: Archiving :: Packaging +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Dist: pytest >= 6.0.0 ; extra == "test" +Requires-Dist: setuptools >= 65 ; extra == "test" +Project-URL: Changelog, https://wheel.readthedocs.io/en/stable/news.html +Project-URL: Documentation, https://wheel.readthedocs.io/ +Project-URL: Issue Tracker, https://github.com/pypa/wheel/issues +Project-URL: Source, https://github.com/pypa/wheel +Provides-Extra: test + +wheel +===== + +This library is the reference implementation of the Python wheel packaging +standard, as defined in `PEP 427`_. + +It has two different roles: + +#. A setuptools_ extension for building wheels that provides the + ``bdist_wheel`` setuptools command +#. A command line tool for working with wheel files + +It should be noted that wheel is **not** intended to be used as a library, and +as such there is no stable, public API. + +.. _PEP 427: https://www.python.org/dev/peps/pep-0427/ +.. _setuptools: https://pypi.org/project/setuptools/ + +Documentation +------------- + +The documentation_ can be found on Read The Docs. + +.. _documentation: https://wheel.readthedocs.io/ + +Code of Conduct +--------------- + +Everyone interacting in the wheel project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. 
_PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + diff --git a/.config/wandb/settings b/.config/wandb/settings new file mode 100644 index 0000000000000000000000000000000000000000..8dae664872e647e4bf829a45038781fb33b43d92 --- /dev/null +++ b/.config/wandb/settings @@ -0,0 +1,2 @@ +[default] + diff --git a/.ipynb_checkpoints/train-checkpoint.py b/.ipynb_checkpoints/train-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..e297c7a79dd2402ac4a2b6f62caf850f6350339f --- /dev/null +++ b/.ipynb_checkpoints/train-checkpoint.py @@ -0,0 +1,546 @@ +# torchrun --standalone --nproc_per_node=2 train.py --batch_size=96 + +# train.py +import os +import time +import math +from contextlib import nullcontext +import json + +import numpy as np +import torch +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +import pandas as pd + +import tiktoken +from model import GPTConfig, GPT + +# Import wandb and tqdm +import wandb +from tqdm.auto import tqdm + +# ----------------------------------------------------------------------------- +# Default configuration with added positional encoding options +# I/O +out_dir = 'out' +eval_interval = 100 # Evaluate every 100 iterations +log_interval = 1 # Log every iteration +eval_iters = 100 +eval_only = False +always_save_checkpoint = True +init_from = 'scratch' # 'scratch' | 'resume' | 'checkpoint' +checkpoint_path = '' # Path to a specific checkpoint to load +# wandb logging +wandb_log = True +wandb_project = 'gpt2_positional_encodings_10B' +wandb_run_name = 'experiment' +# data +dataset = 'fineweb' +gradient_accumulation_steps = 40 +batch_size = 12 +block_size = 512 +# model +n_layer = 4 +n_head = 4 +n_embd = 256 +dropout = 0.0 +bias = False +# adamw optimizer +learning_rate = 6e-4 +max_iters = 10000 +weight_decay = 1e-1 +beta1 = 0.9 +beta2 = 0.95 +grad_clip = 1.0 +# learning rate decay settings +decay_lr = True +warmup_iters = 100 +lr_decay_iters = 10000 +min_lr = 6e-5 +# DDP settings +backend = 'nccl' +# system +device = 'cuda' +dtype = 'bfloat16' if torch.cuda.is_available() and torch.cuda.is_bf16_supported() else 'float16' +compile = True +# Positional Encodings +embedding_types = ['sinusoidal', 'polynomial_legendre', + 'polynomial_chebyshev'] +attention_types = ['default'] +# Data collection options +collect_attention_patterns = False # Set to True to collect attention patterns +collect_activations = False # Set to True to collect activations +# Evaluation datasets +eval_datasets = ['wikitext-103-v1', 'ptb', 'lambada'] # WikiText-103 and Penn Treebank +seed = 1337 +# ----------------------------------------------------------------------------- +config_keys = [k for k, v in globals().items() if not k.startswith('_') and isinstance(v, (int, float, bool, str, list, tuple))] +exec(open('configurator.py').read()) +config = {k: globals()[k] for k in config_keys} +# ----------------------------------------------------------------------------- + +def is_compatible(embedding_type, attention_type): + # Incompatible combinations can be specified here + incompatible_combinations = [ + # If specific combinations are incompatible + ] + + # If embedding_type or attention_type is 'none', some attention methods may not function properly + if embedding_type == 'none' and attention_type in ['relative', 'rope']: + return False + + # 'rope' attention requires even dimension per head + if attention_type == 'rope' and ((n_embd // n_head) % 2 != 
0): + return False + + return (embedding_type, attention_type) not in incompatible_combinations + +def main(): + # Initialize DDP if needed + global gradient_accumulation_steps + ddp = int(os.environ.get('RANK', -1)) != -1 + if ddp: + init_process_group(backend=backend) + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device_local = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device_local) + master_process = ddp_rank == 0 + seed_offset = ddp_rank + assert gradient_accumulation_steps % ddp_world_size == 0 + gradient_accumulation_steps //= ddp_world_size + else: + master_process = True + seed_offset = 0 + ddp_world_size = 1 + device_local = device # Use the default device + + tokens_per_iter = gradient_accumulation_steps * ddp_world_size * batch_size * block_size + if master_process: + print(f"Tokens per iteration will be: {tokens_per_iter:,}") + + if master_process: + os.makedirs(out_dir, exist_ok=True) + + # Set random seed + global seed + seed += seed_offset + torch.manual_seed(seed) + np.random.seed(seed) + torch.backends.cuda.matmul.allow_tf32 = True + torch.backends.cudnn.allow_tf32 = True + device_type = 'cuda' if 'cuda' in device_local else 'cpu' + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[dtype] + ctx = nullcontext() if device_type == 'cpu' else torch.amp.autocast(device_type=device_type, dtype=ptdtype) + + # Load tokenizer using tiktoken + tokenizer = tiktoken.get_encoding("gpt2") + + # Prepare evaluation datasets + eval_data = {} + for eval_dataset in eval_datasets: + eval_data_path = os.path.join('data', eval_dataset) + if not os.path.exists(eval_data_path): + raise FileNotFoundError(f"Dataset {eval_dataset} not found. 
Please run prepare_evaluation_data.py first.") + + if eval_dataset in ['wikitext-2-v1', 'wikitext-103-v1']: + train_file = [f for f in os.listdir(eval_data_path) if f.startswith('train')][0] + val_file = [f for f in os.listdir(eval_data_path) if f.startswith('validation')][0] + + train_df = pd.read_parquet(os.path.join(eval_data_path, train_file)) + val_df = pd.read_parquet(os.path.join(eval_data_path, val_file)) + + train_text = '\n'.join(train_df['text']) + val_text = '\n'.join(val_df['text']) + + elif eval_dataset == 'ptb': + with open(os.path.join(eval_data_path, 'train.txt'), 'r') as f: + train_text = f.read() + with open(os.path.join(eval_data_path, 'valid.txt'), 'r') as f: + val_text = f.read() + + elif eval_dataset == 'lambada': + with open(os.path.join(eval_data_path, 'lambada_test.jsonl'), 'r') as f: + data = [json.loads(line) for line in f] + test_text = '\n'.join([item['text'] for item in data]) + train_text = test_text[:len(test_text)//2] # Use first half as pseudo-train + val_text = test_text[len(test_text)//2:] # Use second half as pseudo-val + + else: + raise ValueError(f"Unknown dataset: {eval_dataset}") + + # Tokenize + train_ids = tokenizer.encode_ordinary(train_text) + val_ids = tokenizer.encode_ordinary(val_text) + + # Convert to numpy arrays + train_ids = np.array(train_ids, dtype=np.uint16) + val_ids = np.array(val_ids, dtype=np.uint16) + + eval_data[eval_dataset] = {'train': train_ids, 'val': val_ids} + + # Data loading + data_dir = os.path.join('data', dataset) + # Update the get_batch function to handle evaluation datasets + def get_batch(split, dataset='main'): + if dataset == 'main': + if split == 'train': + data = np.memmap(os.path.join(data_dir, 'train.bin'), dtype=np.uint16, mode='r') + else: + data = np.memmap(os.path.join(data_dir, 'val.bin'), dtype=np.uint16, mode='r') + else: + data = eval_data[dataset][split] + + ix = torch.randint(len(data) - block_size, (batch_size,)) + x = torch.stack([torch.from_numpy((data[i:i+block_size]).astype(np.int64)) for i in ix]) + y = torch.stack([torch.from_numpy((data[i+1:i+1+block_size]).astype(np.int64)) for i in ix]) + if device_type == 'cuda': + x, y = x.pin_memory().to(device_local, non_blocking=True), y.pin_memory().to(device_local, non_blocking=True) + else: + x, y = x.to(device_local), y.to(device_local) + return x, y + + # Attempt to derive vocab_size from the dataset + meta_path = os.path.join(data_dir, 'meta.json') + meta_vocab_size = None + if os.path.exists(meta_path): + with open(meta_path, 'r') as f: + meta = json.load(f) + meta_vocab_size = meta['vocab_size'] + if master_process: + print(f"Found vocab_size = {meta_vocab_size} (inside {meta_path})") + + # Helps estimate loss and collect attention patterns and activations + @torch.no_grad() + def estimate_loss(model, collect_attention_patterns=False, collect_activations=False, save_dir=None, max_batches_to_save=None): + out = {} + model.eval() + # Access the underlying model if wrapped with DDP + raw_model = model.module if hasattr(model, 'module') else model + + # Set tracking flags on the underlying model + raw_model.config.track_attention_patterns = collect_attention_patterns + raw_model.config.track_activations = collect_activations + + if collect_attention_patterns or collect_activations: + if save_dir is None: + raise ValueError("save_dir must be specified when collecting attention patterns or activations.") + if master_process: + os.makedirs(save_dir, exist_ok=True) + + for split in ['train', 'val']: + losses = torch.zeros(eval_iters) + save_count = 0 
# Counter for saved batches + for k in range(eval_iters): + X, Y = get_batch(split) + with ctx: + logits, loss = model(X, Y) + losses[k] = loss.item() + # Collect and save attention patterns and activations + if (collect_attention_patterns or collect_activations) and save_count < (max_batches_to_save or eval_iters): + if collect_attention_patterns or collect_activations: + if master_process: + batch_dir = os.path.join(save_dir, f"{split}_batch_{k}") + os.makedirs(batch_dir, exist_ok=True) + # Save activations + if collect_activations and hasattr(raw_model, 'activations'): + for idx, activation in enumerate(raw_model.activations): + activation_path = os.path.join(batch_dir, f"activation_layer_{idx}.pt") + torch.save(activation, activation_path) + # Save attention patterns + if collect_attention_patterns and hasattr(raw_model, 'attention_patterns'): + for idx, attention in enumerate(raw_model.attention_patterns): + attention_path = os.path.join(batch_dir, f"attention_layer_{idx}.pt") + torch.save(attention, attention_path) + # Clear activations and attention patterns from the model + raw_model.activations = [] + raw_model.attention_patterns = [] + save_count += 1 + out[split] = losses.mean().item() + + # Evaluate on additional datasets + for eval_dataset in eval_datasets: + split_losses = {} + for split in ['train', 'val']: + losses = torch.zeros(eval_iters) + save_count = 0 # Counter for saved batches + for k in range(eval_iters): + X, Y = get_batch(split, dataset=eval_dataset) + with ctx: + logits, loss = model(X, Y) + losses[k] = loss.item() + # Collect and save attention patterns and activations + if (collect_attention_patterns or collect_activations) and save_count < (max_batches_to_save or eval_iters): + if collect_attention_patterns or collect_activations: + if master_process: + batch_dir = os.path.join(save_dir, f"{eval_dataset}_{split}_batch_{k}") + os.makedirs(batch_dir, exist_ok=True) + # Save activations + if collect_activations and hasattr(raw_model, 'activations'): + for idx, activation in enumerate(raw_model.activations): + activation_path = os.path.join(batch_dir, f"activation_layer_{idx}.pt") + torch.save(activation, activation_path) + # Save attention patterns + if collect_attention_patterns and hasattr(raw_model, 'attention_patterns'): + for idx, attention in enumerate(raw_model.attention_patterns): + attention_path = os.path.join(batch_dir, f"attention_layer_{idx}.pt") + torch.save(attention, attention_path) + # Clear activations and attention patterns from the model + raw_model.activations = [] + raw_model.attention_patterns = [] + save_count += 1 + split_losses[split] = losses.mean().item() + out[eval_dataset] = split_losses + model.train() + # Reset tracking flags + raw_model.config.track_attention_patterns = False + raw_model.config.track_activations = False + return out + + # Learning rate decay scheduler + def get_lr(it): + if it < warmup_iters: + return learning_rate * it / warmup_iters + if it > lr_decay_iters: + return min_lr + decay_ratio = (it - warmup_iters) / (lr_decay_iters - warmup_iters) + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) + return min_lr + coeff * (learning_rate - min_lr) + + # Training loop over positional encoding combinations + for embedding_type in embedding_types: + for attention_type in attention_types: + if not is_compatible(embedding_type, attention_type): + if master_process: + print(f"Skipping incompatible combination: Embedding={embedding_type}, Attention={attention_type}") + continue + + # Configure model arguments + 
model_args = dict( + n_layer=n_layer, + n_head=n_head, + n_embd=n_embd, + block_size=block_size, + bias=bias, + vocab_size=None, + dropout=dropout, + embedding_type=embedding_type, + attention_type=attention_type, + track_activations=False, + track_attention_patterns=False, + ) + + # Initialize or resume model + iter_num = 0 + best_val_loss = 1e9 # initialize best val loss to a high value + checkpoint = None + run_id = None # Initialize run_id to None + + if init_from == 'scratch': + if master_process: + print(f"\nInitializing new model with embedding_type={embedding_type}, attention_type={attention_type}") + if meta_vocab_size is None: + if master_process: + print("Defaulting to vocab_size of GPT-2 to 50257") + model_args['vocab_size'] = meta_vocab_size if meta_vocab_size is not None else 50257 + gptconf = GPTConfig(**model_args) + model = GPT(gptconf) + elif init_from == 'resume': + # Resume from the latest checkpoint + ckpt_path = os.path.join(out_dir, f"ckpt_{embedding_type}_{attention_type}.pt") + if not os.path.exists(ckpt_path): + raise FileNotFoundError(f"Checkpoint not found at {ckpt_path}") + if master_process: + print(f"\nResuming training from checkpoint {ckpt_path}") + checkpoint = torch.load(ckpt_path, map_location=device_local) + gptconf = GPTConfig(**checkpoint['model_args']) + model = GPT(gptconf) + model.load_state_dict(checkpoint['model']) + iter_num = checkpoint['iter_num'] + best_val_loss = checkpoint['best_val_loss'] + seed = checkpoint.get('seed', seed) + run_id = checkpoint.get('wandb_run_id', None) + elif init_from == 'checkpoint': + # Resume from a specific checkpoint + if not checkpoint_path or not os.path.exists(checkpoint_path): + raise FileNotFoundError(f"Checkpoint not found at {checkpoint_path}") + if master_process: + print(f"\nLoading model from checkpoint {checkpoint_path}") + checkpoint = torch.load(checkpoint_path, map_location=device_local) + gptconf = GPTConfig(**checkpoint['model_args']) + model = GPT(gptconf) + model.load_state_dict(checkpoint['model']) + iter_num = checkpoint['iter_num'] + best_val_loss = checkpoint['best_val_loss'] + seed = checkpoint.get('seed', seed) + run_id = checkpoint.get('wandb_run_id', None) + else: + raise ValueError(f"Unknown init_from '{init_from}'") + + # Set random seed + seed += seed_offset + torch.manual_seed(seed) + np.random.seed(seed) + + model.to(device_local) + scaler = torch.cuda.amp.GradScaler(enabled=(dtype == 'float16')) + optimizer = model.configure_optimizers(weight_decay, learning_rate, (beta1, beta2), device_type) + + # Load optimizer state if resuming + if checkpoint is not None: + optimizer.load_state_dict(checkpoint['optimizer']) + + if compile: + if master_process: + print("Compiling the model... 
(takes a ~minute)") + unoptimized_model = model + model = torch.compile(model) + + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + + # Logging with WandB + if wandb_log and master_process: + run_name = f"{embedding_type}_{attention_type}_{wandb_run_name}" + # Initialize WandB + wandb.init(project=wandb_project, name=run_name, config=config, resume='allow', id=run_id) + # Save the run ID for resuming later + run_id = wandb.run.id + else: + run_id = None + + # Training loop + X, Y = get_batch('train') + t0 = time.time() + local_iter_num = 0 + raw_model = model.module if hasattr(model, 'module') else model + running_mfu = -1.0 + progress_bar = tqdm(total=max_iters, initial=iter_num, desc=f"Training {embedding_type} + {attention_type}", disable=not master_process) + progress_bar_update_freq = 1 # Update progress bar every iteration + + while True: + # Determine learning rate + lr = get_lr(iter_num) if decay_lr else learning_rate + for param_group in optimizer.param_groups: + param_group['lr'] = lr + + # Evaluate and checkpoint + if iter_num % eval_interval == 0 and iter_num > 0: + # Define save_dir for collected data + eval_data_dir = os.path.join('data', 'eval_data', f"{embedding_type}_{attention_type}", f"step_{iter_num}") + # Set a limit on the number of batches to save during evaluation + max_batches_to_save = 10 # Adjust this number as needed to control storage usage + losses = estimate_loss(model, + collect_attention_patterns=collect_attention_patterns, + collect_activations=collect_activations, + save_dir=eval_data_dir, + max_batches_to_save=max_batches_to_save) + if master_process: + print(f"\nStep {iter_num}:") + print(f"Train loss: {losses['train']:.4f}, Val loss: {losses['val']:.4f}") + for eval_dataset in eval_datasets: + print(f"{eval_dataset} - Train loss: {losses[eval_dataset]['train']:.4f}, Val loss: {losses[eval_dataset]['val']:.4f}") + # Log to wandb + if wandb_log: + wandb_metrics = { + "iter": iter_num, + "train/loss": losses['train'], + "val/loss": losses['val'], + "lr": lr, + "mfu": running_mfu * 100, + } + for eval_dataset in eval_datasets: + wandb_metrics[f"{eval_dataset}/train_loss"] = losses[eval_dataset]['train'] + wandb_metrics[f"{eval_dataset}/val_loss"] = losses[eval_dataset]['val'] + wandb.log(wandb_metrics, step=iter_num) + if losses['val'] < best_val_loss or always_save_checkpoint: + best_val_loss = losses['val'] + if iter_num > 0: + checkpoint = { + 'model': raw_model.state_dict(), + 'optimizer': optimizer.state_dict(), + 'model_args': model_args, + 'iter_num': iter_num, + 'best_val_loss': best_val_loss, + 'config': config, + 'seed': seed, + 'wandb_run_id': run_id + } + ckpt_path = os.path.join(out_dir, f"ckpt_{embedding_type}_{attention_type}.pt") + if master_process: + print(f"Saving checkpoint to {ckpt_path}") + torch.save(checkpoint, ckpt_path) + # Update progress bar postfix + if master_process: + postfix_dict = { + 'train_loss': f"{losses['train']:.4f}", + 'val_loss': f"{losses['val']:.4f}" + } + for eval_dataset in eval_datasets: + postfix_dict[f"{eval_dataset}_val_loss"] = f"{losses[eval_dataset]['val']:.4f}" + progress_bar.set_postfix(postfix_dict) + + if eval_only: + break + + # Forward backward update + for micro_step in range(gradient_accumulation_steps): + if ddp: + model.require_backward_grad_sync = (micro_step == gradient_accumulation_steps - 1) + with ctx: + logits, loss = model(X, Y) + loss = loss / gradient_accumulation_steps + X, Y = get_batch('train') + scaler.scale(loss).backward() + if grad_clip != 0.0: + 
scaler.unscale_(optimizer) + torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip) + scaler.step(optimizer) + scaler.update() + optimizer.zero_grad(set_to_none=True) + + # Logging + t1 = time.time() + dt = t1 - t0 + t0 = t1 + if iter_num % log_interval == 0: + lossf = loss.item() * gradient_accumulation_steps + if local_iter_num >= 5: + mfu = raw_model.estimate_mfu(batch_size * gradient_accumulation_steps, dt) + running_mfu = mfu if running_mfu == -1.0 else 0.9 * running_mfu + 0.1 * mfu + if master_process: + progress_bar.set_postfix({ + 'loss': f"{lossf:.4f}", + 'lr': f"{lr:.2e}", + 'mfu': f"{running_mfu*100:.2f}%", + 'time_per_iter_ms': f"{dt * 1000:.2f}ms", + }) + if wandb_log: + wandb.log({ + "iter": iter_num, + "train/loss": lossf, + "lr": lr, + "mfu": running_mfu * 100, + "time_per_iter_ms": dt * 1000, + }, step=iter_num) + iter_num += 1 + local_iter_num += 1 + if master_process: + progress_bar.update(progress_bar_update_freq) + # Termination conditions + if iter_num > max_iters: + break + + if master_process: + progress_bar.close() + if wandb_log and master_process: + wandb.finish() + + # Destroy the process group after all models have been trained + if ddp: + destroy_process_group() + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/.launchpadlib/api.launchpad.net/cache/api.launchpad.net,devel,-application,json,fc4e1e3a03117146fb1e9d492ab76690 b/.launchpadlib/api.launchpad.net/cache/api.launchpad.net,devel,-application,json,fc4e1e3a03117146fb1e9d492ab76690 new file mode 100644 index 0000000000000000000000000000000000000000..e64ef2b4339e2e9a4cc3e55bb5dd3eab678a342a --- /dev/null +++ b/.launchpadlib/api.launchpad.net/cache/api.launchpad.net,devel,-application,json,fc4e1e3a03117146fb1e9d492ab76690 @@ -0,0 +1,19 @@ +status: 200 +date: Mon, 05 Feb 2024 23:25:35 GMT +server: Apache/2.4.41 (Ubuntu) +content-location: index.json +vary: negotiate,accept,Accept-Encoding +tcn: choice +last-modified: Wed, 31 Jan 2024 08:46:52 GMT +etag: "8fa-61039eb973700-gzip" +accept-ranges: bytes +content-type: application/json; qs=0.9 +x-cache: MISS from juju-98d295-prod-launchpad-30 +x-cache-lookup: MISS from juju-98d295-prod-launchpad-30:3128 +via: 1.1 juju-98d295-prod-launchpad-30 (squid/4.10) +content-length: 2298 +-content-encoding: gzip +-varied-accept: application/json +-varied-accept-encoding: gzip, deflate + +{"resource_type_link": "https://api.launchpad.net/devel/#service-root", "temporary_blobs_collection_link": "https://api.launchpad.net/devel/temporary-blobs", "countries_collection_link": "https://api.launchpad.net/devel/+countries", "languages_collection_link": "https://api.launchpad.net/devel/+languages", "questions_collection_link": "https://api.launchpad.net/devel/questions", "specifications_collection_link": "https://api.launchpad.net/devel/", "bug_trackers_collection_link": "https://api.launchpad.net/devel/bugs/bugtrackers", "cves_collection_link": "https://api.launchpad.net/devel/bugs/cve", "bugs_collection_link": "https://api.launchpad.net/devel/bugs", "builders_collection_link": "https://api.launchpad.net/devel/builders", "processors_collection_link": "https://api.launchpad.net/devel/+processors", "charm_bases_collection_link": "https://api.launchpad.net/devel/+charm-bases", "charm_recipes_collection_link": "https://api.launchpad.net/devel/+charm-recipes", "branches_collection_link": "https://api.launchpad.net/devel/branches", "git_repositories_collection_link": "https://api.launchpad.net/devel/+git", "snap_bases_collection_link": 
"https://api.launchpad.net/devel/+snap-bases", "snaps_collection_link": "https://api.launchpad.net/devel/+snaps", "snappy_serieses_collection_link": "https://api.launchpad.net/devel/+snappy-series", "archives_collection_link": "https://api.launchpad.net/devel/archives", "livefses_collection_link": "https://api.launchpad.net/devel/livefses", "packagesets_collection_link": "https://api.launchpad.net/devel/package-sets", "translation_groups_collection_link": "https://api.launchpad.net/devel/+groups", "translation_import_queue_entries_collection_link": "https://api.launchpad.net/devel/+imports", "distributions_collection_link": "https://api.launchpad.net/devel/distros", "people_collection_link": "https://api.launchpad.net/devel/people", "polls_collection_link": "https://api.launchpad.net/devel/+polls", "projects_collection_link": "https://api.launchpad.net/devel/projects", "project_groups_collection_link": "https://api.launchpad.net/devel/projectgroups", "services_link": "https://api.launchpad.net/devel/services", "pillars_link": "https://api.launchpad.net/devel/pillars", "me_link": "https://api.launchpad.net/devel/people/+me"} \ No newline at end of file diff --git a/.local/share/jupyter/nbextensions/keyboard_shortcut_editor/readme_menu_item.png b/.local/share/jupyter/nbextensions/keyboard_shortcut_editor/readme_menu_item.png new file mode 100644 index 0000000000000000000000000000000000000000..008e082c421aa8d50b87cbac55e73dfd8479ad77 Binary files /dev/null and b/.local/share/jupyter/nbextensions/keyboard_shortcut_editor/readme_menu_item.png differ diff --git a/.local/share/jupyter/nbextensions/keyboard_shortcut_editor/readme_reset_disabled.png b/.local/share/jupyter/nbextensions/keyboard_shortcut_editor/readme_reset_disabled.png new file mode 100644 index 0000000000000000000000000000000000000000..97e9f476e706f68332a11107e40dfb1d1eca1dd3 Binary files /dev/null and b/.local/share/jupyter/nbextensions/keyboard_shortcut_editor/readme_reset_disabled.png differ diff --git a/.local/share/jupyter/nbextensions/navigation-hotkeys/icon.png b/.local/share/jupyter/nbextensions/navigation-hotkeys/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..16162d592ebb372fa9576ab4932729cdb736693c Binary files /dev/null and b/.local/share/jupyter/nbextensions/navigation-hotkeys/icon.png differ diff --git a/.local/share/jupyter/nbextensions/nbTranslate/languages.js b/.local/share/jupyter/nbextensions/nbTranslate/languages.js new file mode 100644 index 0000000000000000000000000000000000000000..cca62abbcf438fcdca3d03340255e61eef266da6 --- /dev/null +++ b/.local/share/jupyter/nbextensions/nbTranslate/languages.js @@ -0,0 +1,114 @@ +/** + * + * Generated from https://translate.google.com + * + * The languages that Google Translate supports (as of 5/15/16) alongside with their ISO 639-1 codes + * See https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes + */ + +var langs = { + 'auto': 'Automatic', + 'af': 'Afrikaans', + 'sq': 'Albanian', + 'ar': 'Arabic', + 'hy': 'Armenian', + 'az': 'Azerbaijani', + 'eu': 'Basque', + 'be': 'Belarusian', + 'bn': 'Bengali', + 'bs': 'Bosnian', + 'bg': 'Bulgarian', + 'ca': 'Catalan', + 'ceb': 'Cebuano', + 'ny': 'Chichewa', + 'zh-cn': 'Chinese Simplified', + 'zh-tw': 'Chinese Traditional', + 'co': 'Corsican', + 'hr': 'Croatian', + 'cs': 'Czech', + 'da': 'Danish', + 'nl': 'Dutch', + 'en': 'English', + 'eo': 'Esperanto', + 'et': 'Estonian', + 'tl': 'Filipino', + 'fi': 'Finnish', + 'fr': 'French', + 'fy': 'Frisian', + 'gl': 'Galician', + 'ka': 'Georgian', + 'de': 
'German', + 'el': 'Greek', + 'gu': 'Gujarati', + 'ht': 'Haitian Creole', + 'ha': 'Hausa', + 'haw': 'Hawaiian', + 'iw': 'Hebrew', + 'hi': 'Hindi', + 'hmn': 'Hmong', + 'hu': 'Hungarian', + 'is': 'Icelandic', + 'ig': 'Igbo', + 'id': 'Indonesian', + 'ga': 'Irish', + 'it': 'Italian', + 'ja': 'Japanese', + 'jw': 'Javanese', + 'kn': 'Kannada', + 'kk': 'Kazakh', + 'km': 'Khmer', + 'ko': 'Korean', + 'ku': 'Kurdish (Kurmanji)', + 'ky': 'Kyrgyz', + 'lo': 'Lao', + 'la': 'Latin', + 'lv': 'Latvian', + 'lt': 'Lithuanian', + 'lb': 'Luxembourgish', + 'mk': 'Macedonian', + 'mg': 'Malagasy', + 'ms': 'Malay', + 'ml': 'Malayalam', + 'mt': 'Maltese', + 'mi': 'Maori', + 'mr': 'Marathi', + 'mn': 'Mongolian', + 'my': 'Myanmar (Burmese)', + 'ne': 'Nepali', + 'no': 'Norwegian', + 'ps': 'Pashto', + 'fa': 'Persian', + 'pl': 'Polish', + 'pt': 'Portuguese', + 'ma': 'Punjabi', + 'ro': 'Romanian', + 'ru': 'Russian', + 'sm': 'Samoan', + 'gd': 'Scots Gaelic', + 'sr': 'Serbian', + 'st': 'Sesotho', + 'sn': 'Shona', + 'sd': 'Sindhi', + 'si': 'Sinhala', + 'sk': 'Slovak', + 'sl': 'Slovenian', + 'so': 'Somali', + 'es': 'Spanish', + 'su': 'Sudanese', + 'sw': 'Swahili', + 'sv': 'Swedish', + 'tg': 'Tajik', + 'ta': 'Tamil', + 'te': 'Telugu', + 'th': 'Thai', + 'tr': 'Turkish', + 'uk': 'Ukrainian', + 'ur': 'Urdu', + 'uz': 'Uzbek', + 'vi': 'Vietnamese', + 'cy': 'Welsh', + 'xh': 'Xhosa', + 'yi': 'Yiddish', + 'yo': 'Yoruba', + 'zu': 'Zulu' +}; diff --git a/.local/share/jupyter/nbextensions/nbTranslate/mutils.js b/.local/share/jupyter/nbextensions/nbTranslate/mutils.js new file mode 100644 index 0000000000000000000000000000000000000000..6213923bb561245f19d641f53d95573a1c29fb4a --- /dev/null +++ b/.local/share/jupyter/nbextensions/nbTranslate/mutils.js @@ -0,0 +1,201 @@ +// Maths utilitary functions, +// adapted from latex_envs, see github.com/jfbercher/jupyter_latex_envs + +/**************************************************************************************************************** +* Series of elementary functions for manipulating nested environments +* needed to do that because standard regular expressions are not well suited for recursive things +****************************************************************************************************************/ +var OPENINGENV = '#!<', + OPENINGENVre = new RegExp(OPENINGENV, 'g'); +var CLOSINGENV = '#!>', + CLOSINGENVre = new RegExp(CLOSINGENV, 'g'); + +function envSearch(text, env_open, env_close) { + var reg = new RegExp(env_open + '[\\S\\s]*?' + env_close, 'gm'); + var start = text.match(reg); + var env_open_re = new RegExp(env_open); + var env_close_re = new RegExp(env_close); + var retval; + var r = ""; + if (typeof(start[0]) != 'undefined' && start[0] != null) { + var r = start[0].substr(1) + } + var out = env_open_re.test(r) //test if there exists an opening env at level +1 + //of the same kind inside + + if (out) { //in such case: replace the new opening at level +1 and the closing at level + var rnew = r.replace(env_close_re, CLOSINGENV).replace(env_open_re, OPENINGENV) + .replace(/\$\$/g,"!@$!@$") //last replace is because "$$" in the replacement string does not work + var text = text.replace(r, rnew).replace(/!@\$/g,"$"); + if (env_open_re.test(rnew)) { // if it remains nested envs, call the function again + retval = envSearch(text, env_open, env_close); + if (retval !== undefined) { + text = retval; + } + } + return text + } + return text +} + +function nestedEnvSearch(text, env_open, env_close) { + var regtest = new RegExp(env_open + '[\\S\\s]*?' 
+ env_close); + var inmatches = text.match(regtest); + if (inmatches != null) { + for (i = 0; i < inmatches.length; i++) + inmatches[i] = inmatches[i].replace(/\*/g, '\\*') + var n = 0; + env_open = env_open.replace(/\([\\\+\S ]*?\)/g, function() { + return inmatches[++n] + }) + env_close = env_close.replace(/\\\d/g, function(x) { + return inmatches[parseInt(x.substr(1))] + }) + var output = envSearch(text, env_open, env_close) + var matches = output.match(env_open + '([\\S\\s]*?)' + env_close); + matches[0] = matches[0].replace(OPENINGENVre, env_open.replace('\\\\', '\\')) + .replace(CLOSINGENVre, env_close.replace('\\\\', '\\')) + matches[1] = matches[1].replace(OPENINGENVre, env_open.replace('\\\\', '\\')) + .replace(CLOSINGENVre, env_close.replace('\\\\', '\\')) + var result = [matches[0], inmatches[1], matches[1]] + for (i = 0; i < result.length; i++) + result[i] = result[i].replace(/\\\*\}/g, '*}') + return result; + } else return []; +} + + +function envReplaceApply(text, matches, replacement) { + var output; + if (matches.length != 0) { + if (replacement instanceof Function) { + output = text.replace(matches[0], + replacement(matches[0], matches[1], matches[2]) + .replace(/\$\$/g,"!@$!@$")).replace(/!@\$/g,"$") + //last line because "$$" in the replacement string does not work + } else if (typeof replacement == "string") { + output = text.replace(matches[0], replacement) + } + return output + } else { + return text; + } +} + +function nestedEnvReplace(text, env_open, env_close, replacement, flags) { + var list_of_matches = []; + var count = 200; //protection + var matches = nestedEnvSearch(text, env_open, env_close); + if (flags == undefined) { + return envReplaceApply(text, matches, replacement) + } else if (flags.indexOf('g') !== -1) { + var tmp_text = text; // tmp text + while (count-- > 0 & matches.length != 0) { + list_of_matches.push(matches[0]); + tmp_text = tmp_text.replace(matches[0], ""); //suppress from tmp_text + text = envReplaceApply(text, matches, replacement); + matches = nestedEnvSearch(tmp_text, env_open, env_close); + } + return text; + } else { + return text; + } +} + +var textEnvs = {'theorem':'theorem', 'lemma':'lemma', 'remark':'remark', +'example':'example', 'exercise':'exercise', 'corollary':'corollary', +'proposition':'proposition', 'definition':'definition','problem':'problem', +'proof':'proof', 'property':'property', 'itemize':'itemize', 'enumerate':'enumerate'} + +var textCmds = {'textbf':'textbf', 'textit':'textit', 'underline':'underline', +'texttt':'texttt', 'textem':'textem', 'emph':'emph'} +//label and ref not added because their content shall not be translated + +var OPENmath = 'mathid'//'\u003cmathid', + OPENmathRe = new RegExp(OPENmath, 'g'); +var CLOSEmath = ''//'\u003e', + CLOSEmathRe = new RegExp(CLOSEmath, 'g'); + +function removeMaths(text){ + var math=[]; + function replacement(m0,m1,m2) { + if (m1 in textEnvs){ + math.push('\\begin{'+m1+'}'); var id_beg = math.length; + math.push('\\end{'+m1+'}'); var id_end = math.length; + m2 = nestedEnvReplace(m2, '\\\\begin{(\\w+\\\*?)}', '\\\\end{\\1}', replacement, 'g') + return OPENmath + id_beg + CLOSEmath + m2 + OPENmath + id_end + CLOSEmath; + } + else if (m1 in textCmds){ + math.push('\\' + m1 + '{') + math.push('}') + return OPENmath + String(math.length - 1) + CLOSEmath + m2 + OPENmath + math.length + CLOSEmath; + } + else { + math.push(m0) + return OPENmath + math.length + CLOSEmath; + } + } + text = nestedEnvReplace(text, '\\\\begin{(\\w+\\\*?)}', '\\\\end{\\1}', replacement, 'g') + text = 
text.replace(/\\\[([\S\s]*?)\\\]/gm,replacement) + text = text.replace(/\\\(([\S\s]*?)\\\)/gm,replacement) + text = text.replace(/\$\$([\S\s]*?)\$\$/gm,replacement) + text = text.replace(/\$([\S\s]*?)\$/gm,replacement) + text = text.replace(/\\item/gm,replacement) + text = text.replace(/\\([\S]*?){([\S\s]*?)}/gm,replacement) //textcmd + return [math, text] +} + +function restoreMaths(math_and_text) { + var math = math_and_text[0]; + var text = math_and_text[1]; + var newtext; + var OPENmathUnicode = escape(OPENmath).replace(/%u([A-F0-9]{4})|%([A-F0-9]{2})/g, function(_, u, x) { return "\\\\u" + (u || '00' + x).toLowerCase() }); + var CLOSEmathUnicode = escape(CLOSEmath).replace(/%u([A-F0-9]{4})|%([A-F0-9]{2})/g, function(_, u, x) { return "\\\\u" + (u || '00' + x).toLowerCase() }); + var mathDetectRe = new RegExp(OPENmathUnicode+'\\s*?(\\d+)\\s*?'+CLOSEmathUnicode, 'gim'); + var cont = true; + while (cont) { + var newtext = text.replace(mathDetectRe, function(wholeMatch, n) { + return math[n - 1]; + }); + /*var newtext = newtext.replace(/\\u003cmathiid\s*?(\d+)\s*?\\u003e/gim, function(wholeMatch, n) { + return math[n - 1]; + }); */ + cont = text !== newtext; //recurse in text (possible nesting -- just one level) + text=newtext; + } + return text; +} + +var OPENhtml = 'htmlid' + OPENhtmlRe = new RegExp(OPENhtml, 'g'); +var CLOSEhtml = ''//'\u003e', + CLOSEhtmlRe = new RegExp(CLOSEhtml, 'g'); + +function removeHtml(text) { + var html = []; + function replacement(m0, m1) { + html.push(m0) + return OPENhtml + html.length + CLOSEhtml; + + } + text = text.replace(/<(\S[\S\s]*?)\S>/gm, replacement) + return [html, text] +} + + +function restoreHtml(html_and_text) { + var html = html_and_text[0]; + var text = html_and_text[1]; + var newtext; + var OPENhtmlUnicode = escape(OPENhtml).replace(/%u([A-F0-9]{4})|%([A-F0-9]{2})/g, function(_, u, x) { + return "\\\\u" + (u || '00' + x).toLowerCase() }); + var CLOSEhtmlUnicode = escape(CLOSEhtml).replace(/%u([A-F0-9]{4})|%([A-F0-9]{2})/g, function(_, u, x) { + return "\\\\u" + (u || '00' + x).toLowerCase() }); + var htmlDetectRe = new RegExp(OPENhtmlUnicode + '\\s*?(\\d+)\\s*?' + CLOSEhtmlUnicode, 'gim'); + text = text.replace(htmlDetectRe, function(wholeMatch, n) { + return html[n - 1]; + }); + + return text; +} + diff --git a/.local/share/jupyter/nbextensions/nbTranslate/nbTranslate.yaml b/.local/share/jupyter/nbextensions/nbTranslate/nbTranslate.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3432c01bc0efddcaa152dfde64a0264b1cf4c792 --- /dev/null +++ b/.local/share/jupyter/nbextensions/nbTranslate/nbTranslate.yaml @@ -0,0 +1,41 @@ +Type: IPython Notebook Extension +Name: nbTranslate +Description: Helps translate a notebook and/or select the display language. +Link: README.md +Main: main.js +Compatibility: 4.x, 5.x +Parameters: +- name: nbTranslate.hotkey + description: Converts current cell + input_type: hotkey + default: 'Alt-T' +- name: nbTranslate.useGoogleTranslate + description: | + Use Google translate engine + (it is advised to check the result); otherwise conversion will simply copy the current cell contents. + input_type: checkbox + default: true +- name: nbTranslate.sourceLang + description: Source language for conversion; see the list of available languages here. + input_type: text + default: 'en' +- name: nbTranslate.targetLang + description: Target language for conversion; see the list of available languages here. 
+ input_type: text + default: 'fr' +- name: nbTranslate.displayLangs + description: Displayed language(s) in the notebook; a list of languages, e.g. ['en', 'fr'] or ['*'] for all + input_type: list + list_element: + input_type: text + description: Displayed language(s) in the notebook; a list of languages, e.g. ['en', 'fr'] or ['*'] for all + default: ["en", "fr"] +- name: nbTranslate.langInMainMenu + description: | + Display a menu for selecting languages to display (otherwise provides this + menu in the configuration toolbar). + input_type: checkbox + default: true + + + diff --git a/.local/share/jupyter/nbextensions/notify/notify.yaml b/.local/share/jupyter/nbextensions/notify/notify.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ed299a3449014ac93d5033763b1227a439d4713a --- /dev/null +++ b/.local/share/jupyter/nbextensions/notify/notify.yaml @@ -0,0 +1,18 @@ +Type: IPython Notebook Extension +Name: Notify +Description: > + Show a browser notification when the kernel becomes idle again after being busy + for some time - configurable after 0, 5, 10, or 30 seconds busy. +Link: readme.md +Icon: notification.png +Main: notify.js +Compatibility: 4.x, 5.x +Parameters: +- name: notify.sticky + description: Require interactions on notifications to dismiss them. (Chrome only) + input_type: checkbox + default: false +- name: notify.play_sound + description: Play notification sound. + input_type: checkbox + default: false diff --git a/.local/share/jupyter/nbextensions/notify/readme.md b/.local/share/jupyter/nbextensions/notify/readme.md new file mode 100644 index 0000000000000000000000000000000000000000..40f5583ec9f318166fd6e044c84b81955402db2c --- /dev/null +++ b/.local/share/jupyter/nbextensions/notify/readme.md @@ -0,0 +1,48 @@ +# Notebook web notifications + +Jupyter notebook extension to display a web notification to notify you when the +kernel becomes idle. +This can be useful when running tasks that take more than a couple of seconds +to complete. + +The extension has been tested with the most recent versions of Firefox, Chrome +and Safari. + +Initially, a button to request notification permissions is shown in the toolbar. +After notification permissions have been granted, this button is replaced by a +dropdown menu with five choices: Disabled, 0, 5, 10 and 30. +To activate notifications, select a minimum kernel busy time required to +trigger a notification (e.g. if selecting 5, a notification will only be shown +if the kernel was busy for more than 5 seconds). The selection is saved in the +notebook's metadata and restored when the notebook is re-opened. + +You may configure the plugin so that notifications require manual dismissal +before disappearing. Browser support is limited, see +[here](https://developer.mozilla.org/en-US/docs/Web/API/notification/requireInteraction) +to check if your browser supports this. You may also configure the plugin so +that notifications play a sound. + +![notification](notification.png "notification") + + +## Original Source +This extension originally comes from [@sjpfenninger](https://github.com/sjpfenninger)'s [GitHub repository](https://github.com/sjpfenninger/ipython-extensions). + +## Credits + +This extension contains sounds created by RSilveira_88 on freesound.org, licensed +under the CC-BY 3.0 License. Modifications by morrisjim. You may find the +modified version [here](https://freesound.org/people/morrisjm/sounds/268756/) and +the original [here](https://freesound.org/people/RSilveira_88/sounds/216306/).
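+
+## Configuration sketch
+
+The `notify.sticky` and `notify.play_sound` options listed in `notify.yaml` can also be
+pre-set from Python rather than through the configurator page. The snippet below is only a
+sketch: it assumes the dotted parameter names end up as nested keys under a `notify` entry in
+the `notebook` config section, which may differ on your installation; depending on the notebook
+version, the older `IPython.html.services.config` import path used in the ruler readme may be
+needed instead.
+
+```python
+# Hedged sketch: persist the notify options via the classic notebook ConfigManager.
+from notebook.services.config import ConfigManager
+
+cm = ConfigManager()
+# 'sticky' asks for a click to dismiss notifications (Chrome only);
+# 'play_sound' enables the audio cue.
+cm.update('notebook', {'notify': {'sticky': True, 'play_sound': True}})
+```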
+ +## License + +The MIT License (MIT) + +Copyright (c) 2014 Stefan Pfenninger + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.local/share/jupyter/nbextensions/python-markdown/main.css b/.local/share/jupyter/nbextensions/python-markdown/main.css new file mode 100644 index 0000000000000000000000000000000000000000..72604f3e391a3454b842260ec22fa60c0e0b6847 --- /dev/null +++ b/.local/share/jupyter/nbextensions/python-markdown/main.css @@ -0,0 +1,4 @@ + +.notebook-trusted { + color: #555555; +} diff --git a/.local/share/jupyter/nbextensions/python-markdown/python-markdown.png b/.local/share/jupyter/nbextensions/python-markdown/python-markdown.png new file mode 100644 index 0000000000000000000000000000000000000000..585a0038ac56eef5453de4e97875629bbe2bd357 Binary files /dev/null and b/.local/share/jupyter/nbextensions/python-markdown/python-markdown.png differ diff --git a/.local/share/jupyter/nbextensions/python-markdown/trusted.png b/.local/share/jupyter/nbextensions/python-markdown/trusted.png new file mode 100644 index 0000000000000000000000000000000000000000..6f4082d3ed9faad8cfacf52932fb6ff9b7453fbb Binary files /dev/null and b/.local/share/jupyter/nbextensions/python-markdown/trusted.png differ diff --git a/.local/share/jupyter/nbextensions/qtconsole/qtconsole.js b/.local/share/jupyter/nbextensions/qtconsole/qtconsole.js new file mode 100644 index 0000000000000000000000000000000000000000..0a8738d918ee372084ad8a01e57f024728801e85 --- /dev/null +++ b/.local/share/jupyter/nbextensions/qtconsole/qtconsole.js @@ -0,0 +1,24 @@ +// Launch QT Console attached to the current kernel + +define([ + 'base/js/namespace', + 'base/js/events' + ], function(Jupyter, events) { + var load_ipython_extension = function () { + Jupyter.toolbar.add_buttons_group([ + /** + * Button to launch QTConsole + */ + Jupyter.keyboard_manager.actions.register ({ + 'help' : 'Run QTConsole', + 'icon' : 'fa-terminal', + 'handler': function () { + Jupyter.notebook.kernel.execute('%qtconsole') + } + }, 'run-qtconsole', 'qtconsole') + ]); + }; + return { + load_ipython_extension : load_ipython_extension + }; +}); diff --git a/.local/share/jupyter/nbextensions/qtconsole/qtconsole.yaml b/.local/share/jupyter/nbextensions/qtconsole/qtconsole.yaml new file mode 100644 index 0000000000000000000000000000000000000000..102310d818d87493d7955bd2211d4cdbbad4e2e9 --- /dev/null +++ b/.local/share/jupyter/nbextensions/qtconsole/qtconsole.yaml @@ -0,0 +1,6 @@ +Type: IPython Notebook Extension +Name: Launch QTConsole +Link: README.md +Description: Launch a QTConsole 
attached to the running kernel +Main: qtconsole.js +Compatibility: 4.x diff --git a/.local/share/jupyter/nbextensions/rubberband/icon.png b/.local/share/jupyter/nbextensions/rubberband/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..eeac6b28dcb164ef57baa0ef5966dc756dca196c Binary files /dev/null and b/.local/share/jupyter/nbextensions/rubberband/icon.png differ diff --git a/.local/share/jupyter/nbextensions/rubberband/readme.md b/.local/share/jupyter/nbextensions/rubberband/readme.md new file mode 100644 index 0000000000000000000000000000000000000000..f17a953f28a6fedad35c13b76df2712df017f7f4 --- /dev/null +++ b/.local/share/jupyter/nbextensions/rubberband/readme.md @@ -0,0 +1,26 @@ +Rubberband +========== +Multi-Cell selection using a rubberband. This extension is only available for IPython version 3.x. + +Description +----------- + +The *rubberband* extension allows selecting multiple cells. Cells are selected by pressing `shift` or `ctrl`+`shift` + left mouse button click and dragging the rubber band over the cells. + +* `shift` + left mouse button : select cells that are currently touched by the rubberband +* `ctrl` + `shift` + left mouse button : select cells that were touched by the rubberband + +The `ctrl`+`shift` action is useful when scrolling inside the notebook. Scrolling is activated when the mouse reaches the upper or lower boundary of the notebook area. For now, the mouse has to be moved to achieve continuous scrolling. + +A short video demonstrating the rubberband extension can be found here: +[![screenshot](https://cloud.githubusercontent.com/assets/2445216/4668769/b6dd5b72-5567-11e4-9b55-558da6da027c.jpg)](https://youtu.be/TOPfWhqa3oI) + + +Two other extensions make use of this feature: exercise and chrome_clipboard. + +Internals +--------- + +New metadata element added to each cell: +* `cell.metadata.selected` - means this cell is selected + diff --git a/.local/share/jupyter/nbextensions/ruler/readme.md b/.local/share/jupyter/nbextensions/ruler/readme.md new file mode 100644 index 0000000000000000000000000000000000000000..d815e1853cfe73f272b675f7a838f1a647feeaae --- /dev/null +++ b/.local/share/jupyter/nbextensions/ruler/readme.md @@ -0,0 +1,56 @@ +Ruler +===== + +This extension enables the Ruler CodeMirror feature. + + +Configuration +------------- + +You can set the number of characters in the notebook extensions configuration page or use the ConfigManager: + +```Python +from IPython.html.services.config import ConfigManager +ip = get_ipython() +cm = ConfigManager(parent=ip) +cm.update('notebook', {"ruler_column": [80]}) +``` + + +#### CSS patch #### + +Notebook versions from 4.3.0 through 5.1.0dev have a bug in their CodeMirror +CSS padding which causes the ruler to be misplaced (see +[jupyter/notebook#2869](https://github.com/jupyter/notebook/issues/2869) +for details). +This nbextension introduces a css patch to attempt to correct this, but if it +causes problems for you, you can disable it by setting the `ruler_do_css_patch` +config key to `false`. + + +#### Multiple Rulers #### + +To specify multiple rulers, set the `ruler_column` to a list of values, for example: + +```Python +cm.update('notebook', {"ruler_column": [10, 20, 30, 40, 50, 60, 70, 80]}) +``` + +A separate color and style can be specified for each ruler.
+ +```Python +cm.update('notebook', {"color": ["#000000", "#111111", "#222222", "#333333", "#444444", + "#555555", "#666666", "#777777", "#888888", "#999999"]}) +``` + +Creating a repeating pattern for either color or style is as simple as giving a list shorter than the total number of rulers: + +```Python +cm.update('notebook', {"ruler_column": [10, 20, 30, 40, 50, 60, 70, 80]}) +cm.update('notebook', {"color": ["#FF0000", "#00FF00", "#0000FF"]}) +cm.update('notebook', {"style": ["dashed", "dotted"]}) +``` + +will result in `red, green, blue, red, green, blue, red, green, blue, red` and alternating `dashed, dotted`. + +See [here](https://www.w3schools.com/cssref/pr_border-left_style.asp) for other line styles. diff --git a/.local/share/jupyter/nbextensions/ruler/ruler.yaml b/.local/share/jupyter/nbextensions/ruler/ruler.yaml new file mode 100644 index 0000000000000000000000000000000000000000..dee55c832df7e6dc981992e92da6f5e8e2ff300d --- /dev/null +++ b/.local/share/jupyter/nbextensions/ruler/ruler.yaml @@ -0,0 +1,32 @@ +Type: IPython Notebook Extension +Name: Ruler +Description: This extension enables the Ruler CodeMirror feature +Link: readme.md +Icon: icon.png +Main: main.js +Compatibility: 4.x, 5.x +Parameters: + +- name: ruler_column + input_type: list + list_element: + input_type: number + description: Column where ruler is displayed + default: [78] + +- name: ruler_color + input_type: list + list_element: + input_type: color + description: Ruler color + default: ["#ff0000"] + +- name: ruler_linestyle + description: 'Ruler style, e.g. solid, dashed' + input_type: list + default: ['dashed'] + +- name: ruler_do_css_patch + description: apply css patch for ruler padding bug in notebook >= 4.3 + input_type: checkbox + default: true diff --git a/.local/share/jupyter/nbextensions/runtools/cellstate.js b/.local/share/jupyter/nbextensions/runtools/cellstate.js new file mode 100644 index 0000000000000000000000000000000000000000..71a17c3736cd294b98350f5d9278bc831ac0ee6f --- /dev/null +++ b/.local/share/jupyter/nbextensions/runtools/cellstate.js @@ -0,0 +1,20 @@ + + + CodeMirror.defineOption("cellstate", false, function(cm, val, old) { + if (old && old != CodeMirror.Init) { + cm.clearGutter(cm.state.cellState.options.gutter); + cm.state.cellState = null; + cm.off("gutterClick", onGutterClick); + cm.off("change", onChange); + cm.off("viewportChange", onViewportChange); + cm.off("swapDoc", onChange); + } + if (val) { + cm.state.cellState = new State(parseOptions(val)); + updateInViewport(cm); + cm.on("gutterClick", onGutterClick); + cm.on("change", onChange); + cm.on("viewportChange", onViewportChange); + cm.on("swapDoc", onChange); + } + }); diff --git a/.local/share/jupyter/nbextensions/varInspector/README.md b/.local/share/jupyter/nbextensions/varInspector/README.md new file mode 100644 index 0000000000000000000000000000000000000000..35982ce3edd5b0997e02c57bdd697e3180ac6415 --- /dev/null +++ b/.local/share/jupyter/nbextensions/varInspector/README.md @@ -0,0 +1,36 @@ +# Variable Inspector + +## Description and main features + +The Variable Inspector extension, which currently supports python and R kernels, collects all defined variables and displays them in a floating window. The window displays not only the names of variables but also their type, size in memory and content. The columns are sortable. The window is draggable, resizable, collapsible. The list of displayed variables is automatically updated at each cell execution. Variables can be deleted from the workspace by clicking a link. Position and state (displayed/collapsed) are stored in the notebook's metadata and restored at startup. + +The extension supports multiple kernels. To add support for a new kernel (a minimal sketch follows the list below), one has to +- provide a library which loads required modules and defines a function which lists all variables, together with their name, type, size and content. The output of this function must be a JSON representation of a list of objects (one for each variable) with keys 'varName', 'varType', 'varSize', 'varContent', +- provide the command for deleting a variable, as `delete_cmd_prefix` and `delete_cmd_postfix`, e.g. for `rm(variable)`, specify `rm(` and `)`. +- give the command to refresh the list of variables (usually this is a call to the function defined in the library above). This information can be provided either in the source file or in the yaml config file.
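+
+The sketch below illustrates this contract for a hypothetical Python-like kernel. The function
+name and the use of `globals()` are illustrative assumptions; only the output format (a JSON list
+of objects with the keys named above) is what the extension relies on. For such a kernel the
+delete command could be written `del variable`, i.e. a `delete_cmd_prefix` of `'del '` and an
+empty `delete_cmd_postfix`. The Python implementation actually shipped with the extension is in
+`var_list.py` below.
+
+```python
+# Minimal sketch of a per-kernel variable-listing function (names are hypothetical).
+import json
+from sys import getsizeof
+
+def _hypothetical_var_dic_list():
+    out = []
+    for name, value in globals().items():
+        if name.startswith('_'):          # skip private/internal names
+            continue
+        out.append({'varName': name,
+                    'varType': type(value).__name__,
+                    'varSize': str(getsizeof(value)),
+                    'varContent': str(value)[:150]})
+    return json.dumps(out)
+
+# The refresh command registered for the kernel would simply print this JSON:
+print(_hypothetical_var_dic_list())
+```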
+ +In any case, contributions to support further kernels will be very welcome! + +#### Demo: +![](demo.gif) + + +## Configuration +The initial configuration can be given using the IPython-contrib nbextensions facility. It includes: + +- varInspector.window_display - Display at startup or not (default: false) +- varInspector.cols.lenName: (and .lenType, .lenVar) - Width of columns (actually the max number of characters to display in each column) +- varInspector.kernels_config - JSON object defining the kernel-specific code and commands. + + +## Notes +- The displayed size of variables uses the `getsizeof()` python method. This method doesn't work for all types, so the reported size is to be considered with some caution. The extension includes some code to correctly return the size of numpy arrays, pandas Series and DataFrame but the size for some other types may be incorrect. +- The extension builds on some code provided [here](https://github.com/jupyter-widgets/ipywidgets/blob/master/docs/source/examples/Variable%20Inspector.ipynb) (essentially the `_fill` method). +- The extension uses Christian Bach's [table sorter jquery plugin](https://github.com/christianbach/tablesorter). License file is included. + + +## History + +- @jfbercher march 22, 2017 -- initial release +- @jfbercher april 03, 2017 -- multiple kernel support; added support for R kernels. +- @jfbercher june 30, 2017 -- fixed #1014 (use of `%reset` with the IPython kernel) and #1015 (printing with the python 2 kernel). diff --git a/.local/share/jupyter/nbextensions/varInspector/icon.png b/.local/share/jupyter/nbextensions/varInspector/icon.png new file mode 100644 index 0000000000000000000000000000000000000000..47677fb48e9deeb85caae355b39254b93278c4d1 Binary files /dev/null and b/.local/share/jupyter/nbextensions/varInspector/icon.png differ diff --git a/.local/share/jupyter/nbextensions/varInspector/var_list.py b/.local/share/jupyter/nbextensions/varInspector/var_list.py new file mode 100644 index 0000000000000000000000000000000000000000..5b975b5d86703aa4a032cdd54a9ea109d15911d6 --- /dev/null +++ b/.local/share/jupyter/nbextensions/varInspector/var_list.py @@ -0,0 +1,63 @@ +import json +from sys import getsizeof + +from IPython import get_ipython +from IPython.core.magics.namespace import NamespaceMagics +_nms = NamespaceMagics() +_Jupyter = get_ipython() +_nms.shell = _Jupyter.kernel.shell + +try: + import numpy as np +except ImportError: + pass + +def _getsizeof(x): + # return the size of variable x.
Amended version of sys.getsizeof + # which also supports ndarray, Series and DataFrame + if type(x).__name__ in ['ndarray', 'Series']: + return x.nbytes + elif type(x).__name__ == 'DataFrame': + return x.memory_usage().sum() + else: + return getsizeof(x) + +def _getshapeof(x): + #returns the shape of x if it has one + #returns None otherwise - might want to return an empty string for an empty column + try: + return x.shape + except AttributeError: #x does not have a shape + return None + +def _getcontentof(x): + length = 150 + if type(x).__name__ == 'DataFrame': + colnames = ', '.join(x.columns.map(str)) + content = "Column names: %s" % colnames + elif type(x).__name__ == 'Series': + content = "Series [%d rows]" % x.shape + elif type(x).__name__ == 'ndarray': + content = x.__repr__() + else: + if hasattr(x, '__len__'): + if len(x) > length: + content = str(x[:length]) + else: + content = str(x) + if len(content) > 150: + return content[:150] + " ..." + return content + +def var_dic_list(): + types_to_exclude = ['module', 'function', 'builtin_function_or_method', + 'instance', '_Feature', 'type', 'ufunc'] + values = _nms.who_ls() + vardic = [{'varName': v, 'varType': type(eval(v)).__name__, 'varSize': str(_getsizeof(eval(v))), 'varShape': str(_getshapeof(eval(v))) if _getshapeof(eval(v)) else '', 'varContent': _getcontentof(eval(v)) } # noqa + + for v in values if (v not in ['_html', '_nms', 'NamespaceMagics', '_Jupyter']) & (type(eval(v)).__name__ not in types_to_exclude)] # noqa + return json.dumps(vardic) + + +# command to refresh the list of variables +print(var_dic_list()) diff --git a/.local/share/jupyter/nbextensions/zenmode/images/ipynblogo0.png b/.local/share/jupyter/nbextensions/zenmode/images/ipynblogo0.png new file mode 100644 index 0000000000000000000000000000000000000000..e56eb185168d59534c94ffff9bfd10b30902991f Binary files /dev/null and b/.local/share/jupyter/nbextensions/zenmode/images/ipynblogo0.png differ diff --git a/.triton/cache/6e97c2a1f7a095255f6dd5de1807841d/cuda_utils.so b/.triton/cache/6e97c2a1f7a095255f6dd5de1807841d/cuda_utils.so new file mode 100644 index 0000000000000000000000000000000000000000..aab526846f2f5236dbf127b071e8ce2910f5142f Binary files /dev/null and b/.triton/cache/6e97c2a1f7a095255f6dd5de1807841d/cuda_utils.so differ diff --git a/.triton/dump/11759acf26ac56366b171628132485d6/triton_.cubin b/.triton/dump/11759acf26ac56366b171628132485d6/triton_.cubin new file mode 100644 index 0000000000000000000000000000000000000000..468cb5048b4d02c019ce8ea6d83e108833f200d5 Binary files /dev/null and b/.triton/dump/11759acf26ac56366b171628132485d6/triton_.cubin differ diff --git a/.triton/dump/c0c34db8f5ff22b79fc971c63187477a/triton_.ttir b/.triton/dump/c0c34db8f5ff22b79fc971c63187477a/triton_.ttir new file mode 100644 index 0000000000000000000000000000000000000000..0eae227863953d10aa2a47b69618c30ffc4b9683 --- /dev/null +++ b/.triton/dump/c0c34db8f5ff22b79fc971c63187477a/triton_.ttir @@ -0,0 +1,84 @@ +module { + tt.func public @triton__0d1d2d3d4d5d6d7d8d9d10de11de(%arg0: !tt.ptr {tt.divisibility = 16 : i32}, %arg1: !tt.ptr {tt.divisibility = 16 : i32}, %arg2: !tt.ptr {tt.divisibility = 16 : i32}, %arg3: !tt.ptr {tt.divisibility = 16 : i32}, %arg4: !tt.ptr {tt.divisibility = 16 : i32}, %arg5: !tt.ptr {tt.divisibility = 16 : i32}, %arg6: !tt.ptr {tt.divisibility = 16 : i32}, %arg7: !tt.ptr {tt.divisibility = 16 : i32}, %arg8: !tt.ptr {tt.divisibility = 16 : i32}, %arg9: !tt.ptr {tt.divisibility = 16 : i32}, %arg10: i32 {tt.divisibility = 16 : i32, 
tt.max_divisibility = 16 : i32}, %arg11: i32 {tt.divisibility = 16 : i32, tt.max_divisibility = 16 : i32}) attributes {noinline = false} { + %c256_i32 = arith.constant 256 : i32 + %cst = arith.constant dense<0.000000e+00> : tensor<256xbf16> + %cst_0 = arith.constant 0.000000e+00 : f32 + %cst_1 = arith.constant 2.560000e+02 : f32 + %cst_2 = arith.constant 9.99999974E-6 : f32 + %cst_3 = arith.constant dense<0.000000e+00> : tensor<256xf32> + %cst_4 = arith.constant dense<256> : tensor<256xi32> + %0 = tt.get_program_id x : i32 + %1 = tt.make_range {end = 256 : i32, start = 0 : i32} : tensor<256xi32> + %2 = arith.cmpi slt, %1, %cst_4 : tensor<256xi32> + %3 = arith.muli %0, %c256_i32 : i32 + %4 = tt.splat %3 : (i32) -> tensor<256xi32> + %5 = arith.addi %1, %4 : tensor<256xi32> + %6 = tt.splat %arg1 : (!tt.ptr) -> tensor<256x!tt.ptr> + %7 = tt.addptr %6, %5 : tensor<256x!tt.ptr>, tensor<256xi32> + %8 = tt.load %7, %2, %cst_3 {cache = 1 : i32, evict = 1 : i32, isVolatile = false} : tensor<256xf32> + %9 = tt.splat %arg2 : (!tt.ptr) -> tensor<256x!tt.ptr> + %10 = tt.addptr %9, %5 : tensor<256x!tt.ptr>, tensor<256xi32> + %11 = tt.load %10, %2, %cst {cache = 1 : i32, evict = 1 : i32, isVolatile = false} : tensor<256xbf16> + %12 = arith.extf %11 : tensor<256xbf16> to tensor<256xf32> + %13 = tt.splat %arg3 : (!tt.ptr) -> tensor<256x!tt.ptr> + %14 = tt.addptr %13, %5 : tensor<256x!tt.ptr>, tensor<256xi32> + %15 = tt.load %14, %2, %cst {cache = 1 : i32, evict = 1 : i32, isVolatile = false} : tensor<256xbf16> + %16 = arith.extf %15 : tensor<256xbf16> to tensor<256xf32> + %17 = tt.splat %arg4 : (!tt.ptr) -> tensor<256x!tt.ptr> + %18 = tt.addptr %17, %5 : tensor<256x!tt.ptr>, tensor<256xi32> + %19 = tt.load %18, %2, %cst {cache = 1 : i32, evict = 1 : i32, isVolatile = false} : tensor<256xbf16> + %20 = arith.extf %19 : tensor<256xbf16> to tensor<256xf32> + %21 = tt.splat %arg5 : (!tt.ptr) -> tensor<256x!tt.ptr> + %22 = tt.addptr %21, %5 : tensor<256x!tt.ptr>, tensor<256xi32> + %23 = tt.load %22, %2, %cst {cache = 1 : i32, evict = 1 : i32, isVolatile = false} : tensor<256xbf16> + %24 = arith.extf %23 : tensor<256xbf16> to tensor<256xf32> + %25 = tt.splat %arg6 : (!tt.ptr) -> tensor<256x!tt.ptr> + %26 = tt.addptr %25, %1 : tensor<256x!tt.ptr>, tensor<256xi32> + %27 = tt.load %26, %2, %cst_3 {cache = 1 : i32, evict = 3 : i32, isVolatile = false} : tensor<256xf32> + %28 = arith.addf %8, %12 : tensor<256xf32> + %29 = arith.addf %28, %16 : tensor<256xf32> + %30 = arith.addf %29, %20 : tensor<256xf32> + %31 = arith.addf %30, %24 : tensor<256xf32> + %32 = arith.select %2, %31, %cst_3 : tensor<256xi1>, tensor<256xf32> + %33 = "tt.reduce"(%32) <{axis = 0 : i32}> ({ + ^bb0(%arg12: f32, %arg13: f32): + %59 = arith.addf %arg12, %arg13 : f32 + tt.reduce.return %59 : f32 + }) : (tensor<256xf32>) -> f32 + %34 = arith.addf %33, %cst_0 : f32 + %35 = arith.divf %34, %cst_1 : f32 + %36 = tt.splat %35 : (f32) -> tensor<1xf32> + %37 = tt.splat %35 : (f32) -> tensor<256xf32> + %38 = arith.subf %31, %37 : tensor<256xf32> + %39 = arith.mulf %38, %38 : tensor<256xf32> + %40 = arith.select %2, %39, %cst_3 : tensor<256xi1>, tensor<256xf32> + %41 = "tt.reduce"(%40) <{axis = 0 : i32}> ({ + ^bb0(%arg12: f32, %arg13: f32): + %59 = arith.addf %arg12, %arg13 : f32 + tt.reduce.return %59 : f32 + }) : (tensor<256xf32>) -> f32 + %42 = arith.addf %41, %cst_0 : f32 + %43 = arith.divf %42, %cst_1 : f32 + %44 = arith.addf %43, %cst_2 : f32 + %45 = tt.extern_elementwise %44 {libname = "libdevice", libpath = 
"/usr/local/lib/python3.10/dist-packages/triton/language/../third_party/cuda/lib/libdevice.10.bc", pure = true, symbol = "__nv_rsqrtf"} : (f32) -> f32 + %46 = tt.splat %45 : (f32) -> tensor<1xf32> + %47 = tt.splat %45 : (f32) -> tensor<256xf32> + %48 = arith.mulf %38, %47 : tensor<256xf32> + %49 = arith.mulf %48, %27 : tensor<256xf32> + %50 = tt.splat %arg7 : (!tt.ptr) -> tensor<256x!tt.ptr> + %51 = tt.addptr %50, %5 : tensor<256x!tt.ptr>, tensor<256xi32> + tt.store %51, %31, %2 {cache = 1 : i32, evict = 1 : i32} : tensor<256xf32> + gpu.barrier + %52 = tt.addptr %arg0, %0 : !tt.ptr, i32 + %53 = tt.splat %52 : (!tt.ptr) -> tensor<1x!tt.ptr> + tt.store %53, %46 {cache = 1 : i32, evict = 1 : i32} : tensor<1xf32> + %54 = tt.splat %arg9 : (!tt.ptr) -> tensor<256x!tt.ptr> + %55 = tt.addptr %54, %5 : tensor<256x!tt.ptr>, tensor<256xi32> + %56 = arith.truncf %49 : tensor<256xf32> to tensor<256xbf16> + tt.store %55, %56, %2 {cache = 1 : i32, evict = 1 : i32} : tensor<256xbf16> + %57 = tt.addptr %arg8, %0 : !tt.ptr, i32 + %58 = tt.splat %57 : (!tt.ptr) -> tensor<1x!tt.ptr> + tt.store %58, %36 {cache = 1 : i32, evict = 1 : i32} : tensor<1xf32> + tt.return + } +}