Columns: in_source_id (string, length 13-58), issue (string, length 3-241k), before_files (list, length 0-3), after_files (list, length 0-3), pr_diff (string, length 109-107M)
angr__angr-1669
angr should not require futures In [setup.py](https://github.com/angr/angr/blob/c2cf015f78bd060b263e80627f5962b3062e0ea7/setup.py#L145), a dependency on [futures](https://pypi.org/project/futures/) is declared. However, `futures` is a backport to Python2 of the `concurrent.futures` standard library module available in Python3. The only occurrences of `futures` at this point are in [exploration_techniques/threading.py](https://github.com/angr/angr/blob/8edb29f5f885f029d2e97fba470063c3d78f7832/angr/exploration_techniques/threading.py). (Maybe) Relates to #1277 .
[ { "content": "# pylint: disable=no-name-in-module,import-error,unused-variable\nimport os\nimport sys\nimport subprocess\nimport pkg_resources\nimport shutil\nimport platform\nimport glob\n\nif bytes is str:\n raise Exception(\"\"\"\n\n=-=-=-=-=-=-=-=-=-=-=-=-= WELCOME TO THE FUTURE! =-=-=-=-=-=-=-=-=-=-=-=-=-=\n\nangr has transitioned to python 3. Due to the small size of the team behind it,\nwe can't reasonably maintain compatibility between both python 2 and python 3.\nIf you want to continue using the most recent version of angr (you definitely\nwant that, trust us) you should upgrade to python 3. It's like getting your\nvaccinations. It hurts a little bit initially but in the end it's worth it.\n\nIf you are staying on python 2 and would like to make sure you don't get\nincompatible versions, make sure your pip is at least version 9.0, and it will\nuse our metadata to implicitly avoid them.\n\nFor more information, see here: https://docs.angr.io/appendix/migration\n\nGood luck!\n\"\"\")\n\ntry:\n from setuptools import setup\n from setuptools import find_packages\n packages = find_packages()\nexcept ImportError:\n from distutils.core import setup\n packages = [x.strip('./').replace('/','.') for x in os.popen('find -name \"__init__.py\" | xargs -n1 dirname').read().strip().split('\\n')]\n\nfrom distutils.util import get_platform\nfrom distutils.errors import LibError\nfrom distutils.command.build import build as _build\nfrom distutils.command.clean import clean as _clean\n\nif sys.platform == 'darwin':\n library_file = \"angr_native.dylib\"\nelif sys.platform in ('win32', 'cygwin'):\n library_file = \"angr_native.dll\"\nelse:\n library_file = \"angr_native.so\"\n\ndef _build_native():\n try:\n import unicorn\n import pyvex\n except ImportError:\n raise LibError(\"You must install unicorn and pyvex before building angr\")\n\n env = os.environ.copy()\n env_data = (('UNICORN_INCLUDE_PATH', 'unicorn', 'include'),\n ('UNICORN_LIB_PATH', 'unicorn', 'lib'),\n ('UNICORN_LIB_FILE', 'unicorn', 'lib\\\\unicorn.lib'),\n ('PYVEX_INCLUDE_PATH', 'pyvex', 'include'),\n ('PYVEX_LIB_PATH', 'pyvex', 'lib'),\n ('PYVEX_LIB_FILE', 'pyvex', 'lib\\\\pyvex.lib'))\n for var, pkg, fnm in env_data:\n try:\n env[var] = pkg_resources.resource_filename(pkg, fnm)\n except KeyError:\n pass\n\n cmd1 = ['nmake', '/f', 'Makefile-win']\n cmd2 = ['make']\n for cmd in (cmd1, cmd2):\n try:\n if subprocess.call(cmd, cwd='native', env=env) != 0:\n raise LibError('Unable to build angr_native')\n break\n except OSError:\n continue\n else:\n raise LibError('Unable to build angr_native')\n\n shutil.rmtree('angr/lib', ignore_errors=True)\n os.mkdir('angr/lib')\n shutil.copy(os.path.join('native', library_file), 'angr/lib')\n\ndef _clean_native():\n oglob = glob.glob('native/*.o')\n oglob += glob.glob('native/*.obj')\n oglob += glob.glob('native/*.so')\n oglob += glob.glob('native/*.dll')\n oglob += glob.glob('native/*.dylib')\n for fname in oglob:\n os.unlink(fname)\n\nclass build(_build):\n def run(self, *args):\n self.execute(_build_native, (), msg='Building angr_native')\n _build.run(self, *args)\n\nclass clean(_clean):\n def run(self, *args):\n self.execute(_clean_native, (), msg='Cleaning angr_native')\n _clean.run(self, *args)\n\ncmdclass = {\n 'build': build,\n 'clean': clean,\n}\n\ntry:\n from setuptools.command.develop import develop as _develop\n class develop(_develop):\n def run(self, *args):\n self.execute(_build_native, (), msg='Building angr_native')\n _develop.run(self, *args)\n\n cmdclass['develop'] = 
develop\nexcept ImportError:\n pass\n\nif 'bdist_wheel' in sys.argv and '--plat-name' not in sys.argv:\n sys.argv.append('--plat-name')\n name = get_platform()\n if 'linux' in name:\n # linux_* platform tags are disallowed because the python ecosystem is fubar\n # linux builds should be built in the centos 5 vm for maximum compatibility\n sys.argv.append('manylinux1_' + platform.machine())\n else:\n # https://www.python.org/dev/peps/pep-0425/\n sys.argv.append(name.replace('.', '_').replace('-', '_'))\n\nsetup(\n name='angr',\n version='8.19.4.5',\n python_requires='>=3.5',\n description='A multi-architecture binary analysis toolkit, with the ability to perform dynamic symbolic execution and various static analyses on binaries',\n url='https://github.com/angr/angr',\n packages=packages,\n install_requires=[\n 'sortedcontainers',\n 'cachetools',\n 'capstone>=3.0.5rc2',\n 'cooldict',\n 'dpkt',\n 'futures; python_version == \"2.7\"',\n 'mulpyplexer',\n 'networkx>=2.0',\n 'progressbar',\n 'rpyc',\n 'cffi>=1.7.0',\n 'unicorn',\n 'archinfo==8.19.4.5',\n 'claripy==8.19.4.5',\n 'cle==8.19.4.5',\n 'pyvex==8.19.4.5',\n 'ailment==8.19.4.5',\n 'GitPython',\n 'psutil',\n 'pycparser>=2.18',\n 'itanium_demangler',\n 'protobuf',\n ],\n setup_requires=['unicorn', 'pyvex'],\n cmdclass=cmdclass,\n include_package_data=True,\n package_data={\n 'angr': ['lib/*']\n }\n)\n", "path": "setup.py" } ]
[ { "content": "# pylint: disable=no-name-in-module,import-error,unused-variable\nimport os\nimport sys\nimport subprocess\nimport pkg_resources\nimport shutil\nimport platform\nimport glob\n\nif bytes is str:\n raise Exception(\"\"\"\n\n=-=-=-=-=-=-=-=-=-=-=-=-= WELCOME TO THE FUTURE! =-=-=-=-=-=-=-=-=-=-=-=-=-=\n\nangr has transitioned to python 3. Due to the small size of the team behind it,\nwe can't reasonably maintain compatibility between both python 2 and python 3.\nIf you want to continue using the most recent version of angr (you definitely\nwant that, trust us) you should upgrade to python 3. It's like getting your\nvaccinations. It hurts a little bit initially but in the end it's worth it.\n\nIf you are staying on python 2 and would like to make sure you don't get\nincompatible versions, make sure your pip is at least version 9.0, and it will\nuse our metadata to implicitly avoid them.\n\nFor more information, see here: https://docs.angr.io/appendix/migration\n\nGood luck!\n\"\"\")\n\ntry:\n from setuptools import setup\n from setuptools import find_packages\n packages = find_packages()\nexcept ImportError:\n from distutils.core import setup\n packages = [x.strip('./').replace('/','.') for x in os.popen('find -name \"__init__.py\" | xargs -n1 dirname').read().strip().split('\\n')]\n\nfrom distutils.util import get_platform\nfrom distutils.errors import LibError\nfrom distutils.command.build import build as _build\nfrom distutils.command.clean import clean as _clean\n\nif sys.platform == 'darwin':\n library_file = \"angr_native.dylib\"\nelif sys.platform in ('win32', 'cygwin'):\n library_file = \"angr_native.dll\"\nelse:\n library_file = \"angr_native.so\"\n\ndef _build_native():\n try:\n import unicorn\n import pyvex\n except ImportError:\n raise LibError(\"You must install unicorn and pyvex before building angr\")\n\n env = os.environ.copy()\n env_data = (('UNICORN_INCLUDE_PATH', 'unicorn', 'include'),\n ('UNICORN_LIB_PATH', 'unicorn', 'lib'),\n ('UNICORN_LIB_FILE', 'unicorn', 'lib\\\\unicorn.lib'),\n ('PYVEX_INCLUDE_PATH', 'pyvex', 'include'),\n ('PYVEX_LIB_PATH', 'pyvex', 'lib'),\n ('PYVEX_LIB_FILE', 'pyvex', 'lib\\\\pyvex.lib'))\n for var, pkg, fnm in env_data:\n try:\n env[var] = pkg_resources.resource_filename(pkg, fnm)\n except KeyError:\n pass\n\n cmd1 = ['nmake', '/f', 'Makefile-win']\n cmd2 = ['make']\n for cmd in (cmd1, cmd2):\n try:\n if subprocess.call(cmd, cwd='native', env=env) != 0:\n raise LibError('Unable to build angr_native')\n break\n except OSError:\n continue\n else:\n raise LibError('Unable to build angr_native')\n\n shutil.rmtree('angr/lib', ignore_errors=True)\n os.mkdir('angr/lib')\n shutil.copy(os.path.join('native', library_file), 'angr/lib')\n\ndef _clean_native():\n oglob = glob.glob('native/*.o')\n oglob += glob.glob('native/*.obj')\n oglob += glob.glob('native/*.so')\n oglob += glob.glob('native/*.dll')\n oglob += glob.glob('native/*.dylib')\n for fname in oglob:\n os.unlink(fname)\n\nclass build(_build):\n def run(self, *args):\n self.execute(_build_native, (), msg='Building angr_native')\n _build.run(self, *args)\n\nclass clean(_clean):\n def run(self, *args):\n self.execute(_clean_native, (), msg='Cleaning angr_native')\n _clean.run(self, *args)\n\ncmdclass = {\n 'build': build,\n 'clean': clean,\n}\n\ntry:\n from setuptools.command.develop import develop as _develop\n class develop(_develop):\n def run(self, *args):\n self.execute(_build_native, (), msg='Building angr_native')\n _develop.run(self, *args)\n\n cmdclass['develop'] = 
develop\nexcept ImportError:\n pass\n\nif 'bdist_wheel' in sys.argv and '--plat-name' not in sys.argv:\n sys.argv.append('--plat-name')\n name = get_platform()\n if 'linux' in name:\n # linux_* platform tags are disallowed because the python ecosystem is fubar\n # linux builds should be built in the centos 5 vm for maximum compatibility\n sys.argv.append('manylinux1_' + platform.machine())\n else:\n # https://www.python.org/dev/peps/pep-0425/\n sys.argv.append(name.replace('.', '_').replace('-', '_'))\n\nsetup(\n name='angr',\n version='8.19.4.5',\n python_requires='>=3.5',\n description='A multi-architecture binary analysis toolkit, with the ability to perform dynamic symbolic execution and various static analyses on binaries',\n url='https://github.com/angr/angr',\n packages=packages,\n install_requires=[\n 'sortedcontainers',\n 'cachetools',\n 'capstone>=3.0.5rc2',\n 'cooldict',\n 'dpkt',\n 'mulpyplexer',\n 'networkx>=2.0',\n 'progressbar',\n 'rpyc',\n 'cffi>=1.7.0',\n 'unicorn',\n 'archinfo==8.19.4.5',\n 'claripy==8.19.4.5',\n 'cle==8.19.4.5',\n 'pyvex==8.19.4.5',\n 'ailment==8.19.4.5',\n 'GitPython',\n 'psutil',\n 'pycparser>=2.18',\n 'itanium_demangler',\n 'protobuf',\n ],\n setup_requires=['unicorn', 'pyvex'],\n cmdclass=cmdclass,\n include_package_data=True,\n package_data={\n 'angr': ['lib/*']\n }\n)\n", "path": "setup.py" } ]
diff --git a/setup.py b/setup.py index 67ee6acfdc5..6db63516e05 100644 --- a/setup.py +++ b/setup.py @@ -142,7 +142,6 @@ def run(self, *args): 'capstone>=3.0.5rc2', 'cooldict', 'dpkt', - 'futures; python_version == "2.7"', 'mulpyplexer', 'networkx>=2.0', 'progressbar',
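For context on the record above: the PyPI `futures` package only backports `concurrent.futures` to Python 2, and the same `setup.py` already declares `python_requires='>=3.5'`, so the threading exploration technique can rely on the standard-library module directly. A minimal, hypothetical sketch (not angr's actual code) of that stdlib usage:

```python
# Hypothetical sketch: on Python 3 the executor comes from the standard library,
# so no separate "futures" requirement is needed.
from concurrent.futures import ThreadPoolExecutor


def step_states_in_parallel(step, states, max_workers=8):
    """Step each state in a worker thread and collect the results."""
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        return list(executor.map(step, states))


if __name__ == "__main__":
    print(step_states_in_parallel(lambda s: s + 1, [1, 2, 3]))
```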
django-hijack__django-hijack-693
Missing staticfiles manifest entry for 'hijack/hijack.js' When trying to access the User Admin, the `hijack.js` file fails to load when DEBUG is enabled. Under production settings with a manifest based staticfiles storage, it results in an exception. ``` Missing staticfiles manifest entry for 'hijack/hijack.js' ``` This is possibly the culprit: https://github.com/django-hijack/django-hijack/blob/3966f3758fbe5490c79957ca3b15b81e300616c0/hijack/contrib/admin/admin.py#L19 Shouldn't it be `.mjs` instead?
[ { "content": "import django\nfrom django import forms\nfrom django.shortcuts import resolve_url\nfrom django.template.loader import render_to_string\nfrom django.utils.translation import gettext_lazy as _\n\nfrom hijack.conf import settings\nfrom hijack.forms import ESM\n\n\nclass HijackUserAdminMixin:\n \"\"\"Add hijack button to changelist admin view.\"\"\"\n\n hijack_success_url = None\n \"\"\"Return URL to which one will be forwarded to after hijacking another user.\"\"\"\n\n @property\n def media(self):\n return super().media + forms.Media(js=[ESM(\"hijack/hijack.js\")])\n\n def get_hijack_user(self, obj):\n \"\"\"\n Return the user based on the current object.\n\n This method may be overridden to support hijack keys on related objects.\n \"\"\"\n return obj\n\n def get_hijack_success_url(self, request, obj):\n \"\"\"Return URL to which one will be forwarded to after hijacking another user.\"\"\"\n success_url = settings.LOGIN_REDIRECT_URL\n if self.hijack_success_url:\n success_url = self.hijack_success_url\n elif hasattr(obj, \"get_absolute_url\"):\n success_url = obj\n return resolve_url(success_url)\n\n def hijack_button(self, request, obj):\n \"\"\"\n Render hijack button.\n\n Should the user only be a related object we include the username in the button\n to ensure deliberate action. However, the name is omitted in the user admin,\n as the table layout suggests that the button targets the current user.\n \"\"\"\n user = self.get_hijack_user(obj)\n return render_to_string(\n \"hijack/contrib/admin/button.html\",\n {\n \"request\": request,\n \"another_user\": user,\n \"username\": str(user),\n \"is_user_admin\": self.model == type(user),\n \"next\": self.get_hijack_success_url(request, obj),\n },\n request=request,\n )\n\n def get_changelist_instance(self, request):\n # We inject the request for the CSRF token, see also:\n # https://code.djangoproject.com/ticket/13659\n def hijack_field(obj):\n return self.hijack_button(request, obj)\n\n hijack_field.short_description = _(\"hijack user\")\n\n # we\n list_display = [*self.get_list_display(request), hijack_field]\n # Same as super method, see also:\n # https://github.com/django/django/blob/76c0b32f826469320c59709d31e2f2126dd7c505/django/contrib/admin/options.py#L724-L750\n list_display_links = self.get_list_display_links(request, list_display)\n # Add the action checkboxes if any actions are available.\n if self.get_actions(request):\n list_display = [\"action_checkbox\", *list_display]\n sortable_by = self.get_sortable_by(request)\n ChangeList = self.get_changelist(request)\n args = [\n request,\n self.model,\n list_display,\n list_display_links,\n self.get_list_filter(request),\n self.date_hierarchy,\n self.get_search_fields(request),\n self.get_list_select_related(request),\n self.list_per_page,\n self.list_max_show_all,\n self.list_editable,\n self,\n sortable_by,\n ]\n if django.VERSION >= (4, 0):\n args.append(self.search_help_text)\n return ChangeList(*args)\n", "path": "hijack/contrib/admin/admin.py" } ]
[ { "content": "import django\nfrom django import forms\nfrom django.shortcuts import resolve_url\nfrom django.template.loader import render_to_string\nfrom django.utils.translation import gettext_lazy as _\n\nfrom hijack.conf import settings\nfrom hijack.forms import ESM\n\n\nclass HijackUserAdminMixin:\n \"\"\"Add hijack button to changelist admin view.\"\"\"\n\n hijack_success_url = None\n \"\"\"Return URL to which one will be forwarded to after hijacking another user.\"\"\"\n\n @property\n def media(self):\n return super().media + forms.Media(js=[ESM(\"hijack/hijack.mjs\")])\n\n def get_hijack_user(self, obj):\n \"\"\"\n Return the user based on the current object.\n\n This method may be overridden to support hijack keys on related objects.\n \"\"\"\n return obj\n\n def get_hijack_success_url(self, request, obj):\n \"\"\"Return URL to which one will be forwarded to after hijacking another user.\"\"\"\n success_url = settings.LOGIN_REDIRECT_URL\n if self.hijack_success_url:\n success_url = self.hijack_success_url\n elif hasattr(obj, \"get_absolute_url\"):\n success_url = obj\n return resolve_url(success_url)\n\n def hijack_button(self, request, obj):\n \"\"\"\n Render hijack button.\n\n Should the user only be a related object we include the username in the button\n to ensure deliberate action. However, the name is omitted in the user admin,\n as the table layout suggests that the button targets the current user.\n \"\"\"\n user = self.get_hijack_user(obj)\n return render_to_string(\n \"hijack/contrib/admin/button.html\",\n {\n \"request\": request,\n \"another_user\": user,\n \"username\": str(user),\n \"is_user_admin\": self.model == type(user),\n \"next\": self.get_hijack_success_url(request, obj),\n },\n request=request,\n )\n\n def get_changelist_instance(self, request):\n # We inject the request for the CSRF token, see also:\n # https://code.djangoproject.com/ticket/13659\n def hijack_field(obj):\n return self.hijack_button(request, obj)\n\n hijack_field.short_description = _(\"hijack user\")\n\n # we\n list_display = [*self.get_list_display(request), hijack_field]\n # Same as super method, see also:\n # https://github.com/django/django/blob/76c0b32f826469320c59709d31e2f2126dd7c505/django/contrib/admin/options.py#L724-L750\n list_display_links = self.get_list_display_links(request, list_display)\n # Add the action checkboxes if any actions are available.\n if self.get_actions(request):\n list_display = [\"action_checkbox\", *list_display]\n sortable_by = self.get_sortable_by(request)\n ChangeList = self.get_changelist(request)\n args = [\n request,\n self.model,\n list_display,\n list_display_links,\n self.get_list_filter(request),\n self.date_hierarchy,\n self.get_search_fields(request),\n self.get_list_select_related(request),\n self.list_per_page,\n self.list_max_show_all,\n self.list_editable,\n self,\n sortable_by,\n ]\n if django.VERSION >= (4, 0):\n args.append(self.search_help_text)\n return ChangeList(*args)\n", "path": "hijack/contrib/admin/admin.py" } ]
diff --git a/hijack/contrib/admin/admin.py b/hijack/contrib/admin/admin.py index e51622f6..27d4c148 100644 --- a/hijack/contrib/admin/admin.py +++ b/hijack/contrib/admin/admin.py @@ -16,7 +16,7 @@ class HijackUserAdminMixin: @property def media(self): - return super().media + forms.Media(js=[ESM("hijack/hijack.js")]) + return super().media + forms.Media(js=[ESM("hijack/hijack.mjs")]) def get_hijack_user(self, obj): """
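For context on the record above: a manifest-based storage resolves static paths through `staticfiles.json`, so referencing a file name that was never collected raises an exception instead of merely producing a 404 as it does with `DEBUG = True`. A rough sketch of that failure mode, assuming a configured Django project in which `collectstatic` has already run with the default strict manifest:

```python
# Rough sketch (assumes a configured Django project after collectstatic):
# looking up a path with no manifest entry raises ValueError, which is the
# "Missing staticfiles manifest entry" error reported in the issue above.
from django.contrib.staticfiles.storage import ManifestStaticFilesStorage

storage = ManifestStaticFilesStorage()

try:
    storage.url("hijack/hijack.js")      # no entry for the old .js name
except ValueError as exc:
    print(exc)

print(storage.url("hijack/hijack.mjs"))  # resolves once the .mjs file is collected
```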
archlinux__archinstall-504
Incorrect line ending after "progressbar" finishes I thought this would be handled in: https://github.com/archlinux/archinstall/blob/54a693be4fa2fbce83fd894b5ac3b0909f3a1e10/archinstall/lib/general.py#L157-L161 ![2021-05-21-093818_1024x795_scrot](https://user-images.githubusercontent.com/861439/119100996-cc082780-ba18-11eb-844d-aafd34ea583c.png)
[ { "content": "import hashlib\nimport json\nimport logging\nimport os\nimport pty\nimport shlex\nimport subprocess\nimport sys\nimport time\nfrom datetime import datetime, date\nfrom select import epoll, EPOLLIN, EPOLLHUP\nfrom typing import Union\n\nfrom .exceptions import *\nfrom .output import log\n\n\ndef gen_uid(entropy_length=256):\n\treturn hashlib.sha512(os.urandom(entropy_length)).hexdigest()\n\n\ndef multisplit(s, splitters):\n\ts = [s, ]\n\tfor key in splitters:\n\t\tns = []\n\t\tfor obj in s:\n\t\t\tx = obj.split(key)\n\t\t\tfor index, part in enumerate(x):\n\t\t\t\tif len(part):\n\t\t\t\t\tns.append(part)\n\t\t\t\tif index < len(x) - 1:\n\t\t\t\t\tns.append(key)\n\t\ts = ns\n\treturn s\n\n\ndef locate_binary(name):\n\tfor PATH in os.environ['PATH'].split(':'):\n\t\tfor root, folders, files in os.walk(PATH):\n\t\t\tfor file in files:\n\t\t\t\tif file == name:\n\t\t\t\t\treturn os.path.join(root, file)\n\t\t\tbreak # Don't recurse\n\n\traise RequirementError(f\"Binary {name} does not exist.\")\n\n\nclass JsonEncoder:\n\tdef _encode(obj):\n\t\tif isinstance(obj, dict):\n\t\t\t# We'll need to iterate not just the value that default() usually gets passed\n\t\t\t# But also iterate manually over each key: value pair in order to trap the keys.\n\n\t\t\tcopy = {}\n\t\t\tfor key, val in list(obj.items()):\n\t\t\t\tif isinstance(val, dict):\n\t\t\t\t\t# This, is a EXTREMELY ugly hack.. but it's the only quick way I can think of to trigger a encoding of sub-dictionaries.\n\t\t\t\t\tval = json.loads(json.dumps(val, cls=JSON))\n\t\t\t\telse:\n\t\t\t\t\tval = JsonEncoder._encode(val)\n\n\t\t\t\tif type(key) == str and key[0] == '!':\n\t\t\t\t\tcopy[JsonEncoder._encode(key)] = '******'\n\t\t\t\telse:\n\t\t\t\t\tcopy[JsonEncoder._encode(key)] = val\n\t\t\treturn copy\n\t\telif hasattr(obj, 'json'):\n\t\t\treturn obj.json()\n\t\telif hasattr(obj, '__dump__'):\n\t\t\treturn obj.__dump__()\n\t\telif isinstance(obj, (datetime, date)):\n\t\t\treturn obj.isoformat()\n\t\telif isinstance(obj, (list, set, tuple)):\n\t\t\tr = []\n\t\t\tfor item in obj:\n\t\t\t\tr.append(json.loads(json.dumps(item, cls=JSON)))\n\t\t\treturn r\n\t\telse:\n\t\t\treturn obj\n\n\nclass JSON(json.JSONEncoder, json.JSONDecoder):\n\tdef _encode(self, obj):\n\t\treturn JsonEncoder._encode(obj)\n\n\tdef encode(self, obj):\n\t\treturn super(JSON, self).encode(self._encode(obj))\n\n\nclass SysCommandWorker:\n\tdef __init__(self, cmd, callbacks=None, peak_output=False, environment_vars=None, logfile=None, working_directory='./'):\n\t\tif not callbacks:\n\t\t\tcallbacks = {}\n\t\tif not environment_vars:\n\t\t\tenvironment_vars = {}\n\n\t\tif type(cmd) is str:\n\t\t\tcmd = shlex.split(cmd)\n\n\t\tif cmd[0][0] != '/' and cmd[0][:2] != './':\n\t\t\t# \"which\" doesn't work as it's a builtin to bash.\n\t\t\t# It used to work, but for whatever reason it doesn't anymore.\n\t\t\t# We there for fall back on manual lookup in os.PATH\n\t\t\tcmd[0] = locate_binary(cmd[0])\n\n\t\tself.cmd = cmd\n\t\tself.callbacks = callbacks\n\t\tself.peak_output = peak_output\n\t\tself.environment_vars = environment_vars\n\t\tself.logfile = logfile\n\t\tself.working_directory = working_directory\n\n\t\tself.exit_code = None\n\t\tself._trace_log = b''\n\t\tself._trace_log_pos = 0\n\t\tself.poll_object = epoll()\n\t\tself.child_fd = None\n\t\tself.started = None\n\t\tself.ended = None\n\n\tdef __contains__(self, key: bytes):\n\t\t\"\"\"\n\t\tContains will also move the current buffert position forward.\n\t\tThis is to avoid re-checking the same data when 
looking for output.\n\t\t\"\"\"\n\t\tassert type(key) == bytes\n\n\t\tif (contains := key in self._trace_log[self._trace_log_pos:]):\n\t\t\tself._trace_log_pos += self._trace_log[self._trace_log_pos:].find(key) + len(key)\n\n\t\treturn contains\n\n\tdef __iter__(self, *args, **kwargs):\n\t\tfor line in self._trace_log[self._trace_log_pos:self._trace_log.rfind(b'\\n')].split(b'\\n'):\n\t\t\tif line:\n\t\t\t\tyield line + b'\\n'\n\n\t\tself._trace_log_pos = self._trace_log.rfind(b'\\n')\n\n\tdef __repr__(self):\n\t\tself.make_sure_we_are_executing()\n\t\treturn str(self._trace_log)\n\n\tdef __enter__(self):\n\t\treturn self\n\n\tdef __exit__(self, *args):\n\t\t# b''.join(sys_command('sync')) # No need to, since the underlying fs() object will call sync.\n\t\t# TODO: https://stackoverflow.com/questions/28157929/how-to-safely-handle-an-exception-inside-a-context-manager\n\n\t\tif self.child_fd:\n\t\t\ttry:\n\t\t\t\tos.close(self.child_fd)\n\t\t\texcept:\n\t\t\t\tpass\n\n\t\tif self.peak_output:\n\t\t\t# To make sure any peaked output didn't leave us hanging\n\t\t\t# on the same line we were on.\n\t\t\tsys.stdout.write(\"\\n\")\n\t\t\tsys.stdout.flush()\n\n\t\tif len(args) >= 2 and args[1]:\n\t\t\tlog(args[1], level=logging.ERROR, fg='red')\n\n\t\tif self.exit_code != 0:\n\t\t\traise SysCallError(f\"{self.cmd} exited with abnormal exit code: {self.exit_code}\")\n\n\tdef is_alive(self):\n\t\tself.poll()\n\n\t\tif self.started and self.ended is None:\n\t\t\treturn True\n\n\t\treturn False\n\n\tdef write(self, data: bytes, line_ending=True):\n\t\tassert type(data) == bytes # TODO: Maybe we can support str as well and encode it\n\n\t\tself.make_sure_we_are_executing()\n\n\t\tos.write(self.child_fd, data + (b'\\n' if line_ending else b''))\n\n\tdef make_sure_we_are_executing(self):\n\t\tif not self.started:\n\t\t\treturn self.execute()\n\n\tdef tell(self) -> int:\n\t\tself.make_sure_we_are_executing()\n\t\treturn self._trace_log_pos\n\n\tdef seek(self, pos):\n\t\tself.make_sure_we_are_executing()\n\t\t# Safety check to ensure 0 < pos < len(tracelog)\n\t\tself._trace_log_pos = min(max(0, pos), len(self._trace_log))\n\n\tdef peak(self, output: Union[str, bytes]) -> bool:\n\t\tif self.peak_output:\n\t\t\tif type(output) == bytes:\n\t\t\t\ttry:\n\t\t\t\t\toutput = output.decode('UTF-8')\n\t\t\t\texcept UnicodeDecodeError:\n\t\t\t\t\treturn False\n\n\t\t\toutput = output.strip('\\r\\n ')\n\t\t\tif len(output) <= 0:\n\t\t\t\treturn False\n\n\t\t\tfrom .user_interaction import get_terminal_width\n\n\t\t\t# Move back to the beginning of the terminal\n\t\t\tsys.stdout.flush()\n\t\t\tsys.stdout.write(\"\\033[%dG\" % 0)\n\t\t\tsys.stdout.flush()\n\n\t\t\t# Clear the line\n\t\t\tsys.stdout.write(\" \" * get_terminal_width())\n\t\t\tsys.stdout.flush()\n\n\t\t\t# Move back to the beginning again\n\t\t\tsys.stdout.flush()\n\t\t\tsys.stdout.write(\"\\033[%dG\" % 0)\n\t\t\tsys.stdout.flush()\n\n\t\t\t# And print the new output we're peaking on:\n\t\t\tsys.stdout.write(output)\n\t\t\tsys.stdout.flush()\n\t\treturn True\n\n\tdef poll(self):\n\t\tself.make_sure_we_are_executing()\n\n\t\tgot_output = False\n\t\tfor fileno, event in self.poll_object.poll(0.1):\n\t\t\ttry:\n\t\t\t\toutput = os.read(self.child_fd, 8192)\n\t\t\t\tgot_output = True\n\t\t\t\tself.peak(output)\n\t\t\t\tself._trace_log += output\n\t\t\texcept OSError as err:\n\t\t\t\tself.ended = time.time()\n\t\t\t\tbreak\n\n\t\tif self.ended or (got_output is False and pid_exists(self.pid) is False):\n\t\t\tself.ended = 
time.time()\n\t\t\ttry:\n\t\t\t\tself.exit_code = os.waitpid(self.pid, 0)[1]\n\t\t\texcept ChildProcessError:\n\t\t\t\ttry:\n\t\t\t\t\tself.exit_code = os.waitpid(self.child_fd, 0)[1]\n\t\t\t\texcept ChildProcessError:\n\t\t\t\t\tself.exit_code = 1\n\n\tdef execute(self) -> bool:\n\t\tif (old_dir := os.getcwd()) != self.working_directory:\n\t\t\tos.chdir(self.working_directory)\n\n\t\t# Note: If for any reason, we get a Python exception between here\n\t\t# and until os.close(), the traceback will get locked inside\n\t\t# stdout of the child_fd object. `os.read(self.child_fd, 8192)` is the\n\t\t# only way to get the traceback without loosing it.\n\t\tself.pid, self.child_fd = pty.fork()\n\t\tos.chdir(old_dir)\n\n\t\tif not self.pid:\n\t\t\ttry:\n\t\t\t\tos.execve(self.cmd[0], self.cmd, {**os.environ, **self.environment_vars})\n\t\t\texcept FileNotFoundError:\n\t\t\t\tlog(f\"{self.cmd[0]} does not exist.\", level=logging.ERROR, fg=\"red\")\n\t\t\t\tself.exit_code = 1\n\t\t\t\treturn False\n\n\t\tself.started = time.time()\n\t\tself.poll_object.register(self.child_fd, EPOLLIN | EPOLLHUP)\n\n\t\treturn True\n\n\tdef decode(self, encoding='UTF-8'):\n\t\treturn self._trace_log.decode(encoding)\n\n\nclass SysCommand:\n\tdef __init__(self, cmd, callback=None, start_callback=None, peak_output=False, environment_vars=None, working_directory='./'):\n\t\t_callbacks = {}\n\t\tif callback:\n\t\t\t_callbacks['on_end'] = callback\n\t\tif start_callback:\n\t\t\t_callbacks['on_start'] = start_callback\n\n\t\tself.cmd = cmd\n\t\tself._callbacks = _callbacks\n\t\tself.peak_output = peak_output\n\t\tself.environment_vars = environment_vars\n\t\tself.working_directory = working_directory\n\n\t\tself.session = None\n\t\tself.create_session()\n\n\tdef __enter__(self):\n\t\treturn self.session\n\n\tdef __exit__(self, *args, **kwargs):\n\t\t# b''.join(sys_command('sync')) # No need to, since the underlying fs() object will call sync.\n\t\t# TODO: https://stackoverflow.com/questions/28157929/how-to-safely-handle-an-exception-inside-a-context-manager\n\n\t\tif len(args) >= 2 and args[1]:\n\t\t\tlog(args[1], level=logging.ERROR, fg='red')\n\n\tdef __iter__(self, *args, **kwargs):\n\n\t\tfor line in self.session:\n\t\t\tyield line\n\n\tdef __repr__(self, *args, **kwargs):\n\t\treturn self.session._trace_log.decode('UTF-8')\n\n\tdef __json__(self):\n\t\treturn {\n\t\t\t'cmd': self.cmd,\n\t\t\t'callbacks': self._callbacks,\n\t\t\t'peak': self.peak_output,\n\t\t\t'environment_vars': self.environment_vars,\n\t\t\t'session': True if self.session else False\n\t\t}\n\n\tdef create_session(self):\n\t\tif self.session:\n\t\t\treturn True\n\n\t\ttry:\n\t\t\tself.session = SysCommandWorker(self.cmd, callbacks=self._callbacks, peak_output=self.peak_output, environment_vars=self.environment_vars)\n\n\t\t\twhile self.session.ended is None:\n\t\t\t\tself.session.poll()\n\n\t\texcept SysCallError:\n\t\t\treturn False\n\n\t\treturn True\n\n\tdef decode(self, fmt='UTF-8'):\n\t\treturn self.session._trace_log.decode(fmt)\n\n\t@property\n\tdef exit_code(self):\n\t\treturn self.session.exit_code\n\n\t@property\n\tdef trace_log(self):\n\t\treturn self.session._trace_log\n\n\ndef prerequisite_check():\n\tif not os.path.isdir(\"/sys/firmware/efi\"):\n\t\traise RequirementError(\"Archinstall only supports machines in UEFI mode.\")\n\n\treturn True\n\n\ndef reboot():\n\to = b''.join(SysCommand(\"/usr/bin/reboot\"))\n\n\ndef pid_exists(pid: int):\n\ttry:\n\t\treturn any(subprocess.check_output(['/usr/bin/ps', '--no-headers', '-o', 'pid', '-p', 
str(pid)]).strip())\n\texcept subprocess.CalledProcessError:\n\t\treturn False\n\n\ndef run_custom_user_commands(commands, installation):\n\tfor index, command in enumerate(commands):\n\t\tlog(f'Executing custom command \"{command}\" ...', fg='yellow')\n\t\twith open(f\"{installation.target}/var/tmp/user-command.{index}.sh\", \"w\") as temp_script:\n\t\t\ttemp_script.write(command)\n\t\texecution_output = SysCommand(f\"arch-chroot {installation.target} bash /var/tmp/user-command.{index}.sh\")\n\t\tlog(execution_output)\n\t\tos.unlink(f\"{installation.target}/var/tmp/user-command.{index}.sh\")\n", "path": "archinstall/lib/general.py" } ]
[ { "content": "import hashlib\nimport json\nimport logging\nimport os\nimport pty\nimport shlex\nimport subprocess\nimport sys\nimport time\nfrom datetime import datetime, date\nfrom select import epoll, EPOLLIN, EPOLLHUP\nfrom typing import Union\n\nfrom .exceptions import *\nfrom .output import log\n\n\ndef gen_uid(entropy_length=256):\n\treturn hashlib.sha512(os.urandom(entropy_length)).hexdigest()\n\n\ndef multisplit(s, splitters):\n\ts = [s, ]\n\tfor key in splitters:\n\t\tns = []\n\t\tfor obj in s:\n\t\t\tx = obj.split(key)\n\t\t\tfor index, part in enumerate(x):\n\t\t\t\tif len(part):\n\t\t\t\t\tns.append(part)\n\t\t\t\tif index < len(x) - 1:\n\t\t\t\t\tns.append(key)\n\t\ts = ns\n\treturn s\n\n\ndef locate_binary(name):\n\tfor PATH in os.environ['PATH'].split(':'):\n\t\tfor root, folders, files in os.walk(PATH):\n\t\t\tfor file in files:\n\t\t\t\tif file == name:\n\t\t\t\t\treturn os.path.join(root, file)\n\t\t\tbreak # Don't recurse\n\n\traise RequirementError(f\"Binary {name} does not exist.\")\n\n\nclass JsonEncoder:\n\tdef _encode(obj):\n\t\tif isinstance(obj, dict):\n\t\t\t# We'll need to iterate not just the value that default() usually gets passed\n\t\t\t# But also iterate manually over each key: value pair in order to trap the keys.\n\n\t\t\tcopy = {}\n\t\t\tfor key, val in list(obj.items()):\n\t\t\t\tif isinstance(val, dict):\n\t\t\t\t\t# This, is a EXTREMELY ugly hack.. but it's the only quick way I can think of to trigger a encoding of sub-dictionaries.\n\t\t\t\t\tval = json.loads(json.dumps(val, cls=JSON))\n\t\t\t\telse:\n\t\t\t\t\tval = JsonEncoder._encode(val)\n\n\t\t\t\tif type(key) == str and key[0] == '!':\n\t\t\t\t\tcopy[JsonEncoder._encode(key)] = '******'\n\t\t\t\telse:\n\t\t\t\t\tcopy[JsonEncoder._encode(key)] = val\n\t\t\treturn copy\n\t\telif hasattr(obj, 'json'):\n\t\t\treturn obj.json()\n\t\telif hasattr(obj, '__dump__'):\n\t\t\treturn obj.__dump__()\n\t\telif isinstance(obj, (datetime, date)):\n\t\t\treturn obj.isoformat()\n\t\telif isinstance(obj, (list, set, tuple)):\n\t\t\tr = []\n\t\t\tfor item in obj:\n\t\t\t\tr.append(json.loads(json.dumps(item, cls=JSON)))\n\t\t\treturn r\n\t\telse:\n\t\t\treturn obj\n\n\nclass JSON(json.JSONEncoder, json.JSONDecoder):\n\tdef _encode(self, obj):\n\t\treturn JsonEncoder._encode(obj)\n\n\tdef encode(self, obj):\n\t\treturn super(JSON, self).encode(self._encode(obj))\n\n\nclass SysCommandWorker:\n\tdef __init__(self, cmd, callbacks=None, peak_output=False, environment_vars=None, logfile=None, working_directory='./'):\n\t\tif not callbacks:\n\t\t\tcallbacks = {}\n\t\tif not environment_vars:\n\t\t\tenvironment_vars = {}\n\n\t\tif type(cmd) is str:\n\t\t\tcmd = shlex.split(cmd)\n\n\t\tif cmd[0][0] != '/' and cmd[0][:2] != './':\n\t\t\t# \"which\" doesn't work as it's a builtin to bash.\n\t\t\t# It used to work, but for whatever reason it doesn't anymore.\n\t\t\t# We there for fall back on manual lookup in os.PATH\n\t\t\tcmd[0] = locate_binary(cmd[0])\n\n\t\tself.cmd = cmd\n\t\tself.callbacks = callbacks\n\t\tself.peak_output = peak_output\n\t\tself.environment_vars = environment_vars\n\t\tself.logfile = logfile\n\t\tself.working_directory = working_directory\n\n\t\tself.exit_code = None\n\t\tself._trace_log = b''\n\t\tself._trace_log_pos = 0\n\t\tself.poll_object = epoll()\n\t\tself.child_fd = None\n\t\tself.started = None\n\t\tself.ended = None\n\n\tdef __contains__(self, key: bytes):\n\t\t\"\"\"\n\t\tContains will also move the current buffert position forward.\n\t\tThis is to avoid re-checking the same data when 
looking for output.\n\t\t\"\"\"\n\t\tassert type(key) == bytes\n\n\t\tif (contains := key in self._trace_log[self._trace_log_pos:]):\n\t\t\tself._trace_log_pos += self._trace_log[self._trace_log_pos:].find(key) + len(key)\n\n\t\treturn contains\n\n\tdef __iter__(self, *args, **kwargs):\n\t\tfor line in self._trace_log[self._trace_log_pos:self._trace_log.rfind(b'\\n')].split(b'\\n'):\n\t\t\tif line:\n\t\t\t\tyield line + b'\\n'\n\n\t\tself._trace_log_pos = self._trace_log.rfind(b'\\n')\n\n\tdef __repr__(self):\n\t\tself.make_sure_we_are_executing()\n\t\treturn str(self._trace_log)\n\n\tdef __enter__(self):\n\t\treturn self\n\n\tdef __exit__(self, *args):\n\t\t# b''.join(sys_command('sync')) # No need to, since the underlying fs() object will call sync.\n\t\t# TODO: https://stackoverflow.com/questions/28157929/how-to-safely-handle-an-exception-inside-a-context-manager\n\n\t\tif self.child_fd:\n\t\t\ttry:\n\t\t\t\tos.close(self.child_fd)\n\t\t\texcept:\n\t\t\t\tpass\n\n\t\tif self.peak_output:\n\t\t\t# To make sure any peaked output didn't leave us hanging\n\t\t\t# on the same line we were on.\n\t\t\tsys.stdout.write(\"\\n\")\n\t\t\tsys.stdout.flush()\n\n\t\tif len(args) >= 2 and args[1]:\n\t\t\tlog(args[1], level=logging.ERROR, fg='red')\n\n\t\tif self.exit_code != 0:\n\t\t\traise SysCallError(f\"{self.cmd} exited with abnormal exit code: {self.exit_code}\")\n\n\tdef is_alive(self):\n\t\tself.poll()\n\n\t\tif self.started and self.ended is None:\n\t\t\treturn True\n\n\t\treturn False\n\n\tdef write(self, data: bytes, line_ending=True):\n\t\tassert type(data) == bytes # TODO: Maybe we can support str as well and encode it\n\n\t\tself.make_sure_we_are_executing()\n\n\t\tos.write(self.child_fd, data + (b'\\n' if line_ending else b''))\n\n\tdef make_sure_we_are_executing(self):\n\t\tif not self.started:\n\t\t\treturn self.execute()\n\n\tdef tell(self) -> int:\n\t\tself.make_sure_we_are_executing()\n\t\treturn self._trace_log_pos\n\n\tdef seek(self, pos):\n\t\tself.make_sure_we_are_executing()\n\t\t# Safety check to ensure 0 < pos < len(tracelog)\n\t\tself._trace_log_pos = min(max(0, pos), len(self._trace_log))\n\n\tdef peak(self, output: Union[str, bytes]) -> bool:\n\t\tif self.peak_output:\n\t\t\tif type(output) == bytes:\n\t\t\t\ttry:\n\t\t\t\t\toutput = output.decode('UTF-8')\n\t\t\t\texcept UnicodeDecodeError:\n\t\t\t\t\treturn False\n\n\t\t\toutput = output.strip('\\r\\n ')\n\t\t\tif len(output) <= 0:\n\t\t\t\treturn False\n\n\t\t\tfrom .user_interaction import get_terminal_width\n\n\t\t\t# Move back to the beginning of the terminal\n\t\t\tsys.stdout.flush()\n\t\t\tsys.stdout.write(\"\\033[%dG\" % 0)\n\t\t\tsys.stdout.flush()\n\n\t\t\t# Clear the line\n\t\t\tsys.stdout.write(\" \" * get_terminal_width())\n\t\t\tsys.stdout.flush()\n\n\t\t\t# Move back to the beginning again\n\t\t\tsys.stdout.flush()\n\t\t\tsys.stdout.write(\"\\033[%dG\" % 0)\n\t\t\tsys.stdout.flush()\n\n\t\t\t# And print the new output we're peaking on:\n\t\t\tsys.stdout.write(output)\n\t\t\tsys.stdout.flush()\n\t\treturn True\n\n\tdef poll(self):\n\t\tself.make_sure_we_are_executing()\n\n\t\tgot_output = False\n\t\tfor fileno, event in self.poll_object.poll(0.1):\n\t\t\ttry:\n\t\t\t\toutput = os.read(self.child_fd, 8192)\n\t\t\t\tgot_output = True\n\t\t\t\tself.peak(output)\n\t\t\t\tself._trace_log += output\n\t\t\texcept OSError as err:\n\t\t\t\tself.ended = time.time()\n\t\t\t\tbreak\n\n\t\tif self.ended or (got_output is False and pid_exists(self.pid) is False):\n\t\t\tself.ended = 
time.time()\n\t\t\ttry:\n\t\t\t\tself.exit_code = os.waitpid(self.pid, 0)[1]\n\t\t\texcept ChildProcessError:\n\t\t\t\ttry:\n\t\t\t\t\tself.exit_code = os.waitpid(self.child_fd, 0)[1]\n\t\t\t\texcept ChildProcessError:\n\t\t\t\t\tself.exit_code = 1\n\n\tdef execute(self) -> bool:\n\t\tif (old_dir := os.getcwd()) != self.working_directory:\n\t\t\tos.chdir(self.working_directory)\n\n\t\t# Note: If for any reason, we get a Python exception between here\n\t\t# and until os.close(), the traceback will get locked inside\n\t\t# stdout of the child_fd object. `os.read(self.child_fd, 8192)` is the\n\t\t# only way to get the traceback without loosing it.\n\t\tself.pid, self.child_fd = pty.fork()\n\t\tos.chdir(old_dir)\n\n\t\tif not self.pid:\n\t\t\ttry:\n\t\t\t\tos.execve(self.cmd[0], self.cmd, {**os.environ, **self.environment_vars})\n\t\t\texcept FileNotFoundError:\n\t\t\t\tlog(f\"{self.cmd[0]} does not exist.\", level=logging.ERROR, fg=\"red\")\n\t\t\t\tself.exit_code = 1\n\t\t\t\treturn False\n\n\t\tself.started = time.time()\n\t\tself.poll_object.register(self.child_fd, EPOLLIN | EPOLLHUP)\n\n\t\treturn True\n\n\tdef decode(self, encoding='UTF-8'):\n\t\treturn self._trace_log.decode(encoding)\n\n\nclass SysCommand:\n\tdef __init__(self, cmd, callback=None, start_callback=None, peak_output=False, environment_vars=None, working_directory='./'):\n\t\t_callbacks = {}\n\t\tif callback:\n\t\t\t_callbacks['on_end'] = callback\n\t\tif start_callback:\n\t\t\t_callbacks['on_start'] = start_callback\n\n\t\tself.cmd = cmd\n\t\tself._callbacks = _callbacks\n\t\tself.peak_output = peak_output\n\t\tself.environment_vars = environment_vars\n\t\tself.working_directory = working_directory\n\n\t\tself.session = None\n\t\tself.create_session()\n\n\tdef __enter__(self):\n\t\treturn self.session\n\n\tdef __exit__(self, *args, **kwargs):\n\t\t# b''.join(sys_command('sync')) # No need to, since the underlying fs() object will call sync.\n\t\t# TODO: https://stackoverflow.com/questions/28157929/how-to-safely-handle-an-exception-inside-a-context-manager\n\n\t\tif len(args) >= 2 and args[1]:\n\t\t\tlog(args[1], level=logging.ERROR, fg='red')\n\n\tdef __iter__(self, *args, **kwargs):\n\n\t\tfor line in self.session:\n\t\t\tyield line\n\n\tdef __repr__(self, *args, **kwargs):\n\t\treturn self.session._trace_log.decode('UTF-8')\n\n\tdef __json__(self):\n\t\treturn {\n\t\t\t'cmd': self.cmd,\n\t\t\t'callbacks': self._callbacks,\n\t\t\t'peak': self.peak_output,\n\t\t\t'environment_vars': self.environment_vars,\n\t\t\t'session': True if self.session else False\n\t\t}\n\n\tdef create_session(self):\n\t\tif self.session:\n\t\t\treturn True\n\n\t\ttry:\n\t\t\tself.session = SysCommandWorker(self.cmd, callbacks=self._callbacks, peak_output=self.peak_output, environment_vars=self.environment_vars)\n\n\t\t\twhile self.session.ended is None:\n\t\t\t\tself.session.poll()\n\n\t\t\tif self.peak_output:\n\t\t\t\tsys.stdout.write('\\n')\n\t\t\t\tsys.stdout.flush()\n\n\t\texcept SysCallError:\n\t\t\treturn False\n\n\t\treturn True\n\n\tdef decode(self, fmt='UTF-8'):\n\t\treturn self.session._trace_log.decode(fmt)\n\n\t@property\n\tdef exit_code(self):\n\t\treturn self.session.exit_code\n\n\t@property\n\tdef trace_log(self):\n\t\treturn self.session._trace_log\n\n\ndef prerequisite_check():\n\tif not os.path.isdir(\"/sys/firmware/efi\"):\n\t\traise RequirementError(\"Archinstall only supports machines in UEFI mode.\")\n\n\treturn True\n\n\ndef reboot():\n\to = b''.join(SysCommand(\"/usr/bin/reboot\"))\n\n\ndef pid_exists(pid: 
int):\n\ttry:\n\t\treturn any(subprocess.check_output(['/usr/bin/ps', '--no-headers', '-o', 'pid', '-p', str(pid)]).strip())\n\texcept subprocess.CalledProcessError:\n\t\treturn False\n\n\ndef run_custom_user_commands(commands, installation):\n\tfor index, command in enumerate(commands):\n\t\tlog(f'Executing custom command \"{command}\" ...', fg='yellow')\n\t\twith open(f\"{installation.target}/var/tmp/user-command.{index}.sh\", \"w\") as temp_script:\n\t\t\ttemp_script.write(command)\n\t\texecution_output = SysCommand(f\"arch-chroot {installation.target} bash /var/tmp/user-command.{index}.sh\")\n\t\tlog(execution_output)\n\t\tos.unlink(f\"{installation.target}/var/tmp/user-command.{index}.sh\")\n", "path": "archinstall/lib/general.py" } ]
diff --git a/archinstall/lib/general.py b/archinstall/lib/general.py index 249c789071..3b62c891a8 100644 --- a/archinstall/lib/general.py +++ b/archinstall/lib/general.py @@ -333,6 +333,10 @@ def create_session(self): while self.session.ended is None: self.session.poll() + if self.peak_output: + sys.stdout.write('\n') + sys.stdout.flush() + except SysCallError: return False
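The diff above writes the missing newline once the peaked output has finished streaming. A standalone illustration of the underlying terminal behaviour (not archinstall's actual code): a progress line that is redrawn in place needs one final `\n`, otherwise the next log message is appended to the bar line, as in the screenshot in the issue.

```python
# Standalone illustration: the progress line is redrawn in place with "\r",
# so a single trailing newline must be written when the stream ends.
import sys
import time


def show_progress(steps=10):
    for i in range(steps + 1):
        bar = "=" * (20 * i // steps)
        sys.stdout.write("\r[%-20s] %3d%%" % (bar, 100 * i // steps))
        sys.stdout.flush()
        time.sleep(0.05)
    sys.stdout.write("\n")  # without this, the next print lands on the bar line
    sys.stdout.flush()


show_progress()
print("next log line starts on its own line")
```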
plone__Products.CMFPlone-3404
Expose the human_readable_size helper in the @@plone view The [@@plone view](https://github.com/plone/Products.CMFPlone/blob/009f785e450430ee7b143624480aef9268491c0b/Products/CMFPlone/browser/ploneview.py#L19) has helper methods that can be used in templates. It would be handy to add the [Products.CMFPlone.utils.human_readable_size](https://github.com/plone/Products.CMFPlone/blob/009f785e450430ee7b143624480aef9268491c0b/Products/CMFPlone/utils.py#L855-L876) function as a method of that view.
[ { "content": "from Acquisition import aq_inner\nfrom plone.memoize.view import memoize\nfrom Products.CMFCore.utils import getToolByName\nfrom Products.CMFPlone import utils\nfrom Products.CMFPlone.browser.interfaces import IPlone\nfrom Products.Five import BrowserView\nfrom zope.component import getMultiAdapter\nfrom zope.i18n import translate\nfrom zope.interface import implementer\nfrom zope.size import byteDisplay\n\n_marker = []\n\n\n@implementer(IPlone)\nclass Plone(BrowserView):\n\n # Utility methods\n\n @memoize\n def uniqueItemIndex(self, pos=0):\n \"\"\"Return an index iterator.\"\"\"\n return utils.RealIndexIterator(pos=pos)\n\n def toLocalizedTime(self, time, long_format=None, time_only=None):\n \"\"\"Convert time to localized time\n \"\"\"\n context = aq_inner(self.context)\n util = getToolByName(context, 'translation_service')\n return util.ulocalized_time(time, long_format, time_only,\n context=context, domain='plonelocales',\n request=self.request)\n\n def toLocalizedSize(self, size):\n \"\"\"Convert an integer to a localized size string\n \"\"\"\n return translate(byteDisplay(size), context=self.request)\n\n # This can't be request-memoized, because it won't necessarily remain\n # valid across traversals. For example, you may get tabs on an error\n # message.\n def showToolbar(self):\n \"\"\"Determine if the editable border should be shown\n \"\"\"\n request = self.request\n if 'disable_border' in request or 'disable_toolbar' in request:\n return False\n if 'enable_border' in request or 'enable_toolbar' in request:\n return True\n\n context = aq_inner(self.context)\n\n portal_membership = getToolByName(context, 'portal_membership')\n checkPerm = portal_membership.checkPermission\n\n if (checkPerm('Modify portal content', context) or\n checkPerm('Add portal content', context) or\n checkPerm('Review portal content', context)):\n return True\n\n if portal_membership.isAnonymousUser():\n return False\n\n context_state = getMultiAdapter(\n (context, request),\n name=\"plone_context_state\"\n )\n actions = context_state.actions\n\n if actions('workflow', max=1):\n return True\n\n if actions('batch', max=1):\n return True\n\n for action in actions('object'):\n if action.get('id', '') != 'view':\n return True\n\n template_id = None\n if 'PUBLISHED' in request:\n if getattr(request['PUBLISHED'], 'getId', None):\n template_id = request['PUBLISHED'].getId()\n\n idActions = {}\n for obj in actions('object') + actions('folder'):\n idActions[obj.get('id', '')] = 1\n\n if 'edit' in idActions:\n if (template_id in idActions or\n template_id in ['synPropertiesForm', 'folder_contents',\n 'folder_listing', 'listing_view']):\n return True\n\n # Check to see if the user is able to add content\n allowedTypes = context.allowedContentTypes()\n if allowedTypes:\n return True\n\n return False\n\n def normalizeString(self, text):\n \"\"\"Normalizes a title to an id.\n \"\"\"\n return utils.normalizeString(text, context=self)\n\n def cropText(self, text, length, ellipsis='...'):\n \"\"\"Crop text on a word boundary\n \"\"\"\n if not length:\n return text\n converted = False\n if not isinstance(text, str):\n text = utils.safe_unicode(text)\n converted = True\n if len(text) > length:\n text = text[:length]\n l = text.rfind(' ')\n if l > length / 2:\n text = text[:l + 1]\n text += ellipsis\n if converted:\n # encode back from unicode\n text = text.encode('utf-8')\n return text\n\n def site_encoding(self):\n return 'utf-8'\n\n # Deprecated in favour of @@plone_context_state and @@plone_portal_state\n\n 
def getCurrentUrl(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.current_page_url()\n\n def isDefaultPageInFolder(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.is_default_page()\n\n def isStructuralFolder(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.is_structural_folder()\n\n def navigationRootPath(self):\n portal_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_portal_state')\n return portal_state.navigation_root_path()\n\n def navigationRootUrl(self):\n portal_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_portal_state')\n return portal_state.navigation_root_url()\n\n def getParentObject(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.parent()\n\n def getCurrentFolder(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.folder()\n\n def getCurrentFolderUrl(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.folder().absolute_url()\n\n @memoize\n def getCurrentObjectUrl(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.canonical_object_url()\n\n @memoize\n def isFolderOrFolderDefaultPage(self):\n state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return state.is_structural_folder() or state.is_default_page()\n\n @memoize\n def isPortalOrPortalDefaultPage(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.is_portal_root()\n\n @memoize\n def getViewTemplateId(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.view_template_id()\n\n @memoize\n def patterns_settings(self):\n context = aq_inner(self.context)\n return getMultiAdapter(\n (context, self.request),\n name='plone_patterns_settings')()\n", "path": "Products/CMFPlone/browser/ploneview.py" } ]
[ { "content": "from Acquisition import aq_inner\nfrom plone.memoize.view import memoize\nfrom Products.CMFCore.utils import getToolByName\nfrom Products.CMFPlone import utils\nfrom Products.CMFPlone.browser.interfaces import IPlone\nfrom Products.Five import BrowserView\nfrom zope.component import getMultiAdapter\nfrom zope.i18n import translate\nfrom zope.interface import implementer\nfrom zope.size import byteDisplay\n\n_marker = []\n\n\n@implementer(IPlone)\nclass Plone(BrowserView):\n\n # Utility methods\n\n @memoize\n def uniqueItemIndex(self, pos=0):\n \"\"\"Return an index iterator.\"\"\"\n return utils.RealIndexIterator(pos=pos)\n\n def toLocalizedTime(self, time, long_format=None, time_only=None):\n \"\"\"Convert time to localized time\n \"\"\"\n context = aq_inner(self.context)\n util = getToolByName(context, 'translation_service')\n return util.ulocalized_time(time, long_format, time_only,\n context=context, domain='plonelocales',\n request=self.request)\n\n def toLocalizedSize(self, size):\n \"\"\"Convert an integer to a localized size string\n \"\"\"\n return translate(byteDisplay(size), context=self.request)\n\n # This can't be request-memoized, because it won't necessarily remain\n # valid across traversals. For example, you may get tabs on an error\n # message.\n def showToolbar(self):\n \"\"\"Determine if the editable border should be shown\n \"\"\"\n request = self.request\n if 'disable_border' in request or 'disable_toolbar' in request:\n return False\n if 'enable_border' in request or 'enable_toolbar' in request:\n return True\n\n context = aq_inner(self.context)\n\n portal_membership = getToolByName(context, 'portal_membership')\n checkPerm = portal_membership.checkPermission\n\n if (checkPerm('Modify portal content', context) or\n checkPerm('Add portal content', context) or\n checkPerm('Review portal content', context)):\n return True\n\n if portal_membership.isAnonymousUser():\n return False\n\n context_state = getMultiAdapter(\n (context, request),\n name=\"plone_context_state\"\n )\n actions = context_state.actions\n\n if actions('workflow', max=1):\n return True\n\n if actions('batch', max=1):\n return True\n\n for action in actions('object'):\n if action.get('id', '') != 'view':\n return True\n\n template_id = None\n if 'PUBLISHED' in request:\n if getattr(request['PUBLISHED'], 'getId', None):\n template_id = request['PUBLISHED'].getId()\n\n idActions = {}\n for obj in actions('object') + actions('folder'):\n idActions[obj.get('id', '')] = 1\n\n if 'edit' in idActions:\n if (template_id in idActions or\n template_id in ['synPropertiesForm', 'folder_contents',\n 'folder_listing', 'listing_view']):\n return True\n\n # Check to see if the user is able to add content\n allowedTypes = context.allowedContentTypes()\n if allowedTypes:\n return True\n\n return False\n\n def normalizeString(self, text):\n \"\"\"Normalizes a title to an id.\n \"\"\"\n return utils.normalizeString(text, context=self)\n\n def cropText(self, text, length, ellipsis='...'):\n \"\"\"Crop text on a word boundary\n \"\"\"\n if not length:\n return text\n converted = False\n if not isinstance(text, str):\n text = utils.safe_unicode(text)\n converted = True\n if len(text) > length:\n text = text[:length]\n l = text.rfind(' ')\n if l > length / 2:\n text = text[:l + 1]\n text += ellipsis\n if converted:\n # encode back from unicode\n text = text.encode('utf-8')\n return text\n\n def site_encoding(self):\n return 'utf-8'\n\n # Deprecated in favour of @@plone_context_state and @@plone_portal_state\n\n 
def getCurrentUrl(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.current_page_url()\n\n def isDefaultPageInFolder(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.is_default_page()\n\n def isStructuralFolder(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.is_structural_folder()\n\n def navigationRootPath(self):\n portal_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_portal_state')\n return portal_state.navigation_root_path()\n\n def navigationRootUrl(self):\n portal_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_portal_state')\n return portal_state.navigation_root_url()\n\n def getParentObject(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.parent()\n\n def getCurrentFolder(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.folder()\n\n def getCurrentFolderUrl(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.folder().absolute_url()\n\n @memoize\n def getCurrentObjectUrl(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.canonical_object_url()\n\n @memoize\n def isFolderOrFolderDefaultPage(self):\n state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return state.is_structural_folder() or state.is_default_page()\n\n @memoize\n def isPortalOrPortalDefaultPage(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.is_portal_root()\n\n @memoize\n def getViewTemplateId(self):\n context_state = getMultiAdapter(\n (aq_inner(self.context), self.request),\n name='plone_context_state')\n return context_state.view_template_id()\n\n @memoize\n def patterns_settings(self):\n context = aq_inner(self.context)\n return getMultiAdapter(\n (context, self.request),\n name='plone_patterns_settings')()\n\n @property\n def human_readable_size(self):\n return utils.human_readable_size\n", "path": "Products/CMFPlone/browser/ploneview.py" } ]
diff --git a/Products/CMFPlone/browser/ploneview.py b/Products/CMFPlone/browser/ploneview.py index 7bae74eb74..c9735e95c8 100644 --- a/Products/CMFPlone/browser/ploneview.py +++ b/Products/CMFPlone/browser/ploneview.py @@ -211,3 +211,7 @@ def patterns_settings(self): return getMultiAdapter( (context, self.request), name='plone_patterns_settings')() + + @property + def human_readable_size(self): + return utils.human_readable_size diff --git a/Products/CMFPlone/tests/testPloneView.py b/Products/CMFPlone/tests/testPloneView.py index 4f68681a98..5efdcc84bd 100644 --- a/Products/CMFPlone/tests/testPloneView.py +++ b/Products/CMFPlone/tests/testPloneView.py @@ -151,3 +151,8 @@ def testCropText(self): def testSiteEncoding(self): view = Plone(self.portal, self.app.REQUEST) self.assertEqual('utf-8', view.site_encoding()) + + def test_human_readable_size(self): + view = Plone(self.portal, self.app.REQUEST) + from Products.CMFPlone.utils import human_readable_size + self.assertIs(view.human_readable_size, human_readable_size) diff --git a/news/3146.feature b/news/3146.feature new file mode 100644 index 0000000000..49a34411e5 --- /dev/null +++ b/news/3146.feature @@ -0,0 +1,2 @@ +The @@plone view exposes the human_readable_size helper +[ale-rt]
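With the property added in the diff above, templates and view code can reach the helper through the `@@plone` view instead of importing `Products.CMFPlone.utils` directly. A hypothetical usage sketch; the function name `render_size` and the TAL line in the comment are illustrative only:

```python
# Hypothetical usage sketch inside a Plone add-on; `render_size` is illustrative.
# In a page template the same helper would be reachable as, e.g.:
#   <span tal:define="plone_view context/@@plone"
#         tal:content="python: plone_view.human_readable_size(size)" />
from zope.component import getMultiAdapter


def render_size(context, request, num_bytes):
    # Look up the @@plone browser view and delegate to the exposed helper.
    plone_view = getMultiAdapter((context, request), name="plone")
    return plone_view.human_readable_size(num_bytes)
```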
gammapy__gammapy-3719
FitResult print output is confusing A `print(fit_result)` displays both the `covariance_result` and the `optimize_result` as `OptimizeResult`, e.g. see cell 19 https://docs.gammapy.org/dev/tutorials/starting/analysis_2.html#Fit-the-model Reminder issue to fix it during the sprint week.
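One possible direction, sketched here purely as an illustration and not necessarily the change that was merged in gammapy, is to have the container's `__repr__` label each block with the wrapped result's own class name rather than a hard-coded `OptimizeResult` heading:

```python
# Illustrative sketch only; not necessarily the fix that was merged in gammapy.
class FitResult:
    """Container bundling the optimize and covariance results of a fit."""

    def __init__(self, optimize_result, covariance_result=None):
        self.optimize_result = optimize_result
        self.covariance_result = covariance_result

    def __repr__(self):
        lines = [self.__class__.__name__, ""]
        for result in (self.optimize_result, self.covariance_result):
            if result is not None:
                # Use each sub-result's own class name as its heading.
                lines.append(result.__class__.__name__)
                lines.append("\t" + repr(result).replace("\n", "\n\t"))
                lines.append("")
        return "\n".join(lines)
```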
[ { "content": "# Licensed under a 3-clause BSD style license - see LICENSE.rst\nimport itertools\nimport logging\nimport numpy as np\nfrom gammapy.utils.pbar import progress_bar\nfrom gammapy.utils.table import table_from_row_data\nfrom .covariance import Covariance\nfrom .iminuit import (\n confidence_iminuit,\n contour_iminuit,\n covariance_iminuit,\n optimize_iminuit,\n)\nfrom .scipy import confidence_scipy, optimize_scipy\nfrom .sherpa import optimize_sherpa\n\n__all__ = [\"Fit\"]\n\nlog = logging.getLogger(__name__)\n\n\nclass Registry:\n \"\"\"Registry of available backends for given tasks.\n\n Gives users the power to extend from their scripts.\n Used by `Fit` below.\n\n Not sure if we should call it \"backend\" or \"method\" or something else.\n Probably we will code up some methods, e.g. for profile analysis ourselves,\n using scipy or even just Python / Numpy?\n \"\"\"\n\n register = {\n \"optimize\": {\n \"minuit\": optimize_iminuit,\n \"sherpa\": optimize_sherpa,\n \"scipy\": optimize_scipy,\n },\n \"covariance\": {\n \"minuit\": covariance_iminuit,\n # \"sherpa\": covariance_sherpa,\n # \"scipy\": covariance_scipy,\n },\n \"confidence\": {\n \"minuit\": confidence_iminuit,\n # \"sherpa\": confidence_sherpa,\n \"scipy\": confidence_scipy,\n },\n }\n\n @classmethod\n def get(cls, task, backend):\n if task not in cls.register:\n raise ValueError(f\"Unknown task {task!r}\")\n\n backend_options = cls.register[task]\n\n if backend not in backend_options:\n raise ValueError(f\"Unknown backend {backend!r} for task {task!r}\")\n\n return backend_options[backend]\n\n\nregistry = Registry()\n\n\nclass Fit:\n \"\"\"Fit class.\n\n The fit class provides a uniform interface to multiple fitting backends.\n Currently available: \"minuit\", \"sherpa\" and \"scipy\"\n\n Parameters\n ----------\n backend : {\"minuit\", \"scipy\" \"sherpa\"}\n Global backend used for fitting, default : minuit\n optimize_opts : dict\n Keyword arguments passed to the optimizer. For the `\"minuit\"` backend\n see https://iminuit.readthedocs.io/en/latest/api.html#iminuit.Minuit\n for a detailed description of the available options. If there is an entry\n 'migrad_opts', those options will be passed to `iminuit.Minuit.migrad()`.\n\n For the `\"sherpa\"` backend you can from the options `method = {\"simplex\", \"levmar\", \"moncar\", \"gridsearch\"}`\n Those methods are described and compared in detail on\n http://cxc.cfa.harvard.edu/sherpa/methods/index.html. The available\n options of the optimization methods are described on the following\n pages in detail:\n\n * http://cxc.cfa.harvard.edu/sherpa/ahelp/neldermead.html\n * http://cxc.cfa.harvard.edu/sherpa/ahelp/montecarlo.html\n * http://cxc.cfa.harvard.edu/sherpa/ahelp/gridsearch.html\n * http://cxc.cfa.harvard.edu/sherpa/ahelp/levmar.html\n\n For the `\"scipy\"` backend the available options are described in detail here:\n https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html\n\n covariance_opts : dict\n Covariance options passed to the given backend.\n confidence_opts : dict\n Extra arguments passed to the backend. E.g. `iminuit.Minuit.minos` supports\n a ``maxcall`` option. For the scipy backend ``confidence_opts`` are forwarded\n to `~scipy.optimize.brentq`. 
If the confidence estimation fails, the bracketing\n interval can be adapted by modifying the the upper bound of the interval (``b``) value.\n store_trace : bool\n Whether to store the trace of the fit\n \"\"\"\n\n def __init__(\n self,\n backend=\"minuit\",\n optimize_opts=None,\n covariance_opts=None,\n confidence_opts=None,\n store_trace=False,\n ):\n self.store_trace = store_trace\n self.backend = backend\n\n if optimize_opts is None:\n optimize_opts = {\"backend\": backend}\n\n if covariance_opts is None:\n covariance_opts = {\"backend\": backend}\n\n if confidence_opts is None:\n confidence_opts = {\"backend\": backend}\n\n self.optimize_opts = optimize_opts\n self.covariance_opts = covariance_opts\n self.confidence_opts = confidence_opts\n self._minuit = None\n\n @property\n def minuit(self):\n \"\"\"Iminuit object\"\"\"\n return self._minuit\n\n @staticmethod\n def _parse_datasets(datasets):\n from gammapy.datasets import Datasets\n\n datasets = Datasets(datasets)\n return datasets, datasets.parameters\n\n def run(self, datasets):\n \"\"\"Run all fitting steps.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n\n Returns\n -------\n fit_result : `FitResult`\n Fit result\n \"\"\"\n optimize_result = self.optimize(datasets=datasets)\n\n if self.backend not in registry.register[\"covariance\"]:\n log.warning(\"No covariance estimate - not supported by this backend.\")\n return optimize_result\n\n covariance_result = self.covariance(datasets=datasets)\n\n return FitResult(\n optimize_result=optimize_result,\n covariance_result=covariance_result,\n )\n\n def optimize(self, datasets):\n \"\"\"Run the optimization.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n\n Returns\n -------\n optimize_result : `OptimizeResult`\n Optimization result\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n datasets.parameters.check_limits()\n\n parameters.autoscale()\n\n kwargs = self.optimize_opts.copy()\n backend = kwargs.pop(\"backend\", self.backend)\n\n compute = registry.get(\"optimize\", backend)\n # TODO: change this calling interface!\n # probably should pass a fit statistic, which has a model, which has parameters\n # and return something simpler, not a tuple of three things\n factors, info, optimizer = compute(\n parameters=parameters,\n function=datasets.stat_sum,\n store_trace=self.store_trace,\n **kwargs,\n )\n\n if backend == \"minuit\":\n self._minuit = optimizer\n kwargs[\"method\"] = \"migrad\"\n\n trace = table_from_row_data(info.pop(\"trace\"))\n\n if self.store_trace:\n idx = [\n parameters.index(par)\n for par in parameters.unique_parameters.free_parameters\n ]\n unique_names = np.array(datasets.models.parameters_unique_names)[idx]\n trace.rename_columns(trace.colnames[1:], list(unique_names))\n\n # Copy final results into the parameters object\n parameters.set_parameter_factors(factors)\n parameters.check_limits()\n return OptimizeResult(\n parameters=parameters,\n total_stat=datasets.stat_sum(),\n backend=backend,\n method=kwargs.get(\"method\", backend),\n trace=trace,\n **info,\n )\n\n def covariance(self, datasets):\n \"\"\"Estimate the covariance matrix.\n\n Assumes that the model parameters are already optimised.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n\n Returns\n -------\n result : `CovarianceResult`\n Results\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n\n kwargs = 
self.covariance_opts.copy()\n kwargs[\"minuit\"] = self.minuit\n backend = kwargs.pop(\"backend\", self.backend)\n compute = registry.get(\"covariance\", backend)\n\n with parameters.restore_status():\n if self.backend == \"minuit\":\n method = \"hesse\"\n else:\n method = \"\"\n\n factor_matrix, info = compute(\n parameters=parameters, function=datasets.stat_sum, **kwargs\n )\n\n datasets.models.covariance = Covariance.from_factor_matrix(\n parameters=parameters, matrix=factor_matrix\n )\n\n # TODO: decide what to return, and fill the info correctly!\n return CovarianceResult(\n backend=backend,\n method=method,\n success=info[\"success\"],\n message=info[\"message\"],\n )\n\n def confidence(self, datasets, parameter, sigma=1, reoptimize=True):\n \"\"\"Estimate confidence interval.\n\n Extra ``kwargs`` are passed to the backend.\n E.g. `iminuit.Minuit.minos` supports a ``maxcall`` option.\n\n For the scipy backend ``kwargs`` are forwarded to `~scipy.optimize.brentq`. If the\n confidence estimation fails, the bracketing interval can be adapted by modifying the\n the upper bound of the interval (``b``) value.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n parameter : `~gammapy.modeling.Parameter`\n Parameter of interest\n sigma : float\n Number of standard deviations for the confidence level\n reoptimize : bool\n Re-optimize other parameters, when computing the confidence region.\n\n Returns\n -------\n result : dict\n Dictionary with keys \"errp\", 'errn\", \"success\" and \"nfev\".\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n\n kwargs = self.confidence_opts.copy()\n backend = kwargs.pop(\"backend\", self.backend)\n\n compute = registry.get(\"confidence\", backend)\n parameter = parameters[parameter]\n\n with parameters.restore_status():\n result = compute(\n parameters=parameters,\n parameter=parameter,\n function=datasets.stat_sum,\n sigma=sigma,\n reoptimize=reoptimize,\n **kwargs,\n )\n\n result[\"errp\"] *= parameter.scale\n result[\"errn\"] *= parameter.scale\n return result\n\n def stat_profile(self, datasets, parameter, reoptimize=False):\n \"\"\"Compute fit statistic profile.\n\n The method used is to vary one parameter, keeping all others fixed.\n So this is taking a \"slice\" or \"scan\" of the fit statistic.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n parameter : `~gammapy.modeling.Parameter`\n Parameter of interest. The specification for the scan, such as bounds\n and number of values is taken from the parameter object.\n reoptimize : bool\n Re-optimize other parameters, when computing the confidence region.\n\n Returns\n -------\n results : dict\n Dictionary with keys \"values\", \"stat\" and \"fit_results\". 
The latter contains an\n empty list, if `reoptimize` is set to False\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n parameter = parameters[parameter]\n values = parameter.scan_values\n\n stats = []\n fit_results = []\n with parameters.restore_status():\n for value in progress_bar(values, desc=\"Scan values\"):\n parameter.value = value\n if reoptimize:\n parameter.frozen = True\n result = self.optimize(datasets=datasets)\n stat = result.total_stat\n fit_results.append(result)\n else:\n stat = datasets.stat_sum()\n stats.append(stat)\n\n return {\n f\"{parameter.name}_scan\": values,\n \"stat_scan\": np.array(stats),\n \"fit_results\": fit_results,\n }\n\n def stat_surface(self, datasets, x, y, reoptimize=False):\n \"\"\"Compute fit statistic surface.\n\n The method used is to vary two parameters, keeping all others fixed.\n So this is taking a \"slice\" or \"scan\" of the fit statistic.\n\n Caveat: This method can be very computationally intensive and slow\n\n See also: `Fit.stat_contour`\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n x, y : `~gammapy.modeling.Parameter`\n Parameters of interest\n reoptimize : bool\n Re-optimize other parameters, when computing the confidence region.\n\n Returns\n -------\n results : dict\n Dictionary with keys \"x_values\", \"y_values\", \"stat\" and \"fit_results\". The latter contains an\n empty list, if `reoptimize` is set to False\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n\n x, y = parameters[x], parameters[y]\n\n stats = []\n fit_results = []\n\n with parameters.restore_status():\n for x_value, y_value in progress_bar(\n itertools.product(x.scan_values, y.scan_values), desc=\"Trial values\"\n ):\n x.value, y.value = x_value, y_value\n\n if reoptimize:\n x.frozen, y.frozen = True, True\n result = self.optimize(datasets=datasets)\n stat = result.total_stat\n fit_results.append(result)\n else:\n stat = datasets.stat_sum()\n\n stats.append(stat)\n\n shape = (len(x.scan_values), len(y.scan_values))\n stats = np.array(stats).reshape(shape)\n\n if reoptimize:\n fit_results = np.array(fit_results).reshape(shape)\n\n return {\n f\"{x.name}_scan\": x.scan_values,\n f\"{y.name}_scan\": y.scan_values,\n \"stat_scan\": stats,\n \"fit_results\": fit_results,\n }\n\n def stat_contour(self, datasets, x, y, numpoints=10, sigma=1):\n \"\"\"Compute stat contour.\n\n Calls ``iminuit.Minuit.mncontour``.\n\n This is a contouring algorithm for a 2D function\n which is not simply the fit statistic function.\n That 2D function is given at each point ``(par_1, par_2)``\n by re-optimising all other free parameters,\n and taking the fit statistic at that point.\n\n Very compute-intensive and slow.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n x, y : `~gammapy.modeling.Parameter`\n Parameters of interest\n numpoints : int\n Number of contour points\n sigma : float\n Number of standard deviations for the confidence level\n\n Returns\n -------\n result : dict\n Dictionary containing the parameter values defining the contour, with the\n boolean flag \"success\" and the info objects from ``mncontour``.\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n\n x = parameters[x]\n y = parameters[y]\n\n with parameters.restore_status():\n result = contour_iminuit(\n parameters=parameters,\n function=datasets.stat_sum,\n x=x,\n y=y,\n numpoints=numpoints,\n sigma=sigma,\n )\n\n x_name = x.name\n y_name 
= y.name\n x = result[\"x\"] * x.scale\n y = result[\"y\"] * y.scale\n\n return {\n x_name: x,\n y_name: y,\n \"success\": result[\"success\"],\n }\n\n\nclass FitStepResult:\n \"\"\"Fit result base class\"\"\"\n\n def __init__(self, backend, method, success, message):\n self._success = success\n self._message = message\n self._backend = backend\n self._method = method\n\n @property\n def backend(self):\n \"\"\"Optimizer backend used for the fit.\"\"\"\n return self._backend\n\n @property\n def method(self):\n \"\"\"Optimizer method used for the fit.\"\"\"\n return self._method\n\n @property\n def success(self):\n \"\"\"Fit success status flag.\"\"\"\n return self._success\n\n @property\n def message(self):\n \"\"\"Optimizer status message.\"\"\"\n return self._message\n\n def __repr__(self):\n return (\n f\"{self.__class__.__name__}\\n\\n\"\n f\"\\tbackend : {self.backend}\\n\"\n f\"\\tmethod : {self.method}\\n\"\n f\"\\tsuccess : {self.success}\\n\"\n f\"\\tmessage : {self.message}\\n\"\n )\n\n\nclass CovarianceResult(FitStepResult):\n \"\"\"Covariance result object.\"\"\"\n\n pass\n\n\nclass OptimizeResult(FitStepResult):\n \"\"\"Optimize result object.\"\"\"\n\n def __init__(self, parameters, nfev, total_stat, trace, **kwargs):\n self._parameters = parameters\n self._nfev = nfev\n self._total_stat = total_stat\n self._trace = trace\n super().__init__(**kwargs)\n\n @property\n def parameters(self):\n \"\"\"Best fit parameters\"\"\"\n return self._parameters\n\n @property\n def trace(self):\n \"\"\"Parameter trace from the optimisation\"\"\"\n return self._trace\n\n @property\n def nfev(self):\n \"\"\"Number of function evaluations.\"\"\"\n return self._nfev\n\n @property\n def total_stat(self):\n \"\"\"Value of the fit statistic at minimum.\"\"\"\n return self._total_stat\n\n def __repr__(self):\n str_ = super().__repr__()\n str_ += f\"\\tnfev : {self.nfev}\\n\"\n str_ += f\"\\ttotal stat : {self.total_stat:.2f}\\n\\n\"\n return str_\n\n\nclass FitResult:\n \"\"\"Fit result class\n\n Parameters\n ----------\n optimize_result : `OptimizeResult`\n Result of the optimization step.\n covariance_result : `CovarianceResult`\n Result of the covariance step.\n \"\"\"\n\n def __init__(self, optimize_result=None, covariance_result=None):\n self._optimize_result = optimize_result\n self._covariance_result = covariance_result\n\n # TODO: is the convenience access needed?\n @property\n def parameters(self):\n \"\"\"Best fit parameters of the optimization step\"\"\"\n return self.optimize_result.parameters\n\n # TODO: is the convenience access needed?\n @property\n def total_stat(self):\n \"\"\"Total stat of the optimization step\"\"\"\n return self.optimize_result.total_stat\n\n # TODO: is the convenience access needed?\n @property\n def trace(self):\n \"\"\"Parameter trace of the optimisation step\"\"\"\n return self.optimize_result.trace\n\n # TODO: is the convenience access needed?\n @property\n def nfev(self):\n \"\"\"Number of function evaluations of the optimisation step\"\"\"\n return self.optimize_result.nfev\n\n # TODO: is the convenience access needed?\n @property\n def backend(self):\n \"\"\"Optimizer backend used for the fit.\"\"\"\n return self.optimize_result.backend\n\n # TODO: is the convenience access needed?\n @property\n def method(self):\n \"\"\"Optimizer method used for the fit.\"\"\"\n return self.optimize_result.method\n\n # TODO: is the convenience access needed?\n @property\n def message(self):\n \"\"\"Optimizer status message.\"\"\"\n return self.optimize_result.message\n\n 
@property\n def success(self):\n \"\"\"Total success flag\"\"\"\n success = self.optimize_result.success and self.covariance_result.success\n return success\n\n @property\n def optimize_result(self):\n \"\"\"Optimize result\"\"\"\n return self._optimize_result\n\n @property\n def covariance_result(self):\n \"\"\"Optimize result\"\"\"\n return self._optimize_result\n\n def __repr__(self):\n str_ = \"\"\n if self.optimize_result:\n str_ += str(self.optimize_result)\n\n if self.covariance_result:\n str_ += str(self.covariance_result)\n\n return str_\n", "path": "gammapy/modeling/fit.py" } ]
[ { "content": "# Licensed under a 3-clause BSD style license - see LICENSE.rst\nimport itertools\nimport logging\nimport numpy as np\nfrom gammapy.utils.pbar import progress_bar\nfrom gammapy.utils.table import table_from_row_data\nfrom .covariance import Covariance\nfrom .iminuit import (\n confidence_iminuit,\n contour_iminuit,\n covariance_iminuit,\n optimize_iminuit,\n)\nfrom .scipy import confidence_scipy, optimize_scipy\nfrom .sherpa import optimize_sherpa\n\n__all__ = [\"Fit\"]\n\nlog = logging.getLogger(__name__)\n\n\nclass Registry:\n \"\"\"Registry of available backends for given tasks.\n\n Gives users the power to extend from their scripts.\n Used by `Fit` below.\n\n Not sure if we should call it \"backend\" or \"method\" or something else.\n Probably we will code up some methods, e.g. for profile analysis ourselves,\n using scipy or even just Python / Numpy?\n \"\"\"\n\n register = {\n \"optimize\": {\n \"minuit\": optimize_iminuit,\n \"sherpa\": optimize_sherpa,\n \"scipy\": optimize_scipy,\n },\n \"covariance\": {\n \"minuit\": covariance_iminuit,\n # \"sherpa\": covariance_sherpa,\n # \"scipy\": covariance_scipy,\n },\n \"confidence\": {\n \"minuit\": confidence_iminuit,\n # \"sherpa\": confidence_sherpa,\n \"scipy\": confidence_scipy,\n },\n }\n\n @classmethod\n def get(cls, task, backend):\n if task not in cls.register:\n raise ValueError(f\"Unknown task {task!r}\")\n\n backend_options = cls.register[task]\n\n if backend not in backend_options:\n raise ValueError(f\"Unknown backend {backend!r} for task {task!r}\")\n\n return backend_options[backend]\n\n\nregistry = Registry()\n\n\nclass Fit:\n \"\"\"Fit class.\n\n The fit class provides a uniform interface to multiple fitting backends.\n Currently available: \"minuit\", \"sherpa\" and \"scipy\"\n\n Parameters\n ----------\n backend : {\"minuit\", \"scipy\" \"sherpa\"}\n Global backend used for fitting, default : minuit\n optimize_opts : dict\n Keyword arguments passed to the optimizer. For the `\"minuit\"` backend\n see https://iminuit.readthedocs.io/en/latest/api.html#iminuit.Minuit\n for a detailed description of the available options. If there is an entry\n 'migrad_opts', those options will be passed to `iminuit.Minuit.migrad()`.\n\n For the `\"sherpa\"` backend you can from the options `method = {\"simplex\", \"levmar\", \"moncar\", \"gridsearch\"}`\n Those methods are described and compared in detail on\n http://cxc.cfa.harvard.edu/sherpa/methods/index.html. The available\n options of the optimization methods are described on the following\n pages in detail:\n\n * http://cxc.cfa.harvard.edu/sherpa/ahelp/neldermead.html\n * http://cxc.cfa.harvard.edu/sherpa/ahelp/montecarlo.html\n * http://cxc.cfa.harvard.edu/sherpa/ahelp/gridsearch.html\n * http://cxc.cfa.harvard.edu/sherpa/ahelp/levmar.html\n\n For the `\"scipy\"` backend the available options are described in detail here:\n https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html\n\n covariance_opts : dict\n Covariance options passed to the given backend.\n confidence_opts : dict\n Extra arguments passed to the backend. E.g. `iminuit.Minuit.minos` supports\n a ``maxcall`` option. For the scipy backend ``confidence_opts`` are forwarded\n to `~scipy.optimize.brentq`. 
If the confidence estimation fails, the bracketing\n interval can be adapted by modifying the the upper bound of the interval (``b``) value.\n store_trace : bool\n Whether to store the trace of the fit\n \"\"\"\n\n def __init__(\n self,\n backend=\"minuit\",\n optimize_opts=None,\n covariance_opts=None,\n confidence_opts=None,\n store_trace=False,\n ):\n self.store_trace = store_trace\n self.backend = backend\n\n if optimize_opts is None:\n optimize_opts = {\"backend\": backend}\n\n if covariance_opts is None:\n covariance_opts = {\"backend\": backend}\n\n if confidence_opts is None:\n confidence_opts = {\"backend\": backend}\n\n self.optimize_opts = optimize_opts\n self.covariance_opts = covariance_opts\n self.confidence_opts = confidence_opts\n self._minuit = None\n\n @property\n def minuit(self):\n \"\"\"Iminuit object\"\"\"\n return self._minuit\n\n @staticmethod\n def _parse_datasets(datasets):\n from gammapy.datasets import Datasets\n\n datasets = Datasets(datasets)\n return datasets, datasets.parameters\n\n def run(self, datasets):\n \"\"\"Run all fitting steps.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n\n Returns\n -------\n fit_result : `FitResult`\n Fit result\n \"\"\"\n optimize_result = self.optimize(datasets=datasets)\n\n if self.backend not in registry.register[\"covariance\"]:\n log.warning(\"No covariance estimate - not supported by this backend.\")\n return optimize_result\n\n covariance_result = self.covariance(datasets=datasets)\n\n return FitResult(\n optimize_result=optimize_result,\n covariance_result=covariance_result,\n )\n\n def optimize(self, datasets):\n \"\"\"Run the optimization.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n\n Returns\n -------\n optimize_result : `OptimizeResult`\n Optimization result\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n datasets.parameters.check_limits()\n\n parameters.autoscale()\n\n kwargs = self.optimize_opts.copy()\n backend = kwargs.pop(\"backend\", self.backend)\n\n compute = registry.get(\"optimize\", backend)\n # TODO: change this calling interface!\n # probably should pass a fit statistic, which has a model, which has parameters\n # and return something simpler, not a tuple of three things\n factors, info, optimizer = compute(\n parameters=parameters,\n function=datasets.stat_sum,\n store_trace=self.store_trace,\n **kwargs,\n )\n\n if backend == \"minuit\":\n self._minuit = optimizer\n kwargs[\"method\"] = \"migrad\"\n\n trace = table_from_row_data(info.pop(\"trace\"))\n\n if self.store_trace:\n idx = [\n parameters.index(par)\n for par in parameters.unique_parameters.free_parameters\n ]\n unique_names = np.array(datasets.models.parameters_unique_names)[idx]\n trace.rename_columns(trace.colnames[1:], list(unique_names))\n\n # Copy final results into the parameters object\n parameters.set_parameter_factors(factors)\n parameters.check_limits()\n return OptimizeResult(\n parameters=parameters,\n total_stat=datasets.stat_sum(),\n backend=backend,\n method=kwargs.get(\"method\", backend),\n trace=trace,\n **info,\n )\n\n def covariance(self, datasets):\n \"\"\"Estimate the covariance matrix.\n\n Assumes that the model parameters are already optimised.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n\n Returns\n -------\n result : `CovarianceResult`\n Results\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n\n kwargs = 
self.covariance_opts.copy()\n kwargs[\"minuit\"] = self.minuit\n backend = kwargs.pop(\"backend\", self.backend)\n compute = registry.get(\"covariance\", backend)\n\n with parameters.restore_status():\n if self.backend == \"minuit\":\n method = \"hesse\"\n else:\n method = \"\"\n\n factor_matrix, info = compute(\n parameters=parameters, function=datasets.stat_sum, **kwargs\n )\n\n datasets.models.covariance = Covariance.from_factor_matrix(\n parameters=parameters, matrix=factor_matrix\n )\n\n # TODO: decide what to return, and fill the info correctly!\n return CovarianceResult(\n backend=backend,\n method=method,\n success=info[\"success\"],\n message=info[\"message\"],\n )\n\n def confidence(self, datasets, parameter, sigma=1, reoptimize=True):\n \"\"\"Estimate confidence interval.\n\n Extra ``kwargs`` are passed to the backend.\n E.g. `iminuit.Minuit.minos` supports a ``maxcall`` option.\n\n For the scipy backend ``kwargs`` are forwarded to `~scipy.optimize.brentq`. If the\n confidence estimation fails, the bracketing interval can be adapted by modifying the\n the upper bound of the interval (``b``) value.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n parameter : `~gammapy.modeling.Parameter`\n Parameter of interest\n sigma : float\n Number of standard deviations for the confidence level\n reoptimize : bool\n Re-optimize other parameters, when computing the confidence region.\n\n Returns\n -------\n result : dict\n Dictionary with keys \"errp\", 'errn\", \"success\" and \"nfev\".\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n\n kwargs = self.confidence_opts.copy()\n backend = kwargs.pop(\"backend\", self.backend)\n\n compute = registry.get(\"confidence\", backend)\n parameter = parameters[parameter]\n\n with parameters.restore_status():\n result = compute(\n parameters=parameters,\n parameter=parameter,\n function=datasets.stat_sum,\n sigma=sigma,\n reoptimize=reoptimize,\n **kwargs,\n )\n\n result[\"errp\"] *= parameter.scale\n result[\"errn\"] *= parameter.scale\n return result\n\n def stat_profile(self, datasets, parameter, reoptimize=False):\n \"\"\"Compute fit statistic profile.\n\n The method used is to vary one parameter, keeping all others fixed.\n So this is taking a \"slice\" or \"scan\" of the fit statistic.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n parameter : `~gammapy.modeling.Parameter`\n Parameter of interest. The specification for the scan, such as bounds\n and number of values is taken from the parameter object.\n reoptimize : bool\n Re-optimize other parameters, when computing the confidence region.\n\n Returns\n -------\n results : dict\n Dictionary with keys \"values\", \"stat\" and \"fit_results\". 
The latter contains an\n empty list, if `reoptimize` is set to False\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n parameter = parameters[parameter]\n values = parameter.scan_values\n\n stats = []\n fit_results = []\n with parameters.restore_status():\n for value in progress_bar(values, desc=\"Scan values\"):\n parameter.value = value\n if reoptimize:\n parameter.frozen = True\n result = self.optimize(datasets=datasets)\n stat = result.total_stat\n fit_results.append(result)\n else:\n stat = datasets.stat_sum()\n stats.append(stat)\n\n return {\n f\"{parameter.name}_scan\": values,\n \"stat_scan\": np.array(stats),\n \"fit_results\": fit_results,\n }\n\n def stat_surface(self, datasets, x, y, reoptimize=False):\n \"\"\"Compute fit statistic surface.\n\n The method used is to vary two parameters, keeping all others fixed.\n So this is taking a \"slice\" or \"scan\" of the fit statistic.\n\n Caveat: This method can be very computationally intensive and slow\n\n See also: `Fit.stat_contour`\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n x, y : `~gammapy.modeling.Parameter`\n Parameters of interest\n reoptimize : bool\n Re-optimize other parameters, when computing the confidence region.\n\n Returns\n -------\n results : dict\n Dictionary with keys \"x_values\", \"y_values\", \"stat\" and \"fit_results\". The latter contains an\n empty list, if `reoptimize` is set to False\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n\n x, y = parameters[x], parameters[y]\n\n stats = []\n fit_results = []\n\n with parameters.restore_status():\n for x_value, y_value in progress_bar(\n itertools.product(x.scan_values, y.scan_values), desc=\"Trial values\"\n ):\n x.value, y.value = x_value, y_value\n\n if reoptimize:\n x.frozen, y.frozen = True, True\n result = self.optimize(datasets=datasets)\n stat = result.total_stat\n fit_results.append(result)\n else:\n stat = datasets.stat_sum()\n\n stats.append(stat)\n\n shape = (len(x.scan_values), len(y.scan_values))\n stats = np.array(stats).reshape(shape)\n\n if reoptimize:\n fit_results = np.array(fit_results).reshape(shape)\n\n return {\n f\"{x.name}_scan\": x.scan_values,\n f\"{y.name}_scan\": y.scan_values,\n \"stat_scan\": stats,\n \"fit_results\": fit_results,\n }\n\n def stat_contour(self, datasets, x, y, numpoints=10, sigma=1):\n \"\"\"Compute stat contour.\n\n Calls ``iminuit.Minuit.mncontour``.\n\n This is a contouring algorithm for a 2D function\n which is not simply the fit statistic function.\n That 2D function is given at each point ``(par_1, par_2)``\n by re-optimising all other free parameters,\n and taking the fit statistic at that point.\n\n Very compute-intensive and slow.\n\n Parameters\n ----------\n datasets : `Datasets` or list of `Dataset`\n Datasets to optimize.\n x, y : `~gammapy.modeling.Parameter`\n Parameters of interest\n numpoints : int\n Number of contour points\n sigma : float\n Number of standard deviations for the confidence level\n\n Returns\n -------\n result : dict\n Dictionary containing the parameter values defining the contour, with the\n boolean flag \"success\" and the info objects from ``mncontour``.\n \"\"\"\n datasets, parameters = self._parse_datasets(datasets=datasets)\n\n x = parameters[x]\n y = parameters[y]\n\n with parameters.restore_status():\n result = contour_iminuit(\n parameters=parameters,\n function=datasets.stat_sum,\n x=x,\n y=y,\n numpoints=numpoints,\n sigma=sigma,\n )\n\n x_name = x.name\n y_name 
= y.name\n x = result[\"x\"] * x.scale\n y = result[\"y\"] * y.scale\n\n return {\n x_name: x,\n y_name: y,\n \"success\": result[\"success\"],\n }\n\n\nclass FitStepResult:\n \"\"\"Fit result base class\"\"\"\n\n def __init__(self, backend, method, success, message):\n self._success = success\n self._message = message\n self._backend = backend\n self._method = method\n\n @property\n def backend(self):\n \"\"\"Optimizer backend used for the fit.\"\"\"\n return self._backend\n\n @property\n def method(self):\n \"\"\"Optimizer method used for the fit.\"\"\"\n return self._method\n\n @property\n def success(self):\n \"\"\"Fit success status flag.\"\"\"\n return self._success\n\n @property\n def message(self):\n \"\"\"Optimizer status message.\"\"\"\n return self._message\n\n def __repr__(self):\n return (\n f\"{self.__class__.__name__}\\n\\n\"\n f\"\\tbackend : {self.backend}\\n\"\n f\"\\tmethod : {self.method}\\n\"\n f\"\\tsuccess : {self.success}\\n\"\n f\"\\tmessage : {self.message}\\n\"\n )\n\n\nclass CovarianceResult(FitStepResult):\n \"\"\"Covariance result object.\"\"\"\n\n pass\n\n\nclass OptimizeResult(FitStepResult):\n \"\"\"Optimize result object.\"\"\"\n\n def __init__(self, parameters, nfev, total_stat, trace, **kwargs):\n self._parameters = parameters\n self._nfev = nfev\n self._total_stat = total_stat\n self._trace = trace\n super().__init__(**kwargs)\n\n @property\n def parameters(self):\n \"\"\"Best fit parameters\"\"\"\n return self._parameters\n\n @property\n def trace(self):\n \"\"\"Parameter trace from the optimisation\"\"\"\n return self._trace\n\n @property\n def nfev(self):\n \"\"\"Number of function evaluations.\"\"\"\n return self._nfev\n\n @property\n def total_stat(self):\n \"\"\"Value of the fit statistic at minimum.\"\"\"\n return self._total_stat\n\n def __repr__(self):\n str_ = super().__repr__()\n str_ += f\"\\tnfev : {self.nfev}\\n\"\n str_ += f\"\\ttotal stat : {self.total_stat:.2f}\\n\\n\"\n return str_\n\n\nclass FitResult:\n \"\"\"Fit result class\n\n Parameters\n ----------\n optimize_result : `OptimizeResult`\n Result of the optimization step.\n covariance_result : `CovarianceResult`\n Result of the covariance step.\n \"\"\"\n\n def __init__(self, optimize_result=None, covariance_result=None):\n self._optimize_result = optimize_result\n self._covariance_result = covariance_result\n\n # TODO: is the convenience access needed?\n @property\n def parameters(self):\n \"\"\"Best fit parameters of the optimization step\"\"\"\n return self.optimize_result.parameters\n\n # TODO: is the convenience access needed?\n @property\n def total_stat(self):\n \"\"\"Total stat of the optimization step\"\"\"\n return self.optimize_result.total_stat\n\n # TODO: is the convenience access needed?\n @property\n def trace(self):\n \"\"\"Parameter trace of the optimisation step\"\"\"\n return self.optimize_result.trace\n\n # TODO: is the convenience access needed?\n @property\n def nfev(self):\n \"\"\"Number of function evaluations of the optimisation step\"\"\"\n return self.optimize_result.nfev\n\n # TODO: is the convenience access needed?\n @property\n def backend(self):\n \"\"\"Optimizer backend used for the fit.\"\"\"\n return self.optimize_result.backend\n\n # TODO: is the convenience access needed?\n @property\n def method(self):\n \"\"\"Optimizer method used for the fit.\"\"\"\n return self.optimize_result.method\n\n # TODO: is the convenience access needed?\n @property\n def message(self):\n \"\"\"Optimizer status message.\"\"\"\n return self.optimize_result.message\n\n 
@property\n def success(self):\n \"\"\"Total success flag\"\"\"\n success = self.optimize_result.success and self.covariance_result.success\n return success\n\n @property\n def optimize_result(self):\n \"\"\"Optimize result\"\"\"\n return self._optimize_result\n\n @property\n def covariance_result(self):\n \"\"\"Optimize result\"\"\"\n return self._covariance_result\n\n def __repr__(self):\n str_ = \"\"\n if self.optimize_result:\n str_ += str(self.optimize_result)\n\n if self.covariance_result:\n str_ += str(self.covariance_result)\n\n return str_\n", "path": "gammapy/modeling/fit.py" } ]
diff --git a/gammapy/modeling/fit.py b/gammapy/modeling/fit.py index 76900466f9..1e4f4b89ca 100644 --- a/gammapy/modeling/fit.py +++ b/gammapy/modeling/fit.py @@ -635,7 +635,7 @@ def optimize_result(self): @property def covariance_result(self): """Optimize result""" - return self._optimize_result + return self._covariance_result def __repr__(self): str_ = "" diff --git a/gammapy/modeling/tests/test_fit.py b/gammapy/modeling/tests/test_fit.py index 59bd504688..0f63fd640d 100644 --- a/gammapy/modeling/tests/test_fit.py +++ b/gammapy/modeling/tests/test_fit.py @@ -95,6 +95,8 @@ def test_run(backend): assert result.success assert result.optimize_result.method == "migrad" + assert result.covariance_result.method == "hesse" + assert result.covariance_result.success == True assert_allclose(pars["x"].value, 2, rtol=1e-3) assert_allclose(pars["y"].value, 3e2, rtol=1e-3)
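With the one-line property fix above in place, printing a `FitResult` shows each fitting step under its own class name. The snippet below is purely illustrative; the `datasets` object and the printed values are assumptions, not taken from the report:

```python
# Illustrative usage; `datasets` is assumed to be an existing gammapy Datasets object.
from gammapy.modeling import Fit

fit_result = Fit().run(datasets=datasets)
print(fit_result)
# OptimizeResult
#
#     backend    : minuit
#     method     : migrad
#     success    : True
#     ...
#
# CovarianceResult
#
#     backend    : minuit
#     method     : hesse
#     success    : True
#     ...
```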
aio-libs__aiohttp-6924
ClientSession.timeout has an incorrect typing ### Describe the bug The `aiohttp.ClientSession.timeout` attribute has a type of `Union[object, aiohttp.ClientTimeout]`, however the code logic will never actually assign a bare `object` type to the `self._timeout` attribute, making this typing quite over-inclusive. Trying to use this attribute in typed code results in having to use `cast(aiohttp.ClientTimeout, session.timeout)`, which is far from ideal considering one can just fix the typing in the library. I ran into this while using Python 3.8.10, but the exact same explanation above applies to the current master branch (and the version I'm using of course), as shown by the snippets below. 3.8 branch `__init__` parameter: https://github.com/aio-libs/aiohttp/blob/6243204a6a6a0e5ff84ac754218381b44a841e72/aiohttp/client.py#L217 3.8 branch `self._timeout` assignment: https://github.com/aio-libs/aiohttp/blob/6243204a6a6a0e5ff84ac754218381b44a841e72/aiohttp/client.py#L261-L290 Note the `# type: ignore` comment on `L278` there - it's because the `timeout is sentinel` check does not narrow down the `timeout` type. The correct way to go about this would be to use a `cast` there instead of ignoring the issue like that. 3.8 branch `timeout` attribute declaration: https://github.com/aio-libs/aiohttp/blob/6243204a6a6a0e5ff84ac754218381b44a841e72/aiohttp/client.py#L1029-L1032 Master branch `__init__` parameter: https://github.com/aio-libs/aiohttp/blob/52fa599c5637dd1a38761afb6829b0439b1cf505/aiohttp/client.py#L215 Master branch `self._timeout` assignment: https://github.com/aio-libs/aiohttp/blob/52fa599c5637dd1a38761afb6829b0439b1cf505/aiohttp/client.py#L260-L263 Due to a different handling of the `sentinel` value via an `Enum` member, no `cast` is needed here. Master branch `timeout` attribute declaration: https://github.com/aio-libs/aiohttp/blob/52fa599c5637dd1a38761afb6829b0439b1cf505/aiohttp/client.py#L1008-L1011 The attribute type is still over-inclusive here though. The solution would be quite simple: ```py @property def timeout(self) -> ClientTimeout: """Timeout for the session.""" return self._timeout ```` Please let me know if you'd welcome a PR for this. I'd like to get this backported back to 3.8 (that I'm using) if possible, but if not, just fixing it in the master branch so that it's correct going forward would be good enough for me. ### To Reproduce Utilize some kind of a type checker like MyPy. ```py import asyncio import aiohttp async def main: session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=10)) # read back the total time attribute total_time = session.timeout.total # "object" type of "Union[object, ClientTimeout]" has no attribute "total" print(total_time) asyncio.run(main()) ``` ### Expected behavior The attribute having only the `aiohttp.ClientTimeout` type and not requiring `cast` usage when accessing the attribute during library usage in user code. ### Logs/tracebacks ```python-traceback Not applicable ``` ### Python Version ```console Python 3.8.10 ``` ### aiohttp Version ```console Version: 3.8.1 ``` ### multidict Version ```console Version: 6.0.2 ``` ### yarl Version ```console Version: 1.7.2 ``` ### OS Windows ### Related component Client ### Additional context Related issues and PRs: #4191 #4193 ### Code of Conduct - [X] I agree to follow the aio-libs Code of Conduct
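As a side note, the reproduction snippet in the report above does not parse as written (`async def main:` is missing its parentheses, and the session is never closed). A corrected, self-contained version of the same reproduction is sketched below; the mypy error it triggers is the one quoted in the report, and it goes away once `ClientSession.timeout` is annotated as plain `ClientTimeout`:

```python
import asyncio

import aiohttp


async def main() -> None:
    # Closing the session via the async context manager avoids the
    # "Unclosed client session" ResourceWarning from the original snippet.
    async with aiohttp.ClientSession(
        timeout=aiohttp.ClientTimeout(total=10)
    ) as session:
        # With the over-inclusive Union[object, ClientTimeout] annotation, mypy
        # reports that "object" has no attribute "total"; with the narrowed
        # property the access type-checks cleanly.
        total_time = session.timeout.total
        print(total_time)


asyncio.run(main())
```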
[ { "content": "\"\"\"HTTP Client for asyncio.\"\"\"\n\nimport asyncio\nimport base64\nimport hashlib\nimport json\nimport os\nimport sys\nimport traceback\nimport warnings\nfrom contextlib import suppress\nfrom types import SimpleNamespace, TracebackType\nfrom typing import (\n Any,\n Awaitable,\n Callable,\n Coroutine,\n FrozenSet,\n Generator,\n Generic,\n Iterable,\n List,\n Mapping,\n Optional,\n Set,\n Tuple,\n Type,\n TypeVar,\n Union,\n)\n\nimport attr\nfrom multidict import CIMultiDict, MultiDict, MultiDictProxy, istr\nfrom yarl import URL\n\nfrom . import hdrs, http, payload\nfrom .abc import AbstractCookieJar\nfrom .client_exceptions import (\n ClientConnectionError as ClientConnectionError,\n ClientConnectorCertificateError as ClientConnectorCertificateError,\n ClientConnectorError as ClientConnectorError,\n ClientConnectorSSLError as ClientConnectorSSLError,\n ClientError as ClientError,\n ClientHttpProxyError as ClientHttpProxyError,\n ClientOSError as ClientOSError,\n ClientPayloadError as ClientPayloadError,\n ClientProxyConnectionError as ClientProxyConnectionError,\n ClientResponseError as ClientResponseError,\n ClientSSLError as ClientSSLError,\n ContentTypeError as ContentTypeError,\n InvalidURL as InvalidURL,\n ServerConnectionError as ServerConnectionError,\n ServerDisconnectedError as ServerDisconnectedError,\n ServerFingerprintMismatch as ServerFingerprintMismatch,\n ServerTimeoutError as ServerTimeoutError,\n TooManyRedirects as TooManyRedirects,\n WSServerHandshakeError as WSServerHandshakeError,\n)\nfrom .client_reqrep import (\n ClientRequest as ClientRequest,\n ClientResponse as ClientResponse,\n Fingerprint as Fingerprint,\n RequestInfo as RequestInfo,\n _merge_ssl_params,\n)\nfrom .client_ws import ClientWebSocketResponse as ClientWebSocketResponse\nfrom .connector import (\n BaseConnector as BaseConnector,\n NamedPipeConnector as NamedPipeConnector,\n TCPConnector as TCPConnector,\n UnixConnector as UnixConnector,\n)\nfrom .cookiejar import CookieJar\nfrom .helpers import (\n _SENTINEL,\n DEBUG,\n PY_36,\n BasicAuth,\n TimeoutHandle,\n ceil_timeout,\n get_env_proxy_for_url,\n get_running_loop,\n sentinel,\n strip_auth_from_url,\n)\nfrom .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter\nfrom .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse\nfrom .streams import FlowControlDataQueue\nfrom .tracing import Trace, TraceConfig\nfrom .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL\n\n__all__ = (\n # client_exceptions\n \"ClientConnectionError\",\n \"ClientConnectorCertificateError\",\n \"ClientConnectorError\",\n \"ClientConnectorSSLError\",\n \"ClientError\",\n \"ClientHttpProxyError\",\n \"ClientOSError\",\n \"ClientPayloadError\",\n \"ClientProxyConnectionError\",\n \"ClientResponseError\",\n \"ClientSSLError\",\n \"ContentTypeError\",\n \"InvalidURL\",\n \"ServerConnectionError\",\n \"ServerDisconnectedError\",\n \"ServerFingerprintMismatch\",\n \"ServerTimeoutError\",\n \"TooManyRedirects\",\n \"WSServerHandshakeError\",\n # client_reqrep\n \"ClientRequest\",\n \"ClientResponse\",\n \"Fingerprint\",\n \"RequestInfo\",\n # connector\n \"BaseConnector\",\n \"TCPConnector\",\n \"UnixConnector\",\n \"NamedPipeConnector\",\n # client_ws\n \"ClientWebSocketResponse\",\n # client\n \"ClientSession\",\n \"ClientTimeout\",\n \"request\",\n)\n\n\ntry:\n from ssl import SSLContext\nexcept ImportError: # pragma: no cover\n SSLContext = object # type: ignore[misc,assignment]\n\n\[email 
protected](auto_attribs=True, frozen=True, slots=True)\nclass ClientTimeout:\n total: Optional[float] = None\n connect: Optional[float] = None\n sock_read: Optional[float] = None\n sock_connect: Optional[float] = None\n ceil_threshold: float = 5\n\n # pool_queue_timeout: Optional[float] = None\n # dns_resolution_timeout: Optional[float] = None\n # socket_connect_timeout: Optional[float] = None\n # connection_acquiring_timeout: Optional[float] = None\n # new_connection_timeout: Optional[float] = None\n # http_header_timeout: Optional[float] = None\n # response_body_timeout: Optional[float] = None\n\n # to create a timeout specific for a single request, either\n # - create a completely new one to overwrite the default\n # - or use http://www.attrs.org/en/stable/api.html#attr.evolve\n # to overwrite the defaults\n\n\n# 5 Minute default read timeout\nDEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)\n\n_RetType = TypeVar(\"_RetType\")\n\n\nclass ClientSession:\n \"\"\"First-class interface for making HTTP requests.\"\"\"\n\n ATTRS = frozenset(\n [\n \"_base_url\",\n \"_source_traceback\",\n \"_connector\",\n \"requote_redirect_url\",\n \"_loop\",\n \"_cookie_jar\",\n \"_connector_owner\",\n \"_default_auth\",\n \"_version\",\n \"_json_serialize\",\n \"_requote_redirect_url\",\n \"_timeout\",\n \"_raise_for_status\",\n \"_auto_decompress\",\n \"_trust_env\",\n \"_default_headers\",\n \"_skip_auto_headers\",\n \"_request_class\",\n \"_response_class\",\n \"_ws_response_class\",\n \"_trace_configs\",\n \"_read_bufsize\",\n ]\n )\n\n _source_traceback: Optional[traceback.StackSummary] = None\n _connector: Optional[BaseConnector] = None\n\n def __init__(\n self,\n base_url: Optional[StrOrURL] = None,\n *,\n connector: Optional[BaseConnector] = None,\n loop: Optional[asyncio.AbstractEventLoop] = None,\n cookies: Optional[LooseCookies] = None,\n headers: Optional[LooseHeaders] = None,\n skip_auto_headers: Optional[Iterable[str]] = None,\n auth: Optional[BasicAuth] = None,\n json_serialize: JSONEncoder = json.dumps,\n request_class: Type[ClientRequest] = ClientRequest,\n response_class: Type[ClientResponse] = ClientResponse,\n ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,\n version: HttpVersion = http.HttpVersion11,\n cookie_jar: Optional[AbstractCookieJar] = None,\n connector_owner: bool = True,\n raise_for_status: Union[\n bool, Callable[[ClientResponse], Awaitable[None]]\n ] = False,\n read_timeout: Union[float, object] = sentinel,\n conn_timeout: Optional[float] = None,\n timeout: Union[object, ClientTimeout] = sentinel,\n auto_decompress: bool = True,\n trust_env: bool = False,\n requote_redirect_url: bool = True,\n trace_configs: Optional[List[TraceConfig]] = None,\n read_bufsize: int = 2**16,\n ) -> None:\n if loop is None:\n if connector is not None:\n loop = connector._loop\n\n loop = get_running_loop(loop)\n\n if base_url is None or isinstance(base_url, URL):\n self._base_url: Optional[URL] = base_url\n else:\n self._base_url = URL(base_url)\n assert (\n self._base_url.origin() == self._base_url\n ), \"Only absolute URLs without path part are supported\"\n\n if connector is None:\n connector = TCPConnector(loop=loop)\n\n if connector._loop is not loop:\n raise RuntimeError(\"Session and connector has to use same event loop\")\n\n self._loop = loop\n\n if loop.get_debug():\n self._source_traceback = traceback.extract_stack(sys._getframe(1))\n\n if cookie_jar is None:\n cookie_jar = CookieJar(loop=loop)\n self._cookie_jar = cookie_jar\n\n if 
cookies is not None:\n self._cookie_jar.update_cookies(cookies)\n\n self._connector = connector\n self._connector_owner = connector_owner\n self._default_auth = auth\n self._version = version\n self._json_serialize = json_serialize\n if timeout is sentinel:\n self._timeout = DEFAULT_TIMEOUT\n if read_timeout is not sentinel:\n warnings.warn(\n \"read_timeout is deprecated, \" \"use timeout argument instead\",\n DeprecationWarning,\n stacklevel=2,\n )\n self._timeout = attr.evolve(self._timeout, total=read_timeout)\n if conn_timeout is not None:\n self._timeout = attr.evolve(self._timeout, connect=conn_timeout)\n warnings.warn(\n \"conn_timeout is deprecated, \" \"use timeout argument instead\",\n DeprecationWarning,\n stacklevel=2,\n )\n else:\n self._timeout = timeout # type: ignore[assignment]\n if read_timeout is not sentinel:\n raise ValueError(\n \"read_timeout and timeout parameters \"\n \"conflict, please setup \"\n \"timeout.read\"\n )\n if conn_timeout is not None:\n raise ValueError(\n \"conn_timeout and timeout parameters \"\n \"conflict, please setup \"\n \"timeout.connect\"\n )\n self._raise_for_status = raise_for_status\n self._auto_decompress = auto_decompress\n self._trust_env = trust_env\n self._requote_redirect_url = requote_redirect_url\n self._read_bufsize = read_bufsize\n\n # Convert to list of tuples\n if headers:\n real_headers: CIMultiDict[str] = CIMultiDict(headers)\n else:\n real_headers = CIMultiDict()\n self._default_headers: CIMultiDict[str] = real_headers\n if skip_auto_headers is not None:\n self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)\n else:\n self._skip_auto_headers = frozenset()\n\n self._request_class = request_class\n self._response_class = response_class\n self._ws_response_class = ws_response_class\n\n self._trace_configs = trace_configs or []\n for trace_config in self._trace_configs:\n trace_config.freeze()\n\n def __init_subclass__(cls: Type[\"ClientSession\"]) -> None:\n warnings.warn(\n \"Inheritance class {} from ClientSession \"\n \"is discouraged\".format(cls.__name__),\n DeprecationWarning,\n stacklevel=2,\n )\n\n if DEBUG:\n\n def __setattr__(self, name: str, val: Any) -> None:\n if name not in self.ATTRS:\n warnings.warn(\n \"Setting custom ClientSession.{} attribute \"\n \"is discouraged\".format(name),\n DeprecationWarning,\n stacklevel=2,\n )\n super().__setattr__(name, val)\n\n def __del__(self, _warnings: Any = warnings) -> None:\n if not self.closed:\n if PY_36:\n kwargs = {\"source\": self}\n else:\n kwargs = {}\n _warnings.warn(\n f\"Unclosed client session {self!r}\", ResourceWarning, **kwargs\n )\n context = {\"client_session\": self, \"message\": \"Unclosed client session\"}\n if self._source_traceback is not None:\n context[\"source_traceback\"] = self._source_traceback\n self._loop.call_exception_handler(context)\n\n def request(\n self, method: str, url: StrOrURL, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP request.\"\"\"\n return _RequestContextManager(self._request(method, url, **kwargs))\n\n def _build_url(self, str_or_url: StrOrURL) -> URL:\n url = URL(str_or_url)\n if self._base_url is None:\n return url\n else:\n assert not url.is_absolute() and url.path.startswith(\"/\")\n return self._base_url.join(url)\n\n async def _request(\n self,\n method: str,\n str_or_url: StrOrURL,\n *,\n params: Optional[Mapping[str, str]] = None,\n data: Any = None,\n json: Any = None,\n cookies: Optional[LooseCookies] = None,\n headers: Optional[LooseHeaders] = None,\n skip_auto_headers: 
Optional[Iterable[str]] = None,\n auth: Optional[BasicAuth] = None,\n allow_redirects: bool = True,\n max_redirects: int = 10,\n compress: Optional[str] = None,\n chunked: Optional[bool] = None,\n expect100: bool = False,\n raise_for_status: Union[\n None, bool, Callable[[ClientResponse], Awaitable[None]]\n ] = None,\n read_until_eof: bool = True,\n proxy: Optional[StrOrURL] = None,\n proxy_auth: Optional[BasicAuth] = None,\n timeout: Union[ClientTimeout, _SENTINEL] = sentinel,\n verify_ssl: Optional[bool] = None,\n fingerprint: Optional[bytes] = None,\n ssl_context: Optional[SSLContext] = None,\n ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None,\n proxy_headers: Optional[LooseHeaders] = None,\n trace_request_ctx: Optional[SimpleNamespace] = None,\n read_bufsize: Optional[int] = None,\n ) -> ClientResponse:\n\n # NOTE: timeout clamps existing connect and read timeouts. We cannot\n # set the default to None because we need to detect if the user wants\n # to use the existing timeouts by setting timeout to None.\n\n if self.closed:\n raise RuntimeError(\"Session is closed\")\n\n ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)\n\n if data is not None and json is not None:\n raise ValueError(\n \"data and json parameters can not be used at the same time\"\n )\n elif json is not None:\n data = payload.JsonPayload(json, dumps=self._json_serialize)\n\n if not isinstance(chunked, bool) and chunked is not None:\n warnings.warn(\"Chunk size is deprecated #1615\", DeprecationWarning)\n\n redirects = 0\n history = []\n version = self._version\n\n # Merge with default headers and transform to CIMultiDict\n headers = self._prepare_headers(headers)\n proxy_headers = self._prepare_headers(proxy_headers)\n\n try:\n url = self._build_url(str_or_url)\n except ValueError as e:\n raise InvalidURL(str_or_url) from e\n\n skip_headers = set(self._skip_auto_headers)\n if skip_auto_headers is not None:\n for i in skip_auto_headers:\n skip_headers.add(istr(i))\n\n if proxy is not None:\n try:\n proxy = URL(proxy)\n except ValueError as e:\n raise InvalidURL(proxy) from e\n\n if timeout is sentinel:\n real_timeout: ClientTimeout = self._timeout\n else:\n if not isinstance(timeout, ClientTimeout):\n real_timeout = ClientTimeout(total=timeout)\n else:\n real_timeout = timeout\n # timeout is cumulative for all request operations\n # (request, redirects, responses, data consuming)\n tm = TimeoutHandle(\n self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold\n )\n handle = tm.start()\n\n if read_bufsize is None:\n read_bufsize = self._read_bufsize\n\n traces = [\n Trace(\n self,\n trace_config,\n trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),\n )\n for trace_config in self._trace_configs\n ]\n\n for trace in traces:\n await trace.send_request_start(method, url.update_query(params), headers)\n\n timer = tm.timer()\n try:\n with timer:\n while True:\n url, auth_from_url = strip_auth_from_url(url)\n if auth and auth_from_url:\n raise ValueError(\n \"Cannot combine AUTH argument with \"\n \"credentials encoded in URL\"\n )\n\n if auth is None:\n auth = auth_from_url\n if auth is None:\n auth = self._default_auth\n # It would be confusing if we support explicit\n # Authorization header with auth argument\n if (\n headers is not None\n and auth is not None\n and hdrs.AUTHORIZATION in headers\n ):\n raise ValueError(\n \"Cannot combine AUTHORIZATION header \"\n \"with AUTH argument or credentials \"\n \"encoded in URL\"\n )\n\n all_cookies = 
self._cookie_jar.filter_cookies(url)\n\n if cookies is not None:\n tmp_cookie_jar = CookieJar()\n tmp_cookie_jar.update_cookies(cookies)\n req_cookies = tmp_cookie_jar.filter_cookies(url)\n if req_cookies:\n all_cookies.load(req_cookies)\n\n if proxy is not None:\n proxy = URL(proxy)\n elif self._trust_env:\n with suppress(LookupError):\n proxy, proxy_auth = get_env_proxy_for_url(url)\n\n req = self._request_class(\n method,\n url,\n params=params,\n headers=headers,\n skip_auto_headers=skip_headers,\n data=data,\n cookies=all_cookies,\n auth=auth,\n version=version,\n compress=compress,\n chunked=chunked,\n expect100=expect100,\n loop=self._loop,\n response_class=self._response_class,\n proxy=proxy,\n proxy_auth=proxy_auth,\n timer=timer,\n session=self,\n ssl=ssl,\n proxy_headers=proxy_headers,\n traces=traces,\n )\n\n # connection timeout\n try:\n async with ceil_timeout(\n real_timeout.connect,\n ceil_threshold=real_timeout.ceil_threshold,\n ):\n assert self._connector is not None\n conn = await self._connector.connect(\n req, traces=traces, timeout=real_timeout\n )\n except asyncio.TimeoutError as exc:\n raise ServerTimeoutError(\n \"Connection timeout \" \"to host {}\".format(url)\n ) from exc\n\n assert conn.transport is not None\n\n assert conn.protocol is not None\n conn.protocol.set_response_params(\n timer=timer,\n skip_payload=method.upper() == \"HEAD\",\n read_until_eof=read_until_eof,\n auto_decompress=self._auto_decompress,\n read_timeout=real_timeout.sock_read,\n read_bufsize=read_bufsize,\n timeout_ceil_threshold=self._connector._timeout_ceil_threshold,\n )\n\n try:\n try:\n resp = await req.send(conn)\n try:\n await resp.start(conn)\n except BaseException:\n resp.close()\n raise\n except BaseException:\n conn.close()\n raise\n except ClientError:\n raise\n except OSError as exc:\n if exc.errno is None and isinstance(exc, asyncio.TimeoutError):\n raise\n raise ClientOSError(*exc.args) from exc\n\n self._cookie_jar.update_cookies(resp.cookies, resp.url)\n\n # redirects\n if resp.status in (301, 302, 303, 307, 308) and allow_redirects:\n\n for trace in traces:\n await trace.send_request_redirect(\n method, url.update_query(params), headers, resp\n )\n\n redirects += 1\n history.append(resp)\n if max_redirects and redirects >= max_redirects:\n resp.close()\n raise TooManyRedirects(\n history[0].request_info, tuple(history)\n )\n\n # For 301 and 302, mimic IE, now changed in RFC\n # https://github.com/kennethreitz/requests/pull/269\n if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (\n resp.status in (301, 302) and resp.method == hdrs.METH_POST\n ):\n method = hdrs.METH_GET\n data = None\n if headers.get(hdrs.CONTENT_LENGTH):\n headers.pop(hdrs.CONTENT_LENGTH)\n\n r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(\n hdrs.URI\n )\n if r_url is None:\n # see github.com/aio-libs/aiohttp/issues/2022\n break\n else:\n # reading from correct redirection\n # response is forbidden\n resp.release()\n\n try:\n parsed_url = URL(\n r_url, encoded=not self._requote_redirect_url\n )\n\n except ValueError as e:\n raise InvalidURL(r_url) from e\n\n scheme = parsed_url.scheme\n if scheme not in (\"http\", \"https\", \"\"):\n resp.close()\n raise ValueError(\"Can redirect only to http or https\")\n elif not scheme:\n parsed_url = url.join(parsed_url)\n\n if url.origin() != parsed_url.origin():\n auth = None\n headers.pop(hdrs.AUTHORIZATION, None)\n\n url = parsed_url\n params = None\n resp.release()\n continue\n\n break\n\n # check response status\n if raise_for_status is 
None:\n raise_for_status = self._raise_for_status\n\n if raise_for_status is None:\n pass\n elif callable(raise_for_status):\n await raise_for_status(resp)\n elif raise_for_status:\n resp.raise_for_status()\n\n # register connection\n if handle is not None:\n if resp.connection is not None:\n resp.connection.add_callback(handle.cancel)\n else:\n handle.cancel()\n\n resp._history = tuple(history)\n\n for trace in traces:\n await trace.send_request_end(\n method, url.update_query(params), headers, resp\n )\n return resp\n\n except BaseException as e:\n # cleanup timer\n tm.close()\n if handle:\n handle.cancel()\n handle = None\n\n for trace in traces:\n await trace.send_request_exception(\n method, url.update_query(params), headers, e\n )\n raise\n\n def ws_connect(\n self,\n url: StrOrURL,\n *,\n method: str = hdrs.METH_GET,\n protocols: Iterable[str] = (),\n timeout: float = 10.0,\n receive_timeout: Optional[float] = None,\n autoclose: bool = True,\n autoping: bool = True,\n heartbeat: Optional[float] = None,\n auth: Optional[BasicAuth] = None,\n origin: Optional[str] = None,\n params: Optional[Mapping[str, str]] = None,\n headers: Optional[LooseHeaders] = None,\n proxy: Optional[StrOrURL] = None,\n proxy_auth: Optional[BasicAuth] = None,\n ssl: Union[SSLContext, bool, None, Fingerprint] = None,\n verify_ssl: Optional[bool] = None,\n fingerprint: Optional[bytes] = None,\n ssl_context: Optional[SSLContext] = None,\n proxy_headers: Optional[LooseHeaders] = None,\n compress: int = 0,\n max_msg_size: int = 4 * 1024 * 1024,\n ) -> \"_WSRequestContextManager\":\n \"\"\"Initiate websocket connection.\"\"\"\n return _WSRequestContextManager(\n self._ws_connect(\n url,\n method=method,\n protocols=protocols,\n timeout=timeout,\n receive_timeout=receive_timeout,\n autoclose=autoclose,\n autoping=autoping,\n heartbeat=heartbeat,\n auth=auth,\n origin=origin,\n params=params,\n headers=headers,\n proxy=proxy,\n proxy_auth=proxy_auth,\n ssl=ssl,\n verify_ssl=verify_ssl,\n fingerprint=fingerprint,\n ssl_context=ssl_context,\n proxy_headers=proxy_headers,\n compress=compress,\n max_msg_size=max_msg_size,\n )\n )\n\n async def _ws_connect(\n self,\n url: StrOrURL,\n *,\n method: str = hdrs.METH_GET,\n protocols: Iterable[str] = (),\n timeout: float = 10.0,\n receive_timeout: Optional[float] = None,\n autoclose: bool = True,\n autoping: bool = True,\n heartbeat: Optional[float] = None,\n auth: Optional[BasicAuth] = None,\n origin: Optional[str] = None,\n params: Optional[Mapping[str, str]] = None,\n headers: Optional[LooseHeaders] = None,\n proxy: Optional[StrOrURL] = None,\n proxy_auth: Optional[BasicAuth] = None,\n ssl: Union[SSLContext, bool, None, Fingerprint] = None,\n verify_ssl: Optional[bool] = None,\n fingerprint: Optional[bytes] = None,\n ssl_context: Optional[SSLContext] = None,\n proxy_headers: Optional[LooseHeaders] = None,\n compress: int = 0,\n max_msg_size: int = 4 * 1024 * 1024,\n ) -> ClientWebSocketResponse:\n\n if headers is None:\n real_headers: CIMultiDict[str] = CIMultiDict()\n else:\n real_headers = CIMultiDict(headers)\n\n default_headers = {\n hdrs.UPGRADE: \"websocket\",\n hdrs.CONNECTION: \"upgrade\",\n hdrs.SEC_WEBSOCKET_VERSION: \"13\",\n }\n\n for key, value in default_headers.items():\n real_headers.setdefault(key, value)\n\n sec_key = base64.b64encode(os.urandom(16))\n real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()\n\n if protocols:\n real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = \",\".join(protocols)\n if origin is not None:\n real_headers[hdrs.ORIGIN] = origin\n if 
compress:\n extstr = ws_ext_gen(compress=compress)\n real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr\n\n ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)\n\n # send request\n resp = await self.request(\n method,\n url,\n params=params,\n headers=real_headers,\n read_until_eof=False,\n auth=auth,\n proxy=proxy,\n proxy_auth=proxy_auth,\n ssl=ssl,\n proxy_headers=proxy_headers,\n )\n\n try:\n # check handshake\n if resp.status != 101:\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=\"Invalid response status\",\n status=resp.status,\n headers=resp.headers,\n )\n\n if resp.headers.get(hdrs.UPGRADE, \"\").lower() != \"websocket\":\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=\"Invalid upgrade header\",\n status=resp.status,\n headers=resp.headers,\n )\n\n if resp.headers.get(hdrs.CONNECTION, \"\").lower() != \"upgrade\":\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=\"Invalid connection header\",\n status=resp.status,\n headers=resp.headers,\n )\n\n # key calculation\n r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, \"\")\n match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()\n if r_key != match:\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=\"Invalid challenge response\",\n status=resp.status,\n headers=resp.headers,\n )\n\n # websocket protocol\n protocol = None\n if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:\n resp_protocols = [\n proto.strip()\n for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(\",\")\n ]\n\n for proto in resp_protocols:\n if proto in protocols:\n protocol = proto\n break\n\n # websocket compress\n notakeover = False\n if compress:\n compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)\n if compress_hdrs:\n try:\n compress, notakeover = ws_ext_parse(compress_hdrs)\n except WSHandshakeError as exc:\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=exc.args[0],\n status=resp.status,\n headers=resp.headers,\n ) from exc\n else:\n compress = 0\n notakeover = False\n\n conn = resp.connection\n assert conn is not None\n conn_proto = conn.protocol\n assert conn_proto is not None\n transport = conn.transport\n assert transport is not None\n reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(\n conn_proto, 2**16, loop=self._loop\n )\n conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)\n writer = WebSocketWriter(\n conn_proto,\n transport,\n use_mask=True,\n compress=compress,\n notakeover=notakeover,\n )\n except BaseException:\n resp.close()\n raise\n else:\n return self._ws_response_class(\n reader,\n writer,\n protocol,\n resp,\n timeout,\n autoclose,\n autoping,\n self._loop,\n receive_timeout=receive_timeout,\n heartbeat=heartbeat,\n compress=compress,\n client_notakeover=notakeover,\n )\n\n def _prepare_headers(self, headers: Optional[LooseHeaders]) -> \"CIMultiDict[str]\":\n \"\"\"Add default headers and transform it to CIMultiDict\"\"\"\n # Convert headers to MultiDict\n result = CIMultiDict(self._default_headers)\n if headers:\n if not isinstance(headers, (MultiDictProxy, MultiDict)):\n headers = CIMultiDict(headers)\n added_names: Set[str] = set()\n for key, value in headers.items():\n if key in added_names:\n result.add(key, value)\n else:\n result[key] = value\n added_names.add(key)\n return result\n\n def get(\n self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any\n ) -> 
\"_RequestContextManager\":\n \"\"\"Perform HTTP GET request.\"\"\"\n return _RequestContextManager(\n self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)\n )\n\n def options(\n self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP OPTIONS request.\"\"\"\n return _RequestContextManager(\n self._request(\n hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs\n )\n )\n\n def head(\n self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP HEAD request.\"\"\"\n return _RequestContextManager(\n self._request(\n hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs\n )\n )\n\n def post(\n self, url: StrOrURL, *, data: Any = None, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP POST request.\"\"\"\n return _RequestContextManager(\n self._request(hdrs.METH_POST, url, data=data, **kwargs)\n )\n\n def put(\n self, url: StrOrURL, *, data: Any = None, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP PUT request.\"\"\"\n return _RequestContextManager(\n self._request(hdrs.METH_PUT, url, data=data, **kwargs)\n )\n\n def patch(\n self, url: StrOrURL, *, data: Any = None, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP PATCH request.\"\"\"\n return _RequestContextManager(\n self._request(hdrs.METH_PATCH, url, data=data, **kwargs)\n )\n\n def delete(self, url: StrOrURL, **kwargs: Any) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP DELETE request.\"\"\"\n return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))\n\n async def close(self) -> None:\n \"\"\"Close underlying connector.\n\n Release all acquired resources.\n \"\"\"\n if not self.closed:\n if self._connector is not None and self._connector_owner:\n await self._connector.close()\n self._connector = None\n\n @property\n def closed(self) -> bool:\n \"\"\"Is client session closed.\n\n A readonly property.\n \"\"\"\n return self._connector is None or self._connector.closed\n\n @property\n def connector(self) -> Optional[BaseConnector]:\n \"\"\"Connector instance used for the session.\"\"\"\n return self._connector\n\n @property\n def cookie_jar(self) -> AbstractCookieJar:\n \"\"\"The session cookies.\"\"\"\n return self._cookie_jar\n\n @property\n def version(self) -> Tuple[int, int]:\n \"\"\"The session HTTP protocol version.\"\"\"\n return self._version\n\n @property\n def requote_redirect_url(self) -> bool:\n \"\"\"Do URL requoting on redirection handling.\"\"\"\n return self._requote_redirect_url\n\n @requote_redirect_url.setter\n def requote_redirect_url(self, val: bool) -> None:\n \"\"\"Do URL requoting on redirection handling.\"\"\"\n warnings.warn(\n \"session.requote_redirect_url modification \" \"is deprecated #2778\",\n DeprecationWarning,\n stacklevel=2,\n )\n self._requote_redirect_url = val\n\n @property\n def loop(self) -> asyncio.AbstractEventLoop:\n \"\"\"Session's loop.\"\"\"\n warnings.warn(\n \"client.loop property is deprecated\", DeprecationWarning, stacklevel=2\n )\n return self._loop\n\n @property\n def timeout(self) -> Union[object, ClientTimeout]:\n \"\"\"Timeout for the session.\"\"\"\n return self._timeout\n\n @property\n def headers(self) -> \"CIMultiDict[str]\":\n \"\"\"The default headers of the client session.\"\"\"\n return self._default_headers\n\n @property\n def skip_auto_headers(self) -> FrozenSet[istr]:\n \"\"\"Headers for which autogeneration 
should be skipped\"\"\"\n return self._skip_auto_headers\n\n @property\n def auth(self) -> Optional[BasicAuth]:\n \"\"\"An object that represents HTTP Basic Authorization\"\"\"\n return self._default_auth\n\n @property\n def json_serialize(self) -> JSONEncoder:\n \"\"\"Json serializer callable\"\"\"\n return self._json_serialize\n\n @property\n def connector_owner(self) -> bool:\n \"\"\"Should connector be closed on session closing\"\"\"\n return self._connector_owner\n\n @property\n def raise_for_status(\n self,\n ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:\n \"\"\"Should `ClientResponse.raise_for_status()` be called for each response.\"\"\"\n return self._raise_for_status\n\n @property\n def auto_decompress(self) -> bool:\n \"\"\"Should the body response be automatically decompressed.\"\"\"\n return self._auto_decompress\n\n @property\n def trust_env(self) -> bool:\n \"\"\"\n Should proxies information from environment or netrc be trusted.\n\n Information is from HTTP_PROXY / HTTPS_PROXY environment variables\n or ~/.netrc file if present.\n \"\"\"\n return self._trust_env\n\n @property\n def trace_configs(self) -> List[TraceConfig]:\n \"\"\"A list of TraceConfig instances used for client tracing\"\"\"\n return self._trace_configs\n\n def detach(self) -> None:\n \"\"\"Detach connector from session without closing the former.\n\n Session is switched to closed state anyway.\n \"\"\"\n self._connector = None\n\n def __enter__(self) -> None:\n raise TypeError(\"Use async with instead\")\n\n def __exit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_val: Optional[BaseException],\n exc_tb: Optional[TracebackType],\n ) -> None:\n # __exit__ should exist in pair with __enter__ but never executed\n pass # pragma: no cover\n\n async def __aenter__(self) -> \"ClientSession\":\n return self\n\n async def __aexit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_val: Optional[BaseException],\n exc_tb: Optional[TracebackType],\n ) -> None:\n await self.close()\n\n\nclass _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):\n\n __slots__ = (\"_coro\", \"_resp\")\n\n def __init__(self, coro: Coroutine[\"asyncio.Future[Any]\", None, _RetType]) -> None:\n self._coro = coro\n\n def send(self, arg: None) -> \"asyncio.Future[Any]\":\n return self._coro.send(arg)\n\n def throw(self, arg: BaseException) -> None: # type: ignore[override]\n self._coro.throw(arg)\n\n def close(self) -> None:\n return self._coro.close()\n\n def __await__(self) -> Generator[Any, None, _RetType]:\n ret = self._coro.__await__()\n return ret\n\n def __iter__(self) -> Generator[Any, None, _RetType]:\n return self.__await__()\n\n async def __aenter__(self) -> _RetType:\n self._resp = await self._coro\n return self._resp\n\n\nclass _RequestContextManager(_BaseRequestContextManager[ClientResponse]):\n __slots__ = ()\n\n async def __aexit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc: Optional[BaseException],\n tb: Optional[TracebackType],\n ) -> None:\n # We're basing behavior on the exception as it can be caused by\n # user code unrelated to the status of the connection. If you\n # would like to close a connection you must do that\n # explicitly. 
Otherwise connection error handling should kick in\n # and close/recycle the connection as required.\n self._resp.release()\n\n\nclass _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):\n __slots__ = ()\n\n async def __aexit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc: Optional[BaseException],\n tb: Optional[TracebackType],\n ) -> None:\n await self._resp.close()\n\n\nclass _SessionRequestContextManager:\n\n __slots__ = (\"_coro\", \"_resp\", \"_session\")\n\n def __init__(\n self,\n coro: Coroutine[\"asyncio.Future[Any]\", None, ClientResponse],\n session: ClientSession,\n ) -> None:\n self._coro = coro\n self._resp: Optional[ClientResponse] = None\n self._session = session\n\n async def __aenter__(self) -> ClientResponse:\n try:\n self._resp = await self._coro\n except BaseException:\n await self._session.close()\n raise\n else:\n return self._resp\n\n async def __aexit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc: Optional[BaseException],\n tb: Optional[TracebackType],\n ) -> None:\n assert self._resp is not None\n self._resp.close()\n await self._session.close()\n\n\ndef request(\n method: str,\n url: StrOrURL,\n *,\n params: Optional[Mapping[str, str]] = None,\n data: Any = None,\n json: Any = None,\n headers: Optional[LooseHeaders] = None,\n skip_auto_headers: Optional[Iterable[str]] = None,\n auth: Optional[BasicAuth] = None,\n allow_redirects: bool = True,\n max_redirects: int = 10,\n compress: Optional[str] = None,\n chunked: Optional[bool] = None,\n expect100: bool = False,\n raise_for_status: Optional[bool] = None,\n read_until_eof: bool = True,\n proxy: Optional[StrOrURL] = None,\n proxy_auth: Optional[BasicAuth] = None,\n timeout: Union[ClientTimeout, object] = sentinel,\n cookies: Optional[LooseCookies] = None,\n version: HttpVersion = http.HttpVersion11,\n connector: Optional[BaseConnector] = None,\n read_bufsize: Optional[int] = None,\n loop: Optional[asyncio.AbstractEventLoop] = None,\n) -> _SessionRequestContextManager:\n \"\"\"Constructs and sends a request.\n\n Returns response object.\n method - HTTP method\n url - request url\n params - (optional) Dictionary or bytes to be sent in the query\n string of the new request\n data - (optional) Dictionary, bytes, or file-like object to\n send in the body of the request\n json - (optional) Any json compatible python object\n headers - (optional) Dictionary of HTTP Headers to send with\n the request\n cookies - (optional) Dict object to send with the request\n auth - (optional) BasicAuth named tuple represent HTTP Basic Auth\n auth - aiohttp.helpers.BasicAuth\n allow_redirects - (optional) If set to False, do not follow\n redirects\n version - Request HTTP version.\n compress - Set to True if request has to be compressed\n with deflate encoding.\n chunked - Set to chunk size for chunked transfer encoding.\n expect100 - Expect 100-continue response from server.\n connector - BaseConnector sub-class instance to support\n connection pooling.\n read_until_eof - Read response until eof if response\n does not have Content-Length header.\n loop - Optional event loop.\n timeout - Optional ClientTimeout settings structure, 5min\n total timeout by default.\n Usage::\n >>> import aiohttp\n >>> resp = await aiohttp.request('GET', 'http://python.org/')\n >>> resp\n <ClientResponse(python.org/) [200]>\n >>> data = await resp.read()\n \"\"\"\n connector_owner = False\n if connector is None:\n connector_owner = True\n connector = TCPConnector(loop=loop, force_close=True)\n\n session = 
ClientSession(\n loop=loop,\n cookies=cookies,\n version=version,\n timeout=timeout,\n connector=connector,\n connector_owner=connector_owner,\n )\n\n return _SessionRequestContextManager(\n session._request(\n method,\n url,\n params=params,\n data=data,\n json=json,\n headers=headers,\n skip_auto_headers=skip_auto_headers,\n auth=auth,\n allow_redirects=allow_redirects,\n max_redirects=max_redirects,\n compress=compress,\n chunked=chunked,\n expect100=expect100,\n raise_for_status=raise_for_status,\n read_until_eof=read_until_eof,\n proxy=proxy,\n proxy_auth=proxy_auth,\n read_bufsize=read_bufsize,\n ),\n session,\n )\n", "path": "aiohttp/client.py" } ]
[ { "content": "\"\"\"HTTP Client for asyncio.\"\"\"\n\nimport asyncio\nimport base64\nimport hashlib\nimport json\nimport os\nimport sys\nimport traceback\nimport warnings\nfrom contextlib import suppress\nfrom types import SimpleNamespace, TracebackType\nfrom typing import (\n Any,\n Awaitable,\n Callable,\n Coroutine,\n FrozenSet,\n Generator,\n Generic,\n Iterable,\n List,\n Mapping,\n Optional,\n Set,\n Tuple,\n Type,\n TypeVar,\n Union,\n)\n\nimport attr\nfrom multidict import CIMultiDict, MultiDict, MultiDictProxy, istr\nfrom yarl import URL\n\nfrom . import hdrs, http, payload\nfrom .abc import AbstractCookieJar\nfrom .client_exceptions import (\n ClientConnectionError as ClientConnectionError,\n ClientConnectorCertificateError as ClientConnectorCertificateError,\n ClientConnectorError as ClientConnectorError,\n ClientConnectorSSLError as ClientConnectorSSLError,\n ClientError as ClientError,\n ClientHttpProxyError as ClientHttpProxyError,\n ClientOSError as ClientOSError,\n ClientPayloadError as ClientPayloadError,\n ClientProxyConnectionError as ClientProxyConnectionError,\n ClientResponseError as ClientResponseError,\n ClientSSLError as ClientSSLError,\n ContentTypeError as ContentTypeError,\n InvalidURL as InvalidURL,\n ServerConnectionError as ServerConnectionError,\n ServerDisconnectedError as ServerDisconnectedError,\n ServerFingerprintMismatch as ServerFingerprintMismatch,\n ServerTimeoutError as ServerTimeoutError,\n TooManyRedirects as TooManyRedirects,\n WSServerHandshakeError as WSServerHandshakeError,\n)\nfrom .client_reqrep import (\n ClientRequest as ClientRequest,\n ClientResponse as ClientResponse,\n Fingerprint as Fingerprint,\n RequestInfo as RequestInfo,\n _merge_ssl_params,\n)\nfrom .client_ws import ClientWebSocketResponse as ClientWebSocketResponse\nfrom .connector import (\n BaseConnector as BaseConnector,\n NamedPipeConnector as NamedPipeConnector,\n TCPConnector as TCPConnector,\n UnixConnector as UnixConnector,\n)\nfrom .cookiejar import CookieJar\nfrom .helpers import (\n _SENTINEL,\n DEBUG,\n PY_36,\n BasicAuth,\n TimeoutHandle,\n ceil_timeout,\n get_env_proxy_for_url,\n get_running_loop,\n sentinel,\n strip_auth_from_url,\n)\nfrom .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter\nfrom .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse\nfrom .streams import FlowControlDataQueue\nfrom .tracing import Trace, TraceConfig\nfrom .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL\n\n__all__ = (\n # client_exceptions\n \"ClientConnectionError\",\n \"ClientConnectorCertificateError\",\n \"ClientConnectorError\",\n \"ClientConnectorSSLError\",\n \"ClientError\",\n \"ClientHttpProxyError\",\n \"ClientOSError\",\n \"ClientPayloadError\",\n \"ClientProxyConnectionError\",\n \"ClientResponseError\",\n \"ClientSSLError\",\n \"ContentTypeError\",\n \"InvalidURL\",\n \"ServerConnectionError\",\n \"ServerDisconnectedError\",\n \"ServerFingerprintMismatch\",\n \"ServerTimeoutError\",\n \"TooManyRedirects\",\n \"WSServerHandshakeError\",\n # client_reqrep\n \"ClientRequest\",\n \"ClientResponse\",\n \"Fingerprint\",\n \"RequestInfo\",\n # connector\n \"BaseConnector\",\n \"TCPConnector\",\n \"UnixConnector\",\n \"NamedPipeConnector\",\n # client_ws\n \"ClientWebSocketResponse\",\n # client\n \"ClientSession\",\n \"ClientTimeout\",\n \"request\",\n)\n\n\ntry:\n from ssl import SSLContext\nexcept ImportError: # pragma: no cover\n SSLContext = object # type: ignore[misc,assignment]\n\n\[email 
protected](auto_attribs=True, frozen=True, slots=True)\nclass ClientTimeout:\n total: Optional[float] = None\n connect: Optional[float] = None\n sock_read: Optional[float] = None\n sock_connect: Optional[float] = None\n ceil_threshold: float = 5\n\n # pool_queue_timeout: Optional[float] = None\n # dns_resolution_timeout: Optional[float] = None\n # socket_connect_timeout: Optional[float] = None\n # connection_acquiring_timeout: Optional[float] = None\n # new_connection_timeout: Optional[float] = None\n # http_header_timeout: Optional[float] = None\n # response_body_timeout: Optional[float] = None\n\n # to create a timeout specific for a single request, either\n # - create a completely new one to overwrite the default\n # - or use http://www.attrs.org/en/stable/api.html#attr.evolve\n # to overwrite the defaults\n\n\n# 5 Minute default read timeout\nDEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)\n\n_RetType = TypeVar(\"_RetType\")\n\n\nclass ClientSession:\n \"\"\"First-class interface for making HTTP requests.\"\"\"\n\n ATTRS = frozenset(\n [\n \"_base_url\",\n \"_source_traceback\",\n \"_connector\",\n \"requote_redirect_url\",\n \"_loop\",\n \"_cookie_jar\",\n \"_connector_owner\",\n \"_default_auth\",\n \"_version\",\n \"_json_serialize\",\n \"_requote_redirect_url\",\n \"_timeout\",\n \"_raise_for_status\",\n \"_auto_decompress\",\n \"_trust_env\",\n \"_default_headers\",\n \"_skip_auto_headers\",\n \"_request_class\",\n \"_response_class\",\n \"_ws_response_class\",\n \"_trace_configs\",\n \"_read_bufsize\",\n ]\n )\n\n _source_traceback: Optional[traceback.StackSummary] = None\n _connector: Optional[BaseConnector] = None\n\n def __init__(\n self,\n base_url: Optional[StrOrURL] = None,\n *,\n connector: Optional[BaseConnector] = None,\n loop: Optional[asyncio.AbstractEventLoop] = None,\n cookies: Optional[LooseCookies] = None,\n headers: Optional[LooseHeaders] = None,\n skip_auto_headers: Optional[Iterable[str]] = None,\n auth: Optional[BasicAuth] = None,\n json_serialize: JSONEncoder = json.dumps,\n request_class: Type[ClientRequest] = ClientRequest,\n response_class: Type[ClientResponse] = ClientResponse,\n ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,\n version: HttpVersion = http.HttpVersion11,\n cookie_jar: Optional[AbstractCookieJar] = None,\n connector_owner: bool = True,\n raise_for_status: Union[\n bool, Callable[[ClientResponse], Awaitable[None]]\n ] = False,\n read_timeout: Union[float, object] = sentinel,\n conn_timeout: Optional[float] = None,\n timeout: Union[object, ClientTimeout] = sentinel,\n auto_decompress: bool = True,\n trust_env: bool = False,\n requote_redirect_url: bool = True,\n trace_configs: Optional[List[TraceConfig]] = None,\n read_bufsize: int = 2**16,\n ) -> None:\n if loop is None:\n if connector is not None:\n loop = connector._loop\n\n loop = get_running_loop(loop)\n\n if base_url is None or isinstance(base_url, URL):\n self._base_url: Optional[URL] = base_url\n else:\n self._base_url = URL(base_url)\n assert (\n self._base_url.origin() == self._base_url\n ), \"Only absolute URLs without path part are supported\"\n\n if connector is None:\n connector = TCPConnector(loop=loop)\n\n if connector._loop is not loop:\n raise RuntimeError(\"Session and connector has to use same event loop\")\n\n self._loop = loop\n\n if loop.get_debug():\n self._source_traceback = traceback.extract_stack(sys._getframe(1))\n\n if cookie_jar is None:\n cookie_jar = CookieJar(loop=loop)\n self._cookie_jar = cookie_jar\n\n if 
cookies is not None:\n self._cookie_jar.update_cookies(cookies)\n\n self._connector = connector\n self._connector_owner = connector_owner\n self._default_auth = auth\n self._version = version\n self._json_serialize = json_serialize\n if timeout is sentinel:\n self._timeout = DEFAULT_TIMEOUT\n if read_timeout is not sentinel:\n warnings.warn(\n \"read_timeout is deprecated, \" \"use timeout argument instead\",\n DeprecationWarning,\n stacklevel=2,\n )\n self._timeout = attr.evolve(self._timeout, total=read_timeout)\n if conn_timeout is not None:\n self._timeout = attr.evolve(self._timeout, connect=conn_timeout)\n warnings.warn(\n \"conn_timeout is deprecated, \" \"use timeout argument instead\",\n DeprecationWarning,\n stacklevel=2,\n )\n else:\n self._timeout = timeout # type: ignore[assignment]\n if read_timeout is not sentinel:\n raise ValueError(\n \"read_timeout and timeout parameters \"\n \"conflict, please setup \"\n \"timeout.read\"\n )\n if conn_timeout is not None:\n raise ValueError(\n \"conn_timeout and timeout parameters \"\n \"conflict, please setup \"\n \"timeout.connect\"\n )\n self._raise_for_status = raise_for_status\n self._auto_decompress = auto_decompress\n self._trust_env = trust_env\n self._requote_redirect_url = requote_redirect_url\n self._read_bufsize = read_bufsize\n\n # Convert to list of tuples\n if headers:\n real_headers: CIMultiDict[str] = CIMultiDict(headers)\n else:\n real_headers = CIMultiDict()\n self._default_headers: CIMultiDict[str] = real_headers\n if skip_auto_headers is not None:\n self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)\n else:\n self._skip_auto_headers = frozenset()\n\n self._request_class = request_class\n self._response_class = response_class\n self._ws_response_class = ws_response_class\n\n self._trace_configs = trace_configs or []\n for trace_config in self._trace_configs:\n trace_config.freeze()\n\n def __init_subclass__(cls: Type[\"ClientSession\"]) -> None:\n warnings.warn(\n \"Inheritance class {} from ClientSession \"\n \"is discouraged\".format(cls.__name__),\n DeprecationWarning,\n stacklevel=2,\n )\n\n if DEBUG:\n\n def __setattr__(self, name: str, val: Any) -> None:\n if name not in self.ATTRS:\n warnings.warn(\n \"Setting custom ClientSession.{} attribute \"\n \"is discouraged\".format(name),\n DeprecationWarning,\n stacklevel=2,\n )\n super().__setattr__(name, val)\n\n def __del__(self, _warnings: Any = warnings) -> None:\n if not self.closed:\n if PY_36:\n kwargs = {\"source\": self}\n else:\n kwargs = {}\n _warnings.warn(\n f\"Unclosed client session {self!r}\", ResourceWarning, **kwargs\n )\n context = {\"client_session\": self, \"message\": \"Unclosed client session\"}\n if self._source_traceback is not None:\n context[\"source_traceback\"] = self._source_traceback\n self._loop.call_exception_handler(context)\n\n def request(\n self, method: str, url: StrOrURL, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP request.\"\"\"\n return _RequestContextManager(self._request(method, url, **kwargs))\n\n def _build_url(self, str_or_url: StrOrURL) -> URL:\n url = URL(str_or_url)\n if self._base_url is None:\n return url\n else:\n assert not url.is_absolute() and url.path.startswith(\"/\")\n return self._base_url.join(url)\n\n async def _request(\n self,\n method: str,\n str_or_url: StrOrURL,\n *,\n params: Optional[Mapping[str, str]] = None,\n data: Any = None,\n json: Any = None,\n cookies: Optional[LooseCookies] = None,\n headers: Optional[LooseHeaders] = None,\n skip_auto_headers: 
Optional[Iterable[str]] = None,\n auth: Optional[BasicAuth] = None,\n allow_redirects: bool = True,\n max_redirects: int = 10,\n compress: Optional[str] = None,\n chunked: Optional[bool] = None,\n expect100: bool = False,\n raise_for_status: Union[\n None, bool, Callable[[ClientResponse], Awaitable[None]]\n ] = None,\n read_until_eof: bool = True,\n proxy: Optional[StrOrURL] = None,\n proxy_auth: Optional[BasicAuth] = None,\n timeout: Union[ClientTimeout, _SENTINEL] = sentinel,\n verify_ssl: Optional[bool] = None,\n fingerprint: Optional[bytes] = None,\n ssl_context: Optional[SSLContext] = None,\n ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None,\n proxy_headers: Optional[LooseHeaders] = None,\n trace_request_ctx: Optional[SimpleNamespace] = None,\n read_bufsize: Optional[int] = None,\n ) -> ClientResponse:\n\n # NOTE: timeout clamps existing connect and read timeouts. We cannot\n # set the default to None because we need to detect if the user wants\n # to use the existing timeouts by setting timeout to None.\n\n if self.closed:\n raise RuntimeError(\"Session is closed\")\n\n ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)\n\n if data is not None and json is not None:\n raise ValueError(\n \"data and json parameters can not be used at the same time\"\n )\n elif json is not None:\n data = payload.JsonPayload(json, dumps=self._json_serialize)\n\n if not isinstance(chunked, bool) and chunked is not None:\n warnings.warn(\"Chunk size is deprecated #1615\", DeprecationWarning)\n\n redirects = 0\n history = []\n version = self._version\n\n # Merge with default headers and transform to CIMultiDict\n headers = self._prepare_headers(headers)\n proxy_headers = self._prepare_headers(proxy_headers)\n\n try:\n url = self._build_url(str_or_url)\n except ValueError as e:\n raise InvalidURL(str_or_url) from e\n\n skip_headers = set(self._skip_auto_headers)\n if skip_auto_headers is not None:\n for i in skip_auto_headers:\n skip_headers.add(istr(i))\n\n if proxy is not None:\n try:\n proxy = URL(proxy)\n except ValueError as e:\n raise InvalidURL(proxy) from e\n\n if timeout is sentinel:\n real_timeout: ClientTimeout = self._timeout\n else:\n if not isinstance(timeout, ClientTimeout):\n real_timeout = ClientTimeout(total=timeout)\n else:\n real_timeout = timeout\n # timeout is cumulative for all request operations\n # (request, redirects, responses, data consuming)\n tm = TimeoutHandle(\n self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold\n )\n handle = tm.start()\n\n if read_bufsize is None:\n read_bufsize = self._read_bufsize\n\n traces = [\n Trace(\n self,\n trace_config,\n trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),\n )\n for trace_config in self._trace_configs\n ]\n\n for trace in traces:\n await trace.send_request_start(method, url.update_query(params), headers)\n\n timer = tm.timer()\n try:\n with timer:\n while True:\n url, auth_from_url = strip_auth_from_url(url)\n if auth and auth_from_url:\n raise ValueError(\n \"Cannot combine AUTH argument with \"\n \"credentials encoded in URL\"\n )\n\n if auth is None:\n auth = auth_from_url\n if auth is None:\n auth = self._default_auth\n # It would be confusing if we support explicit\n # Authorization header with auth argument\n if (\n headers is not None\n and auth is not None\n and hdrs.AUTHORIZATION in headers\n ):\n raise ValueError(\n \"Cannot combine AUTHORIZATION header \"\n \"with AUTH argument or credentials \"\n \"encoded in URL\"\n )\n\n all_cookies = 
self._cookie_jar.filter_cookies(url)\n\n if cookies is not None:\n tmp_cookie_jar = CookieJar()\n tmp_cookie_jar.update_cookies(cookies)\n req_cookies = tmp_cookie_jar.filter_cookies(url)\n if req_cookies:\n all_cookies.load(req_cookies)\n\n if proxy is not None:\n proxy = URL(proxy)\n elif self._trust_env:\n with suppress(LookupError):\n proxy, proxy_auth = get_env_proxy_for_url(url)\n\n req = self._request_class(\n method,\n url,\n params=params,\n headers=headers,\n skip_auto_headers=skip_headers,\n data=data,\n cookies=all_cookies,\n auth=auth,\n version=version,\n compress=compress,\n chunked=chunked,\n expect100=expect100,\n loop=self._loop,\n response_class=self._response_class,\n proxy=proxy,\n proxy_auth=proxy_auth,\n timer=timer,\n session=self,\n ssl=ssl,\n proxy_headers=proxy_headers,\n traces=traces,\n )\n\n # connection timeout\n try:\n async with ceil_timeout(\n real_timeout.connect,\n ceil_threshold=real_timeout.ceil_threshold,\n ):\n assert self._connector is not None\n conn = await self._connector.connect(\n req, traces=traces, timeout=real_timeout\n )\n except asyncio.TimeoutError as exc:\n raise ServerTimeoutError(\n \"Connection timeout \" \"to host {}\".format(url)\n ) from exc\n\n assert conn.transport is not None\n\n assert conn.protocol is not None\n conn.protocol.set_response_params(\n timer=timer,\n skip_payload=method.upper() == \"HEAD\",\n read_until_eof=read_until_eof,\n auto_decompress=self._auto_decompress,\n read_timeout=real_timeout.sock_read,\n read_bufsize=read_bufsize,\n timeout_ceil_threshold=self._connector._timeout_ceil_threshold,\n )\n\n try:\n try:\n resp = await req.send(conn)\n try:\n await resp.start(conn)\n except BaseException:\n resp.close()\n raise\n except BaseException:\n conn.close()\n raise\n except ClientError:\n raise\n except OSError as exc:\n if exc.errno is None and isinstance(exc, asyncio.TimeoutError):\n raise\n raise ClientOSError(*exc.args) from exc\n\n self._cookie_jar.update_cookies(resp.cookies, resp.url)\n\n # redirects\n if resp.status in (301, 302, 303, 307, 308) and allow_redirects:\n\n for trace in traces:\n await trace.send_request_redirect(\n method, url.update_query(params), headers, resp\n )\n\n redirects += 1\n history.append(resp)\n if max_redirects and redirects >= max_redirects:\n resp.close()\n raise TooManyRedirects(\n history[0].request_info, tuple(history)\n )\n\n # For 301 and 302, mimic IE, now changed in RFC\n # https://github.com/kennethreitz/requests/pull/269\n if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (\n resp.status in (301, 302) and resp.method == hdrs.METH_POST\n ):\n method = hdrs.METH_GET\n data = None\n if headers.get(hdrs.CONTENT_LENGTH):\n headers.pop(hdrs.CONTENT_LENGTH)\n\n r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(\n hdrs.URI\n )\n if r_url is None:\n # see github.com/aio-libs/aiohttp/issues/2022\n break\n else:\n # reading from correct redirection\n # response is forbidden\n resp.release()\n\n try:\n parsed_url = URL(\n r_url, encoded=not self._requote_redirect_url\n )\n\n except ValueError as e:\n raise InvalidURL(r_url) from e\n\n scheme = parsed_url.scheme\n if scheme not in (\"http\", \"https\", \"\"):\n resp.close()\n raise ValueError(\"Can redirect only to http or https\")\n elif not scheme:\n parsed_url = url.join(parsed_url)\n\n if url.origin() != parsed_url.origin():\n auth = None\n headers.pop(hdrs.AUTHORIZATION, None)\n\n url = parsed_url\n params = None\n resp.release()\n continue\n\n break\n\n # check response status\n if raise_for_status is 
None:\n raise_for_status = self._raise_for_status\n\n if raise_for_status is None:\n pass\n elif callable(raise_for_status):\n await raise_for_status(resp)\n elif raise_for_status:\n resp.raise_for_status()\n\n # register connection\n if handle is not None:\n if resp.connection is not None:\n resp.connection.add_callback(handle.cancel)\n else:\n handle.cancel()\n\n resp._history = tuple(history)\n\n for trace in traces:\n await trace.send_request_end(\n method, url.update_query(params), headers, resp\n )\n return resp\n\n except BaseException as e:\n # cleanup timer\n tm.close()\n if handle:\n handle.cancel()\n handle = None\n\n for trace in traces:\n await trace.send_request_exception(\n method, url.update_query(params), headers, e\n )\n raise\n\n def ws_connect(\n self,\n url: StrOrURL,\n *,\n method: str = hdrs.METH_GET,\n protocols: Iterable[str] = (),\n timeout: float = 10.0,\n receive_timeout: Optional[float] = None,\n autoclose: bool = True,\n autoping: bool = True,\n heartbeat: Optional[float] = None,\n auth: Optional[BasicAuth] = None,\n origin: Optional[str] = None,\n params: Optional[Mapping[str, str]] = None,\n headers: Optional[LooseHeaders] = None,\n proxy: Optional[StrOrURL] = None,\n proxy_auth: Optional[BasicAuth] = None,\n ssl: Union[SSLContext, bool, None, Fingerprint] = None,\n verify_ssl: Optional[bool] = None,\n fingerprint: Optional[bytes] = None,\n ssl_context: Optional[SSLContext] = None,\n proxy_headers: Optional[LooseHeaders] = None,\n compress: int = 0,\n max_msg_size: int = 4 * 1024 * 1024,\n ) -> \"_WSRequestContextManager\":\n \"\"\"Initiate websocket connection.\"\"\"\n return _WSRequestContextManager(\n self._ws_connect(\n url,\n method=method,\n protocols=protocols,\n timeout=timeout,\n receive_timeout=receive_timeout,\n autoclose=autoclose,\n autoping=autoping,\n heartbeat=heartbeat,\n auth=auth,\n origin=origin,\n params=params,\n headers=headers,\n proxy=proxy,\n proxy_auth=proxy_auth,\n ssl=ssl,\n verify_ssl=verify_ssl,\n fingerprint=fingerprint,\n ssl_context=ssl_context,\n proxy_headers=proxy_headers,\n compress=compress,\n max_msg_size=max_msg_size,\n )\n )\n\n async def _ws_connect(\n self,\n url: StrOrURL,\n *,\n method: str = hdrs.METH_GET,\n protocols: Iterable[str] = (),\n timeout: float = 10.0,\n receive_timeout: Optional[float] = None,\n autoclose: bool = True,\n autoping: bool = True,\n heartbeat: Optional[float] = None,\n auth: Optional[BasicAuth] = None,\n origin: Optional[str] = None,\n params: Optional[Mapping[str, str]] = None,\n headers: Optional[LooseHeaders] = None,\n proxy: Optional[StrOrURL] = None,\n proxy_auth: Optional[BasicAuth] = None,\n ssl: Union[SSLContext, bool, None, Fingerprint] = None,\n verify_ssl: Optional[bool] = None,\n fingerprint: Optional[bytes] = None,\n ssl_context: Optional[SSLContext] = None,\n proxy_headers: Optional[LooseHeaders] = None,\n compress: int = 0,\n max_msg_size: int = 4 * 1024 * 1024,\n ) -> ClientWebSocketResponse:\n\n if headers is None:\n real_headers: CIMultiDict[str] = CIMultiDict()\n else:\n real_headers = CIMultiDict(headers)\n\n default_headers = {\n hdrs.UPGRADE: \"websocket\",\n hdrs.CONNECTION: \"upgrade\",\n hdrs.SEC_WEBSOCKET_VERSION: \"13\",\n }\n\n for key, value in default_headers.items():\n real_headers.setdefault(key, value)\n\n sec_key = base64.b64encode(os.urandom(16))\n real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()\n\n if protocols:\n real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = \",\".join(protocols)\n if origin is not None:\n real_headers[hdrs.ORIGIN] = origin\n if 
compress:\n extstr = ws_ext_gen(compress=compress)\n real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr\n\n ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)\n\n # send request\n resp = await self.request(\n method,\n url,\n params=params,\n headers=real_headers,\n read_until_eof=False,\n auth=auth,\n proxy=proxy,\n proxy_auth=proxy_auth,\n ssl=ssl,\n proxy_headers=proxy_headers,\n )\n\n try:\n # check handshake\n if resp.status != 101:\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=\"Invalid response status\",\n status=resp.status,\n headers=resp.headers,\n )\n\n if resp.headers.get(hdrs.UPGRADE, \"\").lower() != \"websocket\":\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=\"Invalid upgrade header\",\n status=resp.status,\n headers=resp.headers,\n )\n\n if resp.headers.get(hdrs.CONNECTION, \"\").lower() != \"upgrade\":\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=\"Invalid connection header\",\n status=resp.status,\n headers=resp.headers,\n )\n\n # key calculation\n r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, \"\")\n match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()\n if r_key != match:\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=\"Invalid challenge response\",\n status=resp.status,\n headers=resp.headers,\n )\n\n # websocket protocol\n protocol = None\n if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:\n resp_protocols = [\n proto.strip()\n for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(\",\")\n ]\n\n for proto in resp_protocols:\n if proto in protocols:\n protocol = proto\n break\n\n # websocket compress\n notakeover = False\n if compress:\n compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)\n if compress_hdrs:\n try:\n compress, notakeover = ws_ext_parse(compress_hdrs)\n except WSHandshakeError as exc:\n raise WSServerHandshakeError(\n resp.request_info,\n resp.history,\n message=exc.args[0],\n status=resp.status,\n headers=resp.headers,\n ) from exc\n else:\n compress = 0\n notakeover = False\n\n conn = resp.connection\n assert conn is not None\n conn_proto = conn.protocol\n assert conn_proto is not None\n transport = conn.transport\n assert transport is not None\n reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(\n conn_proto, 2**16, loop=self._loop\n )\n conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)\n writer = WebSocketWriter(\n conn_proto,\n transport,\n use_mask=True,\n compress=compress,\n notakeover=notakeover,\n )\n except BaseException:\n resp.close()\n raise\n else:\n return self._ws_response_class(\n reader,\n writer,\n protocol,\n resp,\n timeout,\n autoclose,\n autoping,\n self._loop,\n receive_timeout=receive_timeout,\n heartbeat=heartbeat,\n compress=compress,\n client_notakeover=notakeover,\n )\n\n def _prepare_headers(self, headers: Optional[LooseHeaders]) -> \"CIMultiDict[str]\":\n \"\"\"Add default headers and transform it to CIMultiDict\"\"\"\n # Convert headers to MultiDict\n result = CIMultiDict(self._default_headers)\n if headers:\n if not isinstance(headers, (MultiDictProxy, MultiDict)):\n headers = CIMultiDict(headers)\n added_names: Set[str] = set()\n for key, value in headers.items():\n if key in added_names:\n result.add(key, value)\n else:\n result[key] = value\n added_names.add(key)\n return result\n\n def get(\n self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any\n ) -> 
\"_RequestContextManager\":\n \"\"\"Perform HTTP GET request.\"\"\"\n return _RequestContextManager(\n self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)\n )\n\n def options(\n self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP OPTIONS request.\"\"\"\n return _RequestContextManager(\n self._request(\n hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs\n )\n )\n\n def head(\n self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP HEAD request.\"\"\"\n return _RequestContextManager(\n self._request(\n hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs\n )\n )\n\n def post(\n self, url: StrOrURL, *, data: Any = None, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP POST request.\"\"\"\n return _RequestContextManager(\n self._request(hdrs.METH_POST, url, data=data, **kwargs)\n )\n\n def put(\n self, url: StrOrURL, *, data: Any = None, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP PUT request.\"\"\"\n return _RequestContextManager(\n self._request(hdrs.METH_PUT, url, data=data, **kwargs)\n )\n\n def patch(\n self, url: StrOrURL, *, data: Any = None, **kwargs: Any\n ) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP PATCH request.\"\"\"\n return _RequestContextManager(\n self._request(hdrs.METH_PATCH, url, data=data, **kwargs)\n )\n\n def delete(self, url: StrOrURL, **kwargs: Any) -> \"_RequestContextManager\":\n \"\"\"Perform HTTP DELETE request.\"\"\"\n return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))\n\n async def close(self) -> None:\n \"\"\"Close underlying connector.\n\n Release all acquired resources.\n \"\"\"\n if not self.closed:\n if self._connector is not None and self._connector_owner:\n await self._connector.close()\n self._connector = None\n\n @property\n def closed(self) -> bool:\n \"\"\"Is client session closed.\n\n A readonly property.\n \"\"\"\n return self._connector is None or self._connector.closed\n\n @property\n def connector(self) -> Optional[BaseConnector]:\n \"\"\"Connector instance used for the session.\"\"\"\n return self._connector\n\n @property\n def cookie_jar(self) -> AbstractCookieJar:\n \"\"\"The session cookies.\"\"\"\n return self._cookie_jar\n\n @property\n def version(self) -> Tuple[int, int]:\n \"\"\"The session HTTP protocol version.\"\"\"\n return self._version\n\n @property\n def requote_redirect_url(self) -> bool:\n \"\"\"Do URL requoting on redirection handling.\"\"\"\n return self._requote_redirect_url\n\n @requote_redirect_url.setter\n def requote_redirect_url(self, val: bool) -> None:\n \"\"\"Do URL requoting on redirection handling.\"\"\"\n warnings.warn(\n \"session.requote_redirect_url modification \" \"is deprecated #2778\",\n DeprecationWarning,\n stacklevel=2,\n )\n self._requote_redirect_url = val\n\n @property\n def loop(self) -> asyncio.AbstractEventLoop:\n \"\"\"Session's loop.\"\"\"\n warnings.warn(\n \"client.loop property is deprecated\", DeprecationWarning, stacklevel=2\n )\n return self._loop\n\n @property\n def timeout(self) -> ClientTimeout:\n \"\"\"Timeout for the session.\"\"\"\n return self._timeout\n\n @property\n def headers(self) -> \"CIMultiDict[str]\":\n \"\"\"The default headers of the client session.\"\"\"\n return self._default_headers\n\n @property\n def skip_auto_headers(self) -> FrozenSet[istr]:\n \"\"\"Headers for which autogeneration should be 
skipped\"\"\"\n return self._skip_auto_headers\n\n @property\n def auth(self) -> Optional[BasicAuth]:\n \"\"\"An object that represents HTTP Basic Authorization\"\"\"\n return self._default_auth\n\n @property\n def json_serialize(self) -> JSONEncoder:\n \"\"\"Json serializer callable\"\"\"\n return self._json_serialize\n\n @property\n def connector_owner(self) -> bool:\n \"\"\"Should connector be closed on session closing\"\"\"\n return self._connector_owner\n\n @property\n def raise_for_status(\n self,\n ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:\n \"\"\"Should `ClientResponse.raise_for_status()` be called for each response.\"\"\"\n return self._raise_for_status\n\n @property\n def auto_decompress(self) -> bool:\n \"\"\"Should the body response be automatically decompressed.\"\"\"\n return self._auto_decompress\n\n @property\n def trust_env(self) -> bool:\n \"\"\"\n Should proxies information from environment or netrc be trusted.\n\n Information is from HTTP_PROXY / HTTPS_PROXY environment variables\n or ~/.netrc file if present.\n \"\"\"\n return self._trust_env\n\n @property\n def trace_configs(self) -> List[TraceConfig]:\n \"\"\"A list of TraceConfig instances used for client tracing\"\"\"\n return self._trace_configs\n\n def detach(self) -> None:\n \"\"\"Detach connector from session without closing the former.\n\n Session is switched to closed state anyway.\n \"\"\"\n self._connector = None\n\n def __enter__(self) -> None:\n raise TypeError(\"Use async with instead\")\n\n def __exit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_val: Optional[BaseException],\n exc_tb: Optional[TracebackType],\n ) -> None:\n # __exit__ should exist in pair with __enter__ but never executed\n pass # pragma: no cover\n\n async def __aenter__(self) -> \"ClientSession\":\n return self\n\n async def __aexit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_val: Optional[BaseException],\n exc_tb: Optional[TracebackType],\n ) -> None:\n await self.close()\n\n\nclass _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):\n\n __slots__ = (\"_coro\", \"_resp\")\n\n def __init__(self, coro: Coroutine[\"asyncio.Future[Any]\", None, _RetType]) -> None:\n self._coro = coro\n\n def send(self, arg: None) -> \"asyncio.Future[Any]\":\n return self._coro.send(arg)\n\n def throw(self, arg: BaseException) -> None: # type: ignore[override]\n self._coro.throw(arg)\n\n def close(self) -> None:\n return self._coro.close()\n\n def __await__(self) -> Generator[Any, None, _RetType]:\n ret = self._coro.__await__()\n return ret\n\n def __iter__(self) -> Generator[Any, None, _RetType]:\n return self.__await__()\n\n async def __aenter__(self) -> _RetType:\n self._resp = await self._coro\n return self._resp\n\n\nclass _RequestContextManager(_BaseRequestContextManager[ClientResponse]):\n __slots__ = ()\n\n async def __aexit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc: Optional[BaseException],\n tb: Optional[TracebackType],\n ) -> None:\n # We're basing behavior on the exception as it can be caused by\n # user code unrelated to the status of the connection. If you\n # would like to close a connection you must do that\n # explicitly. 
Otherwise connection error handling should kick in\n # and close/recycle the connection as required.\n self._resp.release()\n\n\nclass _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):\n __slots__ = ()\n\n async def __aexit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc: Optional[BaseException],\n tb: Optional[TracebackType],\n ) -> None:\n await self._resp.close()\n\n\nclass _SessionRequestContextManager:\n\n __slots__ = (\"_coro\", \"_resp\", \"_session\")\n\n def __init__(\n self,\n coro: Coroutine[\"asyncio.Future[Any]\", None, ClientResponse],\n session: ClientSession,\n ) -> None:\n self._coro = coro\n self._resp: Optional[ClientResponse] = None\n self._session = session\n\n async def __aenter__(self) -> ClientResponse:\n try:\n self._resp = await self._coro\n except BaseException:\n await self._session.close()\n raise\n else:\n return self._resp\n\n async def __aexit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc: Optional[BaseException],\n tb: Optional[TracebackType],\n ) -> None:\n assert self._resp is not None\n self._resp.close()\n await self._session.close()\n\n\ndef request(\n method: str,\n url: StrOrURL,\n *,\n params: Optional[Mapping[str, str]] = None,\n data: Any = None,\n json: Any = None,\n headers: Optional[LooseHeaders] = None,\n skip_auto_headers: Optional[Iterable[str]] = None,\n auth: Optional[BasicAuth] = None,\n allow_redirects: bool = True,\n max_redirects: int = 10,\n compress: Optional[str] = None,\n chunked: Optional[bool] = None,\n expect100: bool = False,\n raise_for_status: Optional[bool] = None,\n read_until_eof: bool = True,\n proxy: Optional[StrOrURL] = None,\n proxy_auth: Optional[BasicAuth] = None,\n timeout: Union[ClientTimeout, object] = sentinel,\n cookies: Optional[LooseCookies] = None,\n version: HttpVersion = http.HttpVersion11,\n connector: Optional[BaseConnector] = None,\n read_bufsize: Optional[int] = None,\n loop: Optional[asyncio.AbstractEventLoop] = None,\n) -> _SessionRequestContextManager:\n \"\"\"Constructs and sends a request.\n\n Returns response object.\n method - HTTP method\n url - request url\n params - (optional) Dictionary or bytes to be sent in the query\n string of the new request\n data - (optional) Dictionary, bytes, or file-like object to\n send in the body of the request\n json - (optional) Any json compatible python object\n headers - (optional) Dictionary of HTTP Headers to send with\n the request\n cookies - (optional) Dict object to send with the request\n auth - (optional) BasicAuth named tuple represent HTTP Basic Auth\n auth - aiohttp.helpers.BasicAuth\n allow_redirects - (optional) If set to False, do not follow\n redirects\n version - Request HTTP version.\n compress - Set to True if request has to be compressed\n with deflate encoding.\n chunked - Set to chunk size for chunked transfer encoding.\n expect100 - Expect 100-continue response from server.\n connector - BaseConnector sub-class instance to support\n connection pooling.\n read_until_eof - Read response until eof if response\n does not have Content-Length header.\n loop - Optional event loop.\n timeout - Optional ClientTimeout settings structure, 5min\n total timeout by default.\n Usage::\n >>> import aiohttp\n >>> resp = await aiohttp.request('GET', 'http://python.org/')\n >>> resp\n <ClientResponse(python.org/) [200]>\n >>> data = await resp.read()\n \"\"\"\n connector_owner = False\n if connector is None:\n connector_owner = True\n connector = TCPConnector(loop=loop, force_close=True)\n\n session = 
ClientSession(\n loop=loop,\n cookies=cookies,\n version=version,\n timeout=timeout,\n connector=connector,\n connector_owner=connector_owner,\n )\n\n return _SessionRequestContextManager(\n session._request(\n method,\n url,\n params=params,\n data=data,\n json=json,\n headers=headers,\n skip_auto_headers=skip_auto_headers,\n auth=auth,\n allow_redirects=allow_redirects,\n max_redirects=max_redirects,\n compress=compress,\n chunked=chunked,\n expect100=expect100,\n raise_for_status=raise_for_status,\n read_until_eof=read_until_eof,\n proxy=proxy,\n proxy_auth=proxy_auth,\n read_bufsize=read_bufsize,\n ),\n session,\n )\n", "path": "aiohttp/client.py" } ]
diff --git a/CHANGES/6917.bugfix b/CHANGES/6917.bugfix new file mode 100644 index 00000000000..468e21a2b0f --- /dev/null +++ b/CHANGES/6917.bugfix @@ -0,0 +1,3 @@ +Dropped the :class:`object` type possibility from +the :py:attr:`aiohttp.ClientSession.timeout` +property return type declaration. diff --git a/CHANGES/6917.doc b/CHANGES/6917.doc new file mode 120000 index 00000000000..b8eddb8d6dc --- /dev/null +++ b/CHANGES/6917.doc @@ -0,0 +1 @@ +6917.bugfix \ No newline at end of file diff --git a/CHANGES/6923.bugfix b/CHANGES/6923.bugfix new file mode 120000 index 00000000000..b8eddb8d6dc --- /dev/null +++ b/CHANGES/6923.bugfix @@ -0,0 +1 @@ +6917.bugfix \ No newline at end of file diff --git a/CHANGES/6923.doc b/CHANGES/6923.doc new file mode 120000 index 00000000000..c05397962f9 --- /dev/null +++ b/CHANGES/6923.doc @@ -0,0 +1 @@ +6917.doc \ No newline at end of file diff --git a/aiohttp/client.py b/aiohttp/client.py index 3746d6d81e4..c555a64808b 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -1044,7 +1044,7 @@ def loop(self) -> asyncio.AbstractEventLoop: return self._loop @property - def timeout(self) -> Union[object, ClientTimeout]: + def timeout(self) -> ClientTimeout: """Timeout for the session.""" return self._timeout
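Not part of the original record — a brief, hedged illustration of what the narrowed annotation in the diff above buys callers: with `ClientSession.timeout` declared as `ClientTimeout` rather than `Union[object, ClientTimeout]`, a static type checker accepts attribute access on the property without a cast. The sketch only assumes an aiohttp version that ships this annotation.

```python
# Minimal usage sketch (assumes aiohttp with the ClientTimeout-only annotation).
import asyncio

import aiohttp


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        # With the old Union[object, ClientTimeout] annotation, type checkers
        # flagged this attribute access; with the narrowed annotation it
        # type-checks as written.
        print(session.timeout.total)  # 5 * 60 seconds by default


asyncio.run(main())
```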
mathesar-foundation__mathesar-341
Individually run API tests don't build tables database
## Description
Running an individual test in `mathesar` that doesn't use the `engine` or `test_db` fixture will not have the tables database built for the test. As a result, many tests will error when trying to access the tables database.

## Expected behavior
The tables database should always be built.

## To Reproduce
Run any test in `mathesar` that doesn't use `engine` or `test_db`. Ex:
```
docker exec mathesar_web_1 pytest mathesar/tests/views/api/test_schema_api.py::test_schema_update
```

## Additional context
Introduced due to the changes in #329, since `pytest-django` no longer creates the tables db for us.
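For illustration only (not part of the issue or of Mathesar's code): the failure mode comes from how pytest instantiates fixtures — a session-scoped fixture only runs when some selected test requests it, directly or transitively, so marking it `autouse=True` is the usual way to guarantee it runs for every test session. A minimal, self-contained sketch with hypothetical names:

```python
# test_autouse_demo.py — hypothetical sketch, not Mathesar's code.
# Run with: pytest -s test_autouse_demo.py
import pytest


@pytest.fixture(scope="session", autouse=True)
def build_tables_db():
    # Stand-in for the real CREATE DATABASE step; with autouse=True this runs
    # once per session even though the test below never requests the fixture.
    print("creating tables database")
    yield "mathesar_db_test"
    # Stand-in for the real DROP DATABASE teardown.
    print("dropping tables database")


def test_without_requesting_the_fixture():
    # The autouse fixture has already run by the time this body executes; without
    # autouse=True it would only run for tests that list it as an argument.
    assert True
```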
[ { "content": "\"\"\"\nThis file should provide utilities for setting up test DBs and the like. It's\nintended to be the containment zone for anything specific about the testing\nenvironment (e.g., the login info for the Postgres instance for testing)\n\"\"\"\nimport pytest\nfrom sqlalchemy import create_engine, text\nfrom config.settings import DATABASES\n\nTEST_DB = \"mathesar_db_test\"\n\n\[email protected](scope=\"session\")\ndef test_db_name():\n return TEST_DB\n\n\[email protected](scope=\"session\")\ndef test_db():\n superuser_engine = _get_superuser_engine()\n with superuser_engine.connect() as conn:\n conn.execution_options(isolation_level=\"AUTOCOMMIT\")\n conn.execute(text(f\"DROP DATABASE IF EXISTS {TEST_DB} WITH (FORCE)\"))\n conn.execute(text(f\"CREATE DATABASE {TEST_DB}\"))\n yield TEST_DB\n with superuser_engine.connect() as conn:\n conn.execution_options(isolation_level=\"AUTOCOMMIT\")\n conn.execute(text(f\"DROP DATABASE {TEST_DB} WITH (FORCE)\"))\n\n\[email protected](scope=\"session\")\ndef engine(test_db):\n return create_engine(\n _get_connection_string(\n DATABASES[\"default\"][\"USER\"],\n DATABASES[\"default\"][\"PASSWORD\"],\n DATABASES[\"default\"][\"HOST\"],\n test_db,\n ),\n future=True,\n )\n\n\ndef _get_superuser_engine():\n return create_engine(\n _get_connection_string(\n username=DATABASES[\"default\"][\"USER\"],\n password=DATABASES[\"default\"][\"PASSWORD\"],\n hostname=DATABASES[\"default\"][\"HOST\"],\n database=DATABASES[\"default\"][\"NAME\"],\n ),\n future=True,\n )\n\n\ndef _get_connection_string(username, password, hostname, database):\n return f\"postgresql://{username}:{password}@{hostname}/{database}\"\n", "path": "conftest.py" } ]
[ { "content": "\"\"\"\nThis file should provide utilities for setting up test DBs and the like. It's\nintended to be the containment zone for anything specific about the testing\nenvironment (e.g., the login info for the Postgres instance for testing)\n\"\"\"\nimport pytest\nfrom sqlalchemy import create_engine, text\nfrom config.settings import DATABASES\n\nTEST_DB = \"mathesar_db_test\"\n\n\[email protected](scope=\"session\")\ndef test_db_name():\n return TEST_DB\n\n\[email protected](scope=\"session\", autouse=True)\ndef test_db():\n superuser_engine = _get_superuser_engine()\n with superuser_engine.connect() as conn:\n conn.execution_options(isolation_level=\"AUTOCOMMIT\")\n conn.execute(text(f\"DROP DATABASE IF EXISTS {TEST_DB} WITH (FORCE)\"))\n conn.execute(text(f\"CREATE DATABASE {TEST_DB}\"))\n yield TEST_DB\n with superuser_engine.connect() as conn:\n conn.execution_options(isolation_level=\"AUTOCOMMIT\")\n conn.execute(text(f\"DROP DATABASE {TEST_DB} WITH (FORCE)\"))\n\n\[email protected](scope=\"session\")\ndef engine(test_db):\n return create_engine(\n _get_connection_string(\n DATABASES[\"default\"][\"USER\"],\n DATABASES[\"default\"][\"PASSWORD\"],\n DATABASES[\"default\"][\"HOST\"],\n test_db,\n ),\n future=True,\n )\n\n\ndef _get_superuser_engine():\n return create_engine(\n _get_connection_string(\n username=DATABASES[\"default\"][\"USER\"],\n password=DATABASES[\"default\"][\"PASSWORD\"],\n hostname=DATABASES[\"default\"][\"HOST\"],\n database=DATABASES[\"default\"][\"NAME\"],\n ),\n future=True,\n )\n\n\ndef _get_connection_string(username, password, hostname, database):\n return f\"postgresql://{username}:{password}@{hostname}/{database}\"\n", "path": "conftest.py" } ]
diff --git a/conftest.py b/conftest.py index 577e099be2..79447b14a7 100644 --- a/conftest.py +++ b/conftest.py @@ -15,7 +15,7 @@ def test_db_name(): return TEST_DB [email protected](scope="session") [email protected](scope="session", autouse=True) def test_db(): superuser_engine = _get_superuser_engine() with superuser_engine.connect() as conn:
pyodide__pyodide-4090
New Pyodide fatal error in scipy tests: Error: EAGAIN: resource temporarily unavailable, write This started to happen two days ago in https://github.com/lesteve/scipy-tests-pyodide, here is [a build log](https://github.com/lesteve/scipy-tests-pyodide/actions/runs/5946896593/job/16128148017). The stack trace looks like this: ``` Error: EAGAIN: resource temporarily unavailable, write at Object.writeSync (node:fs:936:3) at ue.write (/home/runner/work/scipy-tests-pyodide/scipy-tests-pyodide/node_modules/pyodide/pyodide.asm.js:6566:23) at Object.write (/home/runner/work/scipy-tests-pyodide/scipy-tests-pyodide/node_modules/pyodide/pyodide.asm.js:6301:28) at Object.write (/home/runner/work/scipy-tests-pyodide/scipy-tests-pyodide/node_modules/pyodide/pyodide.asm.js:12457:46) at doWritev (/home/runner/work/scipy-tests-pyodide/scipy-tests-pyodide/node_modules/pyodide/pyodide.asm.js:19506:23) at _fd_write (/home/runner/work/scipy-tests-pyodide/scipy-tests-pyodide/node_modules/pyodide/pyodide.asm.js:19589:19) at write (wasm://wasm/025b4bda:wasm-function[9088]:0x45849f) at _Py_write (wasm://wasm/025b4bda:wasm-function[4144]:0x2d9eec) at _io_FileIO_write (wasm://wasm/025b4bda:wasm-function[6443]:0x39de9f) at _PyCFunctionWithKeywords_TrampolineCall (/home/runner/work/scipy-tests-pyodide/scipy-tests-pyodide/node_modules/pyodide/pyodide.asm.js:6855:33) { errno: -11, syscall: 'write', code: 'EAGAIN', pyodide_fatal_error: true } ``` For some reason, it seems to happen right at the end of `scipy.special.tests` when pytest is printing its summary. In my experience, the timing of stdout vs stderr can not be fully trusted so maybe it happens in a test towards the end of scipy.special.tests. I'll be able to look into it more next week. My wild guess is that this could be related to #4035?
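The traceback shows the fatal error happening when node's `fs.writeSync` raises `EAGAIN` on a non-blocking stdout descriptor. The fix in the diff further down retries such writes after a short synchronous sleep (in TypeScript, in `src/js/streams.ts`); the same retry-on-EAGAIN pattern, expressed as a minimal standalone Python sketch (illustrative only, not Pyodide code), looks like this:

```python
import errno
import os
import time


def write_all(fd, data):
    """Write data to a possibly non-blocking fd, retrying on EAGAIN."""
    view = memoryview(data)
    while view:
        try:
            written = os.write(fd, view)
        except BlockingIOError as exc:
            # A non-blocking descriptor signals "try again later" via EAGAIN.
            if exc.errno != errno.EAGAIN:
                raise
            time.sleep(0.1)  # back off briefly, like the syncSleep(100) in the fix
            continue
        view = view[written:]
```

The design point is that EAGAIN on a non-blocking descriptor is transient, so backing off and retrying is safe, whereas propagating it turns a recoverable hiccup into a fatal error.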
[ { "content": "\"\"\"\nVarious common utilities for testing.\n\"\"\"\nimport os\nimport pathlib\nimport re\nimport sys\nfrom collections.abc import Sequence\n\nimport pytest\n\nROOT_PATH = pathlib.Path(__file__).parents[0].resolve()\nDIST_PATH = ROOT_PATH / \"dist\"\n\nsys.path.append(str(ROOT_PATH / \"pyodide-build\"))\nsys.path.append(str(ROOT_PATH / \"src\" / \"py\"))\n\nimport pytest_pyodide.runner\nfrom pytest_pyodide.utils import package_is_built as _package_is_built\n\nos.environ[\"IN_PYTEST\"] = \"1\"\n\n# There are a bunch of global objects that occasionally enter the hiwire cache\n# but never leave. The refcount checks get angry about them if they aren't preloaded.\n# We need to go through and touch them all once to keep everything okay.\npytest_pyodide.runner.INITIALIZE_SCRIPT = \"\"\"\n pyodide.globals.get;\n pyodide._api.pyodide_code.eval_code;\n pyodide._api.pyodide_code.eval_code_async;\n pyodide._api.pyodide_code.find_imports;\n pyodide._api.pyodide_ffi.register_js_module;\n pyodide._api.pyodide_ffi.unregister_js_module;\n pyodide._api.importlib.invalidate_caches;\n pyodide._api.package_loader.unpack_buffer;\n pyodide._api.package_loader.get_dynlibs;\n pyodide._api.package_loader.sub_resource_hash;\n pyodide.runPython(\"\");\n pyodide.pyimport(\"pyodide.ffi.wrappers\").destroy();\n pyodide.pyimport(\"pyodide.http\").destroy();\n pyodide.pyimport(\"pyodide_js._api\")\n\"\"\"\n\n\ndef pytest_addoption(parser):\n group = parser.getgroup(\"general\")\n group.addoption(\n \"--run-xfail\",\n action=\"store_true\",\n help=\"If provided, tests marked as xfail will be run\",\n )\n group.addoption(\n \"--skip-passed\",\n action=\"store_true\",\n help=(\n \"If provided, tests that passed on the last run will be skipped. \"\n \"CAUTION: this will skip tests even if tests are modified\"\n ),\n )\n\n\ndef maybe_skip_test(item, delayed=False):\n \"\"\"If necessary skip test at the fixture level, to avoid\n loading the selenium_standalone fixture which takes a long time.\n \"\"\"\n browsers = \"|\".join([\"firefox\", \"chrome\", \"node\", \"safari\"])\n is_common_test = str(item.fspath).endswith(\"test_packages_common.py\")\n\n skip_msg = None\n # Testing a package. Skip the test if the package is not built.\n match = re.match(\n r\".*/packages/(?P<name>[\\w\\-]+)/test_[\\w\\-]+\\.py\", str(item.parent.fspath)\n )\n if match and not is_common_test:\n package_name = match.group(\"name\")\n if not package_is_built(package_name) and re.match(\n rf\"test_[\\w\\-\\.]+\\[({browsers})[^\\]]*\\]\", item.name\n ):\n skip_msg = f\"package '{package_name}' is not built.\"\n\n # Common package import test. Skip it if the package is not built.\n if skip_msg is None and is_common_test and item.name.startswith(\"test_import\"):\n if not pytest.pyodide_runtimes:\n skip_msg = \"Not running browser tests\"\n\n else:\n match = re.match(\n rf\"test_import\\[({browsers})-(?P<name>[\\w\\-\\.]+)\\]\", item.name\n )\n if match:\n package_name = match.group(\"name\")\n if not package_is_built(package_name):\n # selenium_standalone as it takes a long time to initialize\n skip_msg = f\"package '{package_name}' is not built.\"\n else:\n raise AssertionError(\n f\"Couldn't parse package name from {item.name}. 
This should not happen!\"\n ) # If the test is going to be skipped remove the\n\n # TODO: also use this hook to skip doctests we cannot run (or run them\n # inside the selenium wrapper)\n\n if skip_msg is not None:\n if delayed:\n item.add_marker(pytest.mark.skip(reason=skip_msg))\n else:\n pytest.skip(skip_msg)\n\n\ndef pytest_configure(config):\n \"\"\"Monkey patch the function cwd_relative_nodeid\n\n returns the description of a test for the short summary table. Monkey patch\n it to reduce the verbosity of the test names in the table. This leaves\n enough room to see the information about the test failure in the summary.\n \"\"\"\n global CONFIG\n\n old_cwd_relative_nodeid = config.cwd_relative_nodeid\n\n def cwd_relative_nodeid(*args):\n result = old_cwd_relative_nodeid(*args)\n result = result.replace(\"src/tests/\", \"\")\n result = result.replace(\"packages/\", \"\")\n result = result.replace(\"::test_\", \"::\")\n return result\n\n config.cwd_relative_nodeid = cwd_relative_nodeid\n\n pytest.pyodide_dist_dir = config.getoption(\"--dist-dir\")\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Called after collect is completed.\n Parameters\n ----------\n config : pytest config\n items : list of collected items\n \"\"\"\n prev_test_result = {}\n if config.getoption(\"--skip-passed\"):\n cache = config.cache\n prev_test_result = cache.get(\"cache/lasttestresult\", {})\n\n skipped_docstrings = [\n \"_pyodide._base.CodeRunner\",\n \"pyodide.http.open_url\",\n \"pyodide.http.pyfetch\",\n ]\n\n for item in items:\n if isinstance(item, pytest.DoctestItem) and item.name in skipped_docstrings:\n item.add_marker(pytest.mark.skip(reason=\"skipped docstring\"))\n continue\n if prev_test_result.get(item.nodeid) in (\"passed\", \"warnings\", \"skip_passed\"):\n item.add_marker(pytest.mark.skip(reason=\"previously passed\"))\n continue\n\n maybe_skip_test(item, delayed=True)\n\n\n# Save test results to a cache\n# Code adapted from: https://github.com/pytest-dev/pytest/blob/main/src/_pytest/pastebin.py\[email protected](trylast=True)\ndef pytest_terminal_summary(terminalreporter):\n tr = terminalreporter\n cache = tr.config.cache\n assert cache\n\n test_result = {}\n for status in tr.stats:\n if status in (\"warnings\", \"deselected\"):\n continue\n\n for test in tr.stats[status]:\n if test.when != \"call\": # discard results from setup/teardown\n continue\n\n try:\n if test.longrepr and test.longrepr[2] in \"previously passed\":\n test_result[test.nodeid] = \"skip_passed\"\n else:\n test_result[test.nodeid] = test.outcome\n except Exception:\n pass\n\n cache.set(\"cache/lasttestresult\", test_result)\n\n\[email protected](hookwrapper=True)\ndef pytest_runtest_call(item):\n \"\"\"We want to run extra verification at the start and end of each test to\n check that we haven't leaked memory. According to pytest issue #5044, it's\n not possible to \"Fail\" a test from a fixture (no matter what you do, pytest\n sets the test status to \"Error\"). The approach suggested there is hook\n pytest_runtest_call as we do here. 
To get access to the selenium fixture, we\n imitate the definition of pytest_pyfunc_call:\n https://github.com/pytest-dev/pytest/blob/6.2.2/src/_pytest/python.py#L177\n\n Pytest issue #5044:\n https://github.com/pytest-dev/pytest/issues/5044\n \"\"\"\n browser = None\n for fixture in item._fixtureinfo.argnames:\n if fixture.startswith(\"selenium\"):\n browser = item.funcargs[fixture]\n break\n\n if not browser or not browser.pyodide_loaded:\n yield\n return\n\n trace_pyproxies = pytest.mark.skip_pyproxy_check.mark not in item.own_markers\n trace_hiwire_refs = (\n trace_pyproxies and pytest.mark.skip_refcount_check.mark not in item.own_markers\n )\n yield from extra_checks_test_wrapper(browser, trace_hiwire_refs, trace_pyproxies)\n\n\ndef extra_checks_test_wrapper(browser, trace_hiwire_refs, trace_pyproxies):\n \"\"\"Extra conditions for test to pass:\n 1. No explicit request for test to fail\n 2. No leaked JsRefs\n 3. No leaked PyProxys\n \"\"\"\n browser.clear_force_test_fail()\n init_num_keys = browser.get_num_hiwire_keys()\n if trace_pyproxies:\n browser.enable_pyproxy_tracing()\n init_num_proxies = browser.get_num_proxies()\n a = yield\n try:\n # If these guys cause a crash because the test really screwed things up,\n # we override the error message with the better message returned by\n # a.result() in the finally block.\n browser.disable_pyproxy_tracing()\n browser.restore_state()\n finally:\n # if there was an error in the body of the test, flush it out by calling\n # get_result (we don't want to override the error message by raising a\n # different error here.)\n a.get_result()\n if browser.force_test_fail:\n raise Exception(\"Test failure explicitly requested but no error was raised.\")\n assert browser.run_js(\"return pyodide._module.hiwire.stack_length()\") == 0\n if trace_pyproxies and trace_hiwire_refs:\n delta_proxies = browser.get_num_proxies() - init_num_proxies\n delta_keys = browser.get_num_hiwire_keys() - init_num_keys\n assert (delta_proxies, delta_keys) == (0, 0) or delta_keys < 0\n if trace_hiwire_refs:\n delta_keys = browser.get_num_hiwire_keys() - init_num_keys\n assert delta_keys <= 0\n\n\ndef package_is_built(package_name):\n return _package_is_built(package_name, pytest.pyodide_dist_dir)\n\n\ndef strip_assertions_stderr(messages: Sequence[str]) -> list[str]:\n \"\"\"Strip additional messages on stderr included when ASSERTIONS=1\"\"\"\n res = []\n for msg in messages:\n if msg.strip() in [\n \"sigaction: signal type not supported: this is a no-op.\",\n \"Calling stub instead of siginterrupt()\",\n \"warning: no blob constructor, cannot create blobs with mimetypes\",\n \"warning: no BlobBuilder\",\n ]:\n continue\n res.append(msg)\n return res\n", "path": "conftest.py" } ]
[ { "content": "\"\"\"\nVarious common utilities for testing.\n\"\"\"\nimport os\nimport pathlib\nimport re\nimport sys\nfrom collections.abc import Sequence\n\nimport pytest\n\nROOT_PATH = pathlib.Path(__file__).parents[0].resolve()\nDIST_PATH = ROOT_PATH / \"dist\"\n\nsys.path.append(str(ROOT_PATH / \"pyodide-build\"))\nsys.path.append(str(ROOT_PATH / \"src\" / \"py\"))\n\nimport pytest_pyodide.runner\nfrom pytest_pyodide.utils import package_is_built as _package_is_built\n\nos.environ[\"IN_PYTEST\"] = \"1\"\n\n# There are a bunch of global objects that occasionally enter the hiwire cache\n# but never leave. The refcount checks get angry about them if they aren't preloaded.\n# We need to go through and touch them all once to keep everything okay.\npytest_pyodide.runner.INITIALIZE_SCRIPT = \"\"\"\n pyodide.globals.get;\n pyodide._api.pyodide_code.eval_code;\n pyodide._api.pyodide_code.eval_code_async;\n pyodide._api.pyodide_code.find_imports;\n pyodide._api.pyodide_ffi.register_js_module;\n pyodide._api.pyodide_ffi.unregister_js_module;\n pyodide._api.importlib.invalidate_caches;\n pyodide._api.package_loader.unpack_buffer;\n pyodide._api.package_loader.get_dynlibs;\n pyodide._api.package_loader.sub_resource_hash;\n pyodide.runPython(\"\");\n pyodide.pyimport(\"pyodide.ffi.wrappers\").destroy();\n pyodide.pyimport(\"pyodide.http\").destroy();\n pyodide.pyimport(\"pyodide_js._api\")\n\"\"\"\n\nonly_node = pytest.mark.xfail_browsers(\n chrome=\"node only\", firefox=\"node only\", safari=\"node only\"\n)\n\n\ndef pytest_addoption(parser):\n group = parser.getgroup(\"general\")\n group.addoption(\n \"--run-xfail\",\n action=\"store_true\",\n help=\"If provided, tests marked as xfail will be run\",\n )\n group.addoption(\n \"--skip-passed\",\n action=\"store_true\",\n help=(\n \"If provided, tests that passed on the last run will be skipped. \"\n \"CAUTION: this will skip tests even if tests are modified\"\n ),\n )\n\n\ndef maybe_skip_test(item, delayed=False):\n \"\"\"If necessary skip test at the fixture level, to avoid\n loading the selenium_standalone fixture which takes a long time.\n \"\"\"\n browsers = \"|\".join([\"firefox\", \"chrome\", \"node\", \"safari\"])\n is_common_test = str(item.fspath).endswith(\"test_packages_common.py\")\n\n skip_msg = None\n # Testing a package. Skip the test if the package is not built.\n match = re.match(\n r\".*/packages/(?P<name>[\\w\\-]+)/test_[\\w\\-]+\\.py\", str(item.parent.fspath)\n )\n if match and not is_common_test:\n package_name = match.group(\"name\")\n if not package_is_built(package_name) and re.match(\n rf\"test_[\\w\\-\\.]+\\[({browsers})[^\\]]*\\]\", item.name\n ):\n skip_msg = f\"package '{package_name}' is not built.\"\n\n # Common package import test. Skip it if the package is not built.\n if skip_msg is None and is_common_test and item.name.startswith(\"test_import\"):\n if not pytest.pyodide_runtimes:\n skip_msg = \"Not running browser tests\"\n\n else:\n match = re.match(\n rf\"test_import\\[({browsers})-(?P<name>[\\w\\-\\.]+)\\]\", item.name\n )\n if match:\n package_name = match.group(\"name\")\n if not package_is_built(package_name):\n # selenium_standalone as it takes a long time to initialize\n skip_msg = f\"package '{package_name}' is not built.\"\n else:\n raise AssertionError(\n f\"Couldn't parse package name from {item.name}. 
This should not happen!\"\n ) # If the test is going to be skipped remove the\n\n # TODO: also use this hook to skip doctests we cannot run (or run them\n # inside the selenium wrapper)\n\n if skip_msg is not None:\n if delayed:\n item.add_marker(pytest.mark.skip(reason=skip_msg))\n else:\n pytest.skip(skip_msg)\n\n\ndef pytest_configure(config):\n \"\"\"Monkey patch the function cwd_relative_nodeid\n\n returns the description of a test for the short summary table. Monkey patch\n it to reduce the verbosity of the test names in the table. This leaves\n enough room to see the information about the test failure in the summary.\n \"\"\"\n global CONFIG\n\n old_cwd_relative_nodeid = config.cwd_relative_nodeid\n\n def cwd_relative_nodeid(*args):\n result = old_cwd_relative_nodeid(*args)\n result = result.replace(\"src/tests/\", \"\")\n result = result.replace(\"packages/\", \"\")\n result = result.replace(\"::test_\", \"::\")\n return result\n\n config.cwd_relative_nodeid = cwd_relative_nodeid\n\n pytest.pyodide_dist_dir = config.getoption(\"--dist-dir\")\n\n\ndef pytest_collection_modifyitems(config, items):\n \"\"\"Called after collect is completed.\n Parameters\n ----------\n config : pytest config\n items : list of collected items\n \"\"\"\n prev_test_result = {}\n if config.getoption(\"--skip-passed\"):\n cache = config.cache\n prev_test_result = cache.get(\"cache/lasttestresult\", {})\n\n skipped_docstrings = [\n \"_pyodide._base.CodeRunner\",\n \"pyodide.http.open_url\",\n \"pyodide.http.pyfetch\",\n ]\n\n for item in items:\n if isinstance(item, pytest.DoctestItem) and item.name in skipped_docstrings:\n item.add_marker(pytest.mark.skip(reason=\"skipped docstring\"))\n continue\n if prev_test_result.get(item.nodeid) in (\"passed\", \"warnings\", \"skip_passed\"):\n item.add_marker(pytest.mark.skip(reason=\"previously passed\"))\n continue\n\n maybe_skip_test(item, delayed=True)\n\n\n# Save test results to a cache\n# Code adapted from: https://github.com/pytest-dev/pytest/blob/main/src/_pytest/pastebin.py\[email protected](trylast=True)\ndef pytest_terminal_summary(terminalreporter):\n tr = terminalreporter\n cache = tr.config.cache\n assert cache\n\n test_result = {}\n for status in tr.stats:\n if status in (\"warnings\", \"deselected\"):\n continue\n\n for test in tr.stats[status]:\n if test.when != \"call\": # discard results from setup/teardown\n continue\n\n try:\n if test.longrepr and test.longrepr[2] in \"previously passed\":\n test_result[test.nodeid] = \"skip_passed\"\n else:\n test_result[test.nodeid] = test.outcome\n except Exception:\n pass\n\n cache.set(\"cache/lasttestresult\", test_result)\n\n\[email protected](hookwrapper=True)\ndef pytest_runtest_call(item):\n \"\"\"We want to run extra verification at the start and end of each test to\n check that we haven't leaked memory. According to pytest issue #5044, it's\n not possible to \"Fail\" a test from a fixture (no matter what you do, pytest\n sets the test status to \"Error\"). The approach suggested there is hook\n pytest_runtest_call as we do here. 
To get access to the selenium fixture, we\n imitate the definition of pytest_pyfunc_call:\n https://github.com/pytest-dev/pytest/blob/6.2.2/src/_pytest/python.py#L177\n\n Pytest issue #5044:\n https://github.com/pytest-dev/pytest/issues/5044\n \"\"\"\n browser = None\n for fixture in item._fixtureinfo.argnames:\n if fixture.startswith(\"selenium\"):\n browser = item.funcargs[fixture]\n break\n\n if not browser or not browser.pyodide_loaded:\n yield\n return\n\n trace_pyproxies = pytest.mark.skip_pyproxy_check.mark not in item.own_markers\n trace_hiwire_refs = (\n trace_pyproxies and pytest.mark.skip_refcount_check.mark not in item.own_markers\n )\n yield from extra_checks_test_wrapper(browser, trace_hiwire_refs, trace_pyproxies)\n\n\ndef extra_checks_test_wrapper(browser, trace_hiwire_refs, trace_pyproxies):\n \"\"\"Extra conditions for test to pass:\n 1. No explicit request for test to fail\n 2. No leaked JsRefs\n 3. No leaked PyProxys\n \"\"\"\n browser.clear_force_test_fail()\n init_num_keys = browser.get_num_hiwire_keys()\n if trace_pyproxies:\n browser.enable_pyproxy_tracing()\n init_num_proxies = browser.get_num_proxies()\n a = yield\n try:\n # If these guys cause a crash because the test really screwed things up,\n # we override the error message with the better message returned by\n # a.result() in the finally block.\n browser.disable_pyproxy_tracing()\n browser.restore_state()\n finally:\n # if there was an error in the body of the test, flush it out by calling\n # get_result (we don't want to override the error message by raising a\n # different error here.)\n a.get_result()\n if browser.force_test_fail:\n raise Exception(\"Test failure explicitly requested but no error was raised.\")\n assert browser.run_js(\"return pyodide._module.hiwire.stack_length()\") == 0\n if trace_pyproxies and trace_hiwire_refs:\n delta_proxies = browser.get_num_proxies() - init_num_proxies\n delta_keys = browser.get_num_hiwire_keys() - init_num_keys\n assert (delta_proxies, delta_keys) == (0, 0) or delta_keys < 0\n if trace_hiwire_refs:\n delta_keys = browser.get_num_hiwire_keys() - init_num_keys\n assert delta_keys <= 0\n\n\ndef package_is_built(package_name):\n return _package_is_built(package_name, pytest.pyodide_dist_dir)\n\n\ndef strip_assertions_stderr(messages: Sequence[str]) -> list[str]:\n \"\"\"Strip additional messages on stderr included when ASSERTIONS=1\"\"\"\n res = []\n for msg in messages:\n if msg.strip() in [\n \"sigaction: signal type not supported: this is a no-op.\",\n \"Calling stub instead of siginterrupt()\",\n \"warning: no blob constructor, cannot create blobs with mimetypes\",\n \"warning: no BlobBuilder\",\n ]:\n continue\n res.append(msg)\n return res\n", "path": "conftest.py" } ]
diff --git a/conftest.py b/conftest.py index c5f0a1df72a..fd8ad7a78b5 100644 --- a/conftest.py +++ b/conftest.py @@ -40,6 +40,10 @@ pyodide.pyimport("pyodide_js._api") """ +only_node = pytest.mark.xfail_browsers( + chrome="node only", firefox="node only", safari="node only" +) + def pytest_addoption(parser): group = parser.getgroup("general") diff --git a/src/js/streams.ts b/src/js/streams.ts index 5bf9613da30..007dde028cd 100644 --- a/src/js/streams.ts +++ b/src/js/streams.ts @@ -90,14 +90,37 @@ function syncSleep(timeout: number): boolean { } } -function readHelper(devops: Reader, buffer: Uint8Array): number { +/** + * Calls the callback and handle node EAGAIN errors. + * + * In the long run, it may be helpful to allow C code to handle these errors on + * their own, at least if the Emscripten file descriptor has O_NONBLOCK on it. + * That way the code could do other periodic tasks in the delay loop. + * + * This code is outside of the stream handler itself so if the user wants to + * inject some code in this loop they could do it with: + * ```js + * read(buffer) { + * try { + * return doTheRead(); + * } catch(e) { + * if (e && e.code === "EAGAIN") { + * // do periodic tasks + * } + * // in every case rethrow the error + * throw e; + * } + * } + * ``` + */ +function handleEAGAIN(cb: () => number): number { while (true) { try { - return devops.read(buffer); + return cb(); } catch (e: any) { if (e && e.code === "EAGAIN") { - // Presumably this means we're in node and tried to read from an - // O_NONBLOCK file descriptor. Synchronously sleep for 100ms as + // Presumably this means we're in node and tried to read from/write to + // an O_NONBLOCK file descriptor. Synchronously sleep for 100ms as // requested by EAGAIN and try again. In case for some reason we fail to // sleep, propagate the error (it will turn into an EOFError). if (syncSleep(100)) { @@ -109,6 +132,44 @@ function readHelper(devops: Reader, buffer: Uint8Array): number { } } +function readWriteHelper(stream: Stream, cb: () => number, method: string) { + let nbytes; + try { + nbytes = handleEAGAIN(cb); + } catch (e: any) { + if (e && e.code && Module.ERRNO_CODES[e.code]) { + throw new FS.ErrnoError(Module.ERRNO_CODES[e.code]); + } + if (isErrnoError(e)) { + // the handler set an errno, propagate it + throw e; + } + console.error("Error thrown in read:"); + console.error(e); + throw new FS.ErrnoError(cDefs.EIO); + } + if (nbytes === undefined) { + // Prevent an infinite loop caused by incorrect code that doesn't return a + // value + // Maybe we should set nbytes = buffer.length here instead? 
+ console.warn( + `${method} returned undefined; a correct implementation must return a number`, + ); + throw new FS.ErrnoError(cDefs.EIO); + } + if (nbytes !== 0) { + stream.node.timestamp = Date.now(); + } + return nbytes; +} + +const prepareBuffer = ( + buffer: Uint8Array, + offset: number, + length: number, +): Uint8Array => + API.typedArrayAsUint8Array(buffer).subarray(offset, offset + length); + const stream_ops: StreamOps = { open: function (stream) { const devops = DEVOPS[stream.node.rdev]; @@ -130,69 +191,12 @@ const stream_ops: StreamOps = { } }, read: function (stream, buffer, offset, length, pos /* ignored */) { - buffer = API.typedArrayAsUint8Array(buffer).subarray( - offset, - offset + length, - ); - let bytesRead; - try { - bytesRead = readHelper(stream.devops, buffer); - } catch (e: any) { - if (e && e.code && Module.ERRNO_CODES[e.code]) { - throw new FS.ErrnoError(Module.ERRNO_CODES[e.code]); - } - if (isErrnoError(e)) { - // the handler set an errno, propagate it - throw e; - } - console.error("Error thrown in read:"); - console.error(e); - throw new FS.ErrnoError(cDefs.EIO); - } - if (bytesRead === undefined) { - // Prevent an infinite loop caused by incorrect code that doesn't return a - // value - // - // Maybe we should set bytesWritten = buffer.length here instead? - console.warn( - "read returned undefined; a correct implementation must return a number", - ); - throw new FS.ErrnoError(cDefs.EIO); - } - if (bytesRead !== 0) { - stream.node.timestamp = Date.now(); - } - return bytesRead; + buffer = prepareBuffer(buffer, offset, length); + return readWriteHelper(stream, () => stream.devops.read(buffer), "read"); }, write: function (stream, buffer, offset, length, pos /* ignored */): number { - buffer = API.typedArrayAsUint8Array(buffer); - let bytesWritten; - try { - bytesWritten = stream.devops.write( - buffer.subarray(offset, offset + length), - ); - } catch (e) { - if (isErrnoError(e)) { - throw e; - } - console.error("Error thrown in write:"); - console.error(e); - throw new FS.ErrnoError(cDefs.EIO); - } - if (bytesWritten === undefined) { - // Prevent an infinite loop caused by incorrect code that doesn't return a - // value - // - // Maybe we should set bytesWritten = buffer.length here instead? - console.warn( - "write returned undefined; a correct implementation must return a number", - ); - throw new FS.ErrnoError(cDefs.EIO); - } - if (length) { - stream.node.timestamp = Date.now(); - } - return bytesWritten; + buffer = prepareBuffer(buffer, offset, length); + return readWriteHelper(stream, () => stream.devops.write(buffer), "write"); }, }; @@ -560,20 +564,7 @@ class LegacyReader { if (this.saved) { return this.saved; } - let val; - try { - val = this.infunc(); - } catch (e) { - if (isErrnoError(e)) { - // Allow infunc to set other errno - throw e; - } - // Since we're throwing a new error without the traceback, let people know - // what the original cause was. 
- console.error("Error thrown in stdin:"); - console.error(e); - throw new FS.ErrnoError(cDefs.EIO); - } + let val = this.infunc(); if (typeof val === "number") { return val; } diff --git a/src/tests/test_streams.py b/src/tests/test_streams.py index 6638819d882..0f8b59c856a 100644 --- a/src/tests/test_streams.py +++ b/src/tests/test_streams.py @@ -1,7 +1,7 @@ import pytest from pytest_pyodide import run_in_pyodide -from conftest import strip_assertions_stderr +from conftest import only_node, strip_assertions_stderr @pytest.mark.skip_refcount_check @@ -576,3 +576,49 @@ def test_custom_stdout_interrupts(selenium, method): pyodide.setStdout(); """ ) + + +@only_node +@run_in_pyodide +def test_node_eagain(selenium): + from pyodide.code import run_js + + result = run_js( + """ + pyodide.setStdin({ + i: 0, + stdin() { + this.i ++; + if (this.i < 3) { + throw {code: "EAGAIN"}; + } + this.i = 0; + return "abcdefg"; + } + }); + let result = []; + pyodide.setStdout({ + i: 0, + write(a) { + this.i ++; + if (this.i < 3) { + throw {code: "EAGAIN"}; + } + this.i = 0; + result.push(new TextDecoder().decode(a)); + return a.length; + } + }); + result + """ + ) + try: + assert input() == "abcdefg" + print("hi there!") + assert result[0] == "hi there!\n" + finally: + run_js( + """ + pyodide.setStdin(); + """ + )
falconry__falcon-1985
StaticRouteAsync leaves open files When using static routes with a [`falcon.asgi.App`](https://falcon.readthedocs.io/en/stable/api/app.html#asgi-app), it seems that the `_AsyncFileReader` wrapper does not implement any `.close()` method, so files are left open. On CPython, I wasn't able to demonstrate any practical impact of this bug as the file object in question is refcounted to 0 and garbage collected as soon as it goes out of scope. However, that isn't the case when running `uvicorn` on PyPy 3.7, as PyPy uses a different GC implementation. Test case in point: ``` import io import logging import os.path import unittest.mock import falcon.asgi logging.basicConfig( format='%(asctime)s [%(levelname)s] %(message)s', level=logging.INFO) class DebugIO(io.BytesIO): @classmethod def open(cls, *args, **kwargs): return cls(b'Test data!\n') def close(self): logging.info(f'{self}.close()') super().close() app = falcon.asgi.App() app.add_static_route('/files', '/tmp') debug = unittest.mock.patch('io.open', DebugIO.open) debug.start() ```
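The fix applied in the diff below adds an explicit async `close()` to the wrapper so the file is closed deterministically instead of waiting for garbage collection. A minimal standalone sketch of that shape (simplified, not Falcon's exact code):

```python
import asyncio
from functools import partial


class AsyncFileWrapper:
    """Illustrative wrapper: delegate blocking file I/O to an executor."""

    def __init__(self, file):
        self._file = file

    async def read(self, size=-1):
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, partial(self._file.read, size))

    async def close(self):
        # Without an explicit close(), the underlying file is only closed when
        # the wrapper is garbage collected, which PyPy may delay indefinitely.
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, self._file.close)
```

Running the blocking `close()` in an executor keeps the event loop responsive, mirroring how `read()` is already delegated.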
[ { "content": "from functools import partial\nimport io\nimport os\nimport pathlib\nimport re\n\nimport falcon\nfrom falcon.util.sync import get_running_loop\n\n\ndef _open_range(file_path, req_range):\n \"\"\"Open a file for a ranged request.\n\n Args:\n file_path (str): Path to the file to open.\n req_range (Optional[Tuple[int, int]]): Request.range value.\n Returns:\n tuple: Three-member tuple of (stream, content-length, content-range).\n If req_range is ``None`` or ignored, content-range will be\n ``None``; otherwise, the stream will be appropriately seeked and\n possibly bounded, and the content-range will be a tuple of\n (start, end, size).\n \"\"\"\n fh = io.open(file_path, 'rb')\n size = os.fstat(fh.fileno()).st_size\n if req_range is None:\n return fh, size, None\n\n start, end = req_range\n if size == 0:\n # NOTE(tipabu): Ignore Range headers for zero-byte files; just serve\n # the empty body since Content-Range can't be used to express a\n # zero-byte body.\n return fh, 0, None\n\n if start < 0 and end == -1:\n # NOTE(tipabu): Special case: only want the last N bytes.\n start = max(start, -size)\n fh.seek(start, os.SEEK_END)\n # NOTE(vytas): Wrap in order to prevent sendfile from being used, as\n # its implementation was found to be buggy in many popular WSGI\n # servers for open files with a non-zero offset.\n return _BoundedFile(fh, -start), -start, (size + start, size - 1, size)\n\n if start >= size:\n fh.close()\n raise falcon.HTTPRangeNotSatisfiable(size)\n\n fh.seek(start)\n if end == -1:\n # NOTE(vytas): Wrap in order to prevent sendfile from being used, as\n # its implementation was found to be buggy in many popular WSGI\n # servers for open files with a non-zero offset.\n length = size - start\n return _BoundedFile(fh, length), length, (start, size - 1, size)\n\n end = min(end, size - 1)\n length = end - start + 1\n return _BoundedFile(fh, length), length, (start, end, size)\n\n\nclass _BoundedFile:\n \"\"\"Wrap a file to only allow part of it to be read.\n\n Args:\n fh: The file object to wrap. Should be opened in binary mode,\n and already seeked to an appropriate position. The object must\n expose a ``.close()`` method.\n length (int): Number of bytes that may be read.\n \"\"\"\n\n def __init__(self, fh, length):\n self.fh = fh\n self.close = fh.close\n self.remaining = length\n\n def read(self, size=-1):\n \"\"\"Read the underlying file object, within the specified bounds.\"\"\"\n if size < 0:\n size = self.remaining\n else:\n size = min(size, self.remaining)\n data = self.fh.read(size)\n self.remaining -= len(data)\n return data\n\n\nclass StaticRoute:\n \"\"\"Represents a static route.\n\n Args:\n prefix (str): The path prefix to match for this route. If the\n path in the requested URI starts with this string, the remainder\n of the path will be appended to the source directory to\n determine the file to serve. This is done in a secure manner\n to prevent an attacker from requesting a file outside the\n specified directory.\n\n Note that static routes are matched in LIFO order, and are only\n attempted after checking dynamic routes and sinks.\n\n directory (Union[str, pathlib.Path]): The source directory from which to\n serve files. Must be an absolute path.\n downloadable (bool): Set to ``True`` to include a\n Content-Disposition header in the response. The \"filename\"\n directive is simply set to the name of the requested file.\n fallback_filename (str): Fallback filename used when the requested file\n is not found. 
Can be a relative path inside the prefix folder or\n any valid absolute path.\n\n Note:\n If the fallback file is served instead of the requested file,\n the response Content-Type header, as well as the\n Content-Disposition header (provided it was requested with the\n `downloadable` parameter described above), are derived from the\n fallback filename, as opposed to the requested filename.\n \"\"\"\n\n # NOTE(kgriffs): Don't allow control characters and reserved chars\n _DISALLOWED_CHARS_PATTERN = re.compile('[\\x00-\\x1f\\x80-\\x9f\\ufffd~?<>:*|\\'\"]')\n\n # NOTE(kgriffs): If somehow an executable code exploit is triggerable, this\n # minimizes how much can be included in the payload.\n _MAX_NON_PREFIXED_LEN = 512\n\n def __init__(self, prefix, directory, downloadable=False, fallback_filename=None):\n if not prefix.startswith('/'):\n raise ValueError(\"prefix must start with '/'\")\n\n # TODO(vgerak): Remove the check when py3.5 is dropped.\n if isinstance(directory, pathlib.Path):\n directory = str(directory)\n\n self._directory = os.path.normpath(directory)\n if not os.path.isabs(self._directory):\n raise ValueError('directory must be an absolute path')\n\n if fallback_filename is None:\n self._fallback_filename = None\n else:\n self._fallback_filename = os.path.normpath(\n os.path.join(self._directory, fallback_filename)\n )\n if not os.path.isfile(self._fallback_filename):\n raise ValueError('fallback_filename is not a file')\n\n # NOTE(kgriffs): Ensure it ends with a path separator to ensure\n # we only match on the complete segment. Don't raise an error\n # because most people won't expect to have to append a slash.\n if not prefix.endswith('/'):\n prefix += '/'\n\n self._prefix = prefix\n self._downloadable = downloadable\n\n def match(self, path):\n \"\"\"Check whether the given path matches this route.\"\"\"\n if self._fallback_filename is None:\n return path.startswith(self._prefix)\n return path.startswith(self._prefix) or path == self._prefix[:-1]\n\n def __call__(self, req, resp):\n \"\"\"Resource responder for this route.\"\"\"\n\n without_prefix = req.path[len(self._prefix) :]\n\n # NOTE(kgriffs): Check surrounding whitespace and strip trailing\n # periods, which are illegal on windows\n # NOTE(CaselIT): An empty filename is allowed when fallback_filename is provided\n if (\n not (without_prefix or self._fallback_filename is not None)\n or without_prefix.strip().rstrip('.') != without_prefix\n or self._DISALLOWED_CHARS_PATTERN.search(without_prefix)\n or '\\\\' in without_prefix\n or '//' in without_prefix\n or len(without_prefix) > self._MAX_NON_PREFIXED_LEN\n ):\n\n raise falcon.HTTPNotFound()\n\n normalized = os.path.normpath(without_prefix)\n\n if normalized.startswith('../') or normalized.startswith('/'):\n raise falcon.HTTPNotFound()\n\n file_path = os.path.join(self._directory, normalized)\n\n # NOTE(kgriffs): Final sanity-check just to be safe. This check\n # should never succeed, but this should guard against us having\n # overlooked something.\n if '..' 
in file_path or not file_path.startswith(self._directory):\n raise falcon.HTTPNotFound()\n\n req_range = req.range\n if req.range_unit != 'bytes':\n req_range = None\n try:\n stream, length, content_range = _open_range(file_path, req_range)\n resp.set_stream(stream, length)\n except IOError:\n if self._fallback_filename is None:\n raise falcon.HTTPNotFound()\n try:\n stream, length, content_range = _open_range(\n self._fallback_filename, req_range\n )\n resp.set_stream(stream, length)\n file_path = self._fallback_filename\n except IOError:\n raise falcon.HTTPNotFound()\n\n suffix = os.path.splitext(file_path)[1]\n resp.content_type = resp.options.static_media_types.get(\n suffix, 'application/octet-stream'\n )\n resp.accept_ranges = 'bytes'\n\n if self._downloadable:\n resp.downloadable_as = os.path.basename(file_path)\n if content_range:\n resp.status = falcon.HTTP_206\n resp.content_range = content_range\n\n\nclass StaticRouteAsync(StaticRoute):\n \"\"\"Subclass of StaticRoute with modifications to support ASGI apps.\"\"\"\n\n async def __call__(self, req, resp):\n super().__call__(req, resp)\n\n # NOTE(kgriffs): Fixup resp.stream so that it is non-blocking\n resp.stream = _AsyncFileReader(resp.stream)\n\n\nclass _AsyncFileReader:\n \"\"\"Adapts a standard file I/O object so that reads are non-blocking.\"\"\"\n\n def __init__(self, file):\n self._file = file\n self._loop = get_running_loop()\n\n async def read(self, size=-1):\n return await self._loop.run_in_executor(None, partial(self._file.read, size))\n", "path": "falcon/routing/static.py" } ]
[ { "content": "from functools import partial\nimport io\nimport os\nimport pathlib\nimport re\n\nimport falcon\nfrom falcon.util.sync import get_running_loop\n\n\ndef _open_range(file_path, req_range):\n \"\"\"Open a file for a ranged request.\n\n Args:\n file_path (str): Path to the file to open.\n req_range (Optional[Tuple[int, int]]): Request.range value.\n Returns:\n tuple: Three-member tuple of (stream, content-length, content-range).\n If req_range is ``None`` or ignored, content-range will be\n ``None``; otherwise, the stream will be appropriately seeked and\n possibly bounded, and the content-range will be a tuple of\n (start, end, size).\n \"\"\"\n fh = io.open(file_path, 'rb')\n size = os.fstat(fh.fileno()).st_size\n if req_range is None:\n return fh, size, None\n\n start, end = req_range\n if size == 0:\n # NOTE(tipabu): Ignore Range headers for zero-byte files; just serve\n # the empty body since Content-Range can't be used to express a\n # zero-byte body.\n return fh, 0, None\n\n if start < 0 and end == -1:\n # NOTE(tipabu): Special case: only want the last N bytes.\n start = max(start, -size)\n fh.seek(start, os.SEEK_END)\n # NOTE(vytas): Wrap in order to prevent sendfile from being used, as\n # its implementation was found to be buggy in many popular WSGI\n # servers for open files with a non-zero offset.\n return _BoundedFile(fh, -start), -start, (size + start, size - 1, size)\n\n if start >= size:\n fh.close()\n raise falcon.HTTPRangeNotSatisfiable(size)\n\n fh.seek(start)\n if end == -1:\n # NOTE(vytas): Wrap in order to prevent sendfile from being used, as\n # its implementation was found to be buggy in many popular WSGI\n # servers for open files with a non-zero offset.\n length = size - start\n return _BoundedFile(fh, length), length, (start, size - 1, size)\n\n end = min(end, size - 1)\n length = end - start + 1\n return _BoundedFile(fh, length), length, (start, end, size)\n\n\nclass _BoundedFile:\n \"\"\"Wrap a file to only allow part of it to be read.\n\n Args:\n fh: The file object to wrap. Should be opened in binary mode,\n and already seeked to an appropriate position. The object must\n expose a ``.close()`` method.\n length (int): Number of bytes that may be read.\n \"\"\"\n\n def __init__(self, fh, length):\n self.fh = fh\n self.close = fh.close\n self.remaining = length\n\n def read(self, size=-1):\n \"\"\"Read the underlying file object, within the specified bounds.\"\"\"\n if size < 0:\n size = self.remaining\n else:\n size = min(size, self.remaining)\n data = self.fh.read(size)\n self.remaining -= len(data)\n return data\n\n\nclass StaticRoute:\n \"\"\"Represents a static route.\n\n Args:\n prefix (str): The path prefix to match for this route. If the\n path in the requested URI starts with this string, the remainder\n of the path will be appended to the source directory to\n determine the file to serve. This is done in a secure manner\n to prevent an attacker from requesting a file outside the\n specified directory.\n\n Note that static routes are matched in LIFO order, and are only\n attempted after checking dynamic routes and sinks.\n\n directory (Union[str, pathlib.Path]): The source directory from which to\n serve files. Must be an absolute path.\n downloadable (bool): Set to ``True`` to include a\n Content-Disposition header in the response. The \"filename\"\n directive is simply set to the name of the requested file.\n fallback_filename (str): Fallback filename used when the requested file\n is not found. 
Can be a relative path inside the prefix folder or\n any valid absolute path.\n\n Note:\n If the fallback file is served instead of the requested file,\n the response Content-Type header, as well as the\n Content-Disposition header (provided it was requested with the\n `downloadable` parameter described above), are derived from the\n fallback filename, as opposed to the requested filename.\n \"\"\"\n\n # NOTE(kgriffs): Don't allow control characters and reserved chars\n _DISALLOWED_CHARS_PATTERN = re.compile('[\\x00-\\x1f\\x80-\\x9f\\ufffd~?<>:*|\\'\"]')\n\n # NOTE(kgriffs): If somehow an executable code exploit is triggerable, this\n # minimizes how much can be included in the payload.\n _MAX_NON_PREFIXED_LEN = 512\n\n def __init__(self, prefix, directory, downloadable=False, fallback_filename=None):\n if not prefix.startswith('/'):\n raise ValueError(\"prefix must start with '/'\")\n\n # TODO(vgerak): Remove the check when py3.5 is dropped.\n if isinstance(directory, pathlib.Path):\n directory = str(directory)\n\n self._directory = os.path.normpath(directory)\n if not os.path.isabs(self._directory):\n raise ValueError('directory must be an absolute path')\n\n if fallback_filename is None:\n self._fallback_filename = None\n else:\n self._fallback_filename = os.path.normpath(\n os.path.join(self._directory, fallback_filename)\n )\n if not os.path.isfile(self._fallback_filename):\n raise ValueError('fallback_filename is not a file')\n\n # NOTE(kgriffs): Ensure it ends with a path separator to ensure\n # we only match on the complete segment. Don't raise an error\n # because most people won't expect to have to append a slash.\n if not prefix.endswith('/'):\n prefix += '/'\n\n self._prefix = prefix\n self._downloadable = downloadable\n\n def match(self, path):\n \"\"\"Check whether the given path matches this route.\"\"\"\n if self._fallback_filename is None:\n return path.startswith(self._prefix)\n return path.startswith(self._prefix) or path == self._prefix[:-1]\n\n def __call__(self, req, resp):\n \"\"\"Resource responder for this route.\"\"\"\n\n without_prefix = req.path[len(self._prefix) :]\n\n # NOTE(kgriffs): Check surrounding whitespace and strip trailing\n # periods, which are illegal on windows\n # NOTE(CaselIT): An empty filename is allowed when fallback_filename is provided\n if (\n not (without_prefix or self._fallback_filename is not None)\n or without_prefix.strip().rstrip('.') != without_prefix\n or self._DISALLOWED_CHARS_PATTERN.search(without_prefix)\n or '\\\\' in without_prefix\n or '//' in without_prefix\n or len(without_prefix) > self._MAX_NON_PREFIXED_LEN\n ):\n\n raise falcon.HTTPNotFound()\n\n normalized = os.path.normpath(without_prefix)\n\n if normalized.startswith('../') or normalized.startswith('/'):\n raise falcon.HTTPNotFound()\n\n file_path = os.path.join(self._directory, normalized)\n\n # NOTE(kgriffs): Final sanity-check just to be safe. This check\n # should never succeed, but this should guard against us having\n # overlooked something.\n if '..' 
in file_path or not file_path.startswith(self._directory):\n raise falcon.HTTPNotFound()\n\n req_range = req.range\n if req.range_unit != 'bytes':\n req_range = None\n try:\n stream, length, content_range = _open_range(file_path, req_range)\n resp.set_stream(stream, length)\n except IOError:\n if self._fallback_filename is None:\n raise falcon.HTTPNotFound()\n try:\n stream, length, content_range = _open_range(\n self._fallback_filename, req_range\n )\n resp.set_stream(stream, length)\n file_path = self._fallback_filename\n except IOError:\n raise falcon.HTTPNotFound()\n\n suffix = os.path.splitext(file_path)[1]\n resp.content_type = resp.options.static_media_types.get(\n suffix, 'application/octet-stream'\n )\n resp.accept_ranges = 'bytes'\n\n if self._downloadable:\n resp.downloadable_as = os.path.basename(file_path)\n if content_range:\n resp.status = falcon.HTTP_206\n resp.content_range = content_range\n\n\nclass StaticRouteAsync(StaticRoute):\n \"\"\"Subclass of StaticRoute with modifications to support ASGI apps.\"\"\"\n\n async def __call__(self, req, resp):\n super().__call__(req, resp)\n\n # NOTE(kgriffs): Fixup resp.stream so that it is non-blocking\n resp.stream = _AsyncFileReader(resp.stream)\n\n\nclass _AsyncFileReader:\n \"\"\"Adapts a standard file I/O object so that reads are non-blocking.\"\"\"\n\n def __init__(self, file):\n self._file = file\n self._loop = get_running_loop()\n\n async def read(self, size=-1):\n return await self._loop.run_in_executor(None, partial(self._file.read, size))\n\n async def close(self):\n await self._loop.run_in_executor(None, self._file.close)\n", "path": "falcon/routing/static.py" } ]
diff --git a/docs/_newsfragments/1963.bugfix.rst b/docs/_newsfragments/1963.bugfix.rst new file mode 100644 index 000000000..b917bc17f --- /dev/null +++ b/docs/_newsfragments/1963.bugfix.rst @@ -0,0 +1,3 @@ +Previously, files could be left open when serving via an ASGI static route +(depending on the underlying GC implementation). This has been fixed so that a +file is closed explicitly after rendering the response. diff --git a/falcon/routing/static.py b/falcon/routing/static.py index 19d4ddc7a..2d23e0555 100644 --- a/falcon/routing/static.py +++ b/falcon/routing/static.py @@ -241,3 +241,6 @@ def __init__(self, file): async def read(self, size=-1): return await self._loop.run_in_executor(None, partial(self._file.read, size)) + + async def close(self): + await self._loop.run_in_executor(None, self._file.close) diff --git a/tests/test_static.py b/tests/test_static.py index 67467edb9..591276da4 100644 --- a/tests/test_static.py +++ b/tests/test_static.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import errno import io import os @@ -47,11 +45,14 @@ def __init__(self, size): fd = FakeFD(1337) fd._stat = FakeStat(len(data)) fake_file.fileno = lambda: fd + + patch.current_file = fake_file return fake_file monkeypatch.setattr(io, 'open', open) monkeypatch.setattr(os, 'fstat', lambda fileno: fileno._stat) + patch.current_file = None return patch @@ -575,3 +576,16 @@ def test_bounded_file_wrapper(): assert not buffer.closed fh.close() assert buffer.closed + + +def test_file_closed(client, patch_open): + patch_open(b'test_data') + + client.app.add_static_route('/static', '/var/www/statics') + + resp = client.simulate_request(path='/static/foo/bar.txt') + assert resp.status_code == 200 + assert resp.text == 'test_data' + + assert patch_open.current_file is not None + assert patch_open.current_file.closed
ansible__ansible-modules-core-4649
ios_facts: exception due to missing itertools <!--- Verify first that your issue/request is not already reported in GitHub --> ##### ISSUE TYPE - Bug Report ##### COMPONENT NAME ios_facts ##### ANSIBLE VERSION <!--- Paste verbatim output from “ansible --version” between quotes below --> ``` ansible 2.2.0 (devel 9963ae1d3e) last updated 2016/09/02 19:50:22 (GMT +1100) lib/ansible/modules/core: (detached HEAD 7e79c59d38) last updated 2016/09/02 19:50:32 (GMT +1100) lib/ansible/modules/extras: (detached HEAD e8a5442345) last updated 2016/09/02 19:50:32 (GMT +1100) config file = configured module search path = Default w/o overrides ``` ##### CONFIGURATION <!--- defaults --> ##### OS / ENVIRONMENT <!--- Mention the OS you are running Ansible from, and the OS you are managing, or say “N/A” for anything that is not platform-specific. --> CentOS 7 ##### SUMMARY <!--- Explain the problem briefly --> Running against Cisco ASR1000 resulted in exceptions being thrown. ##### STEPS TO REPRODUCE ansible -m ios_facts -a "host=asr01.lab username=pbaker password=xxxxxx gather_subset=interfaces" localhost -vvv ##### RESULTS AND SUGGESTED FIX Initial exception was not very helpful. ``` An exception occurred during task execution. The full traceback is: Traceback (most recent call last): File "/tmp/ansible_l3i6QO/ansible_module_ios_facts.py", line 455, in <module> main() File "/tmp/ansible_l3i6QO/ansible_module_ios_facts.py", line 444, in main module.exit_json(out=module.from_json(runner.items)) File "/tmp/ansible_l3i6QO/ansible_modlib.zip/ansible/module_utils/basic.py", line 1781, in from_json File "/usr/lib64/python2.7/json/__init__.py", line 338, in loads return _default_decoder.decode(s) File "/usr/lib64/python2.7/json/decoder.py", line 365, in decode obj, end = self.raw_decode(s, idx=_w(s, 0).end()) TypeError: expected string or buffer ``` Comparing ios_facts.py to other _facts.py modules revealed the following line was missing, adding it back in seemed to help. ``` @@ -440,6 +440,7 @@ def main(): inst.populate() facts.update(inst.facts) except Exception: + raise module.exit_json(out=module.from_json(runner.items)) ansible_facts = dict() ``` Which led to this traceback ``` An exception occurred during task execution. The full traceback is: Traceback (most recent call last): File "/tmp/ansible_HvEaaO/ansible_module_ios_facts.py", line 455, in <module> main() File "/tmp/ansible_HvEaaO/ansible_module_ios_facts.py", line 440, in main inst.populate() File "/tmp/ansible_HvEaaO/ansible_module_ios_facts.py", line 238, in populate self.populate_ipv6_interfaces(data) File "/tmp/ansible_HvEaaO/ansible_module_ios_facts.py", line 272, in populate_ipv6_interfaces for addr, subnet in itertools.izip(addresses, subnets): NameError: global name 'itertools' is not defined ``` So I made the following modification ``` @@ -128,7 +128,7 @@ import re from ansible.module_utils.basic import get_exception from ansible.module_utils.netcli import CommandRunner, AddCommandError from ansible.module_utils.ios import NetworkModule - +import itertools def add_command(runner, command): try: ``` Note: I'm very new to ansible, github and python, so sorry if I have gone against conventions in any way!
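The report also shows why the first traceback was so misleading: the bare `except Exception:` in `main()` swallows the `NameError` from the missing `import itertools`, and the fallback call to `module.from_json(runner.items)` then fails with an unrelated `TypeError`. A simplified, hypothetical reconstruction of that masking (the names are stand-ins, not the module's real API):

```python
import json


def populate():
    # Stands in for populate_ipv6_interfaces(): `itertools` was never imported,
    # so this raises NameError at runtime.
    return itertools.izip([], [])  # noqa: F821


def main(runner_items=None):
    try:
        populate()
    except Exception:
        # raise  # <- the reporter's temporary change; re-raising surfaces the NameError
        json.loads(runner_items)  # raises TypeError, since runner_items is not a string
```

Calling `main()` reproduces the confusing "expected string or buffer" style error, while uncommenting `raise` reveals the real `NameError`, which the suggested `import itertools` resolves.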
[ { "content": "#!/usr/bin/python\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. If not, see <http://www.gnu.org/licenses/>.\n#\nDOCUMENTATION = \"\"\"\n---\nmodule: ios_facts\nversion_added: \"2.2\"\nauthor: \"Peter Sprygada (@privateip)\"\nshort_description: Collect facts from remote devices running IOS\ndescription:\n - Collects a base set of device facts from a remote device that\n is running IOS. This module prepends all of the\n base network fact keys with C(ansible_net_<fact>). The facts\n module will always collect a base set of facts from the device\n and can enable or disable collection of additional facts.\nextends_documentation_fragment: ios\noptions:\n gather_subset:\n description:\n - When supplied, this argument will restrict the facts collected\n to a given subset. Possible values for this argument include\n all, hardware, config, and interfaces. Can specify a list of\n values to include a larger subset. Values can also be used\n with an initial M(!) to specify that a specific subset should\n not be collected.\n required: false\n default: '!config'\n\"\"\"\n\nEXAMPLES = \"\"\"\n# Collect all facts from the device\n- ios_facts:\n gather_subset: all\n\n# Collect only the config and default facts\n- ios_facts:\n gather_subset:\n - config\n\n# Do not collect hardware facts\n- ios_facts:\n gather_subset:\n - \"!hardware\"\n\"\"\"\n\nRETURN = \"\"\"\nansible_net_gather_subset:\n description: The list of fact subsets collected from the device\n returned: always\n type: list\n\n# default\nansible_net_model:\n description: The model name returned from the device\n returned: always\n type: str\nansible_net_serialnum:\n description: The serial number of the remote device\n returned: always\n type: str\nansible_net_version:\n description: The operating system version running on the remote device\n returned: always\n type: str\nansible_net_hostname:\n description: The configured hostname of the device\n returned: always\n type: string\nansible_net_image:\n description: The image file the device is running\n returned: always\n type: string\n\n# hardware\nansible_net_filesystems:\n description: All file system names availabe on the device\n returned: when hardware is configured\n type: list\nansible_net_memfree_mb:\n description: The available free memory on the remote device in Mb\n returned: when hardware is configured\n type: int\nansible_net_memtotal_mb:\n description: The total memory on the remote device in Mb\n returned: when hardware is configured\n type: int\n\n# config\nansible_net_config:\n description: The current active config from the device\n returned: when config is configured\n type: str\n\n# interfaces\nansible_net_all_ipv4_addresses:\n description: All IPv4 addresses configured on the device\n returned: when interfaces is configured\n type: list\nansible_net_all_ipv6_addresses:\n description: All IPv6 addresses configured on the device\n returned: when interfaces is configured\n type: 
list\nansible_net_interfaces:\n description: A hash of all interfaces running on the system\n returned: when interfaces is configured\n type: dict\nansible_net_neighbors:\n description: The list of LLDP neighbors from the remote device\n returned: when interfaces is configured\n type: dict\n\"\"\"\nimport re\n\nfrom ansible.module_utils.basic import get_exception\nfrom ansible.module_utils.netcli import CommandRunner, AddCommandError\nfrom ansible.module_utils.ios import NetworkModule\n\n\ndef add_command(runner, command):\n try:\n runner.add_command(command)\n except AddCommandError:\n # AddCommandError is raised for any issue adding a command to\n # the runner. Silently ignore the exception in this case\n pass\n\nclass FactsBase(object):\n\n def __init__(self, runner):\n self.runner = runner\n self.facts = dict()\n\n self.commands()\n\nclass Default(FactsBase):\n\n def commands(self):\n add_command(self.runner, 'show version')\n\n def populate(self):\n data = self.runner.get_command('show version')\n\n self.facts['version'] = self.parse_version(data)\n self.facts['serialnum'] = self.parse_serialnum(data)\n self.facts['model'] = self.parse_model(data)\n self.facts['image'] = self.parse_image(data)\n self.facts['hostname'] = self.parse_hostname(data)\n\n def parse_version(self, data):\n match = re.search(r'Version (\\S+),', data)\n if match:\n return match.group(1)\n\n def parse_hostname(self, data):\n match = re.search(r'^(.+) uptime', data, re.M)\n if match:\n return match.group(1)\n\n def parse_model(self, data):\n match = re.search(r'^Cisco (.+) \\(revision', data, re.M)\n if match:\n return match.group(1)\n\n def parse_image(self, data):\n match = re.search(r'image file is \"(.+)\"', data)\n if match:\n return match.group(1)\n\n def parse_serialnum(self, data):\n match = re.search(r'board ID (\\S+)', data)\n if match:\n return match.group(1)\n\n\nclass Hardware(FactsBase):\n\n def commands(self):\n add_command(self.runner, 'dir all-filesystems | include Directory')\n add_command(self.runner, 'show version')\n add_command(self.runner, 'show memory statistics | include Processor')\n\n def populate(self):\n data = self.runner.get_command('dir all-filesystems | include Directory')\n self.facts['filesystems'] = self.parse_filesystems(data)\n\n data = self.runner.get_command('show memory statistics | include Processor')\n match = re.findall('\\s(\\d+)\\s', data)\n if match:\n self.facts['memtotal_mb'] = int(match[0]) / 1024\n self.facts['memfree_mb'] = int(match[1]) / 1024\n\n def parse_filesystems(self, data):\n return re.findall(r'^Directory of (\\S+)/', data, re.M)\n\n\nclass Config(FactsBase):\n\n def commands(self):\n add_command(self.runner, 'show running-config')\n\n def populate(self):\n self.facts['config'] = self.runner.get_command('show running-config')\n\n\nclass Interfaces(FactsBase):\n\n def commands(self):\n add_command(self.runner, 'show interfaces')\n add_command(self.runner, 'show ipv6 interface')\n add_command(self.runner, 'show lldp')\n add_command(self.runner, 'show lldp neighbors detail')\n\n def populate(self):\n self.facts['all_ipv4_addresses'] = list()\n self.facts['all_ipv6_addresses'] = list()\n\n data = self.runner.get_command('show interfaces')\n interfaces = self.parse_interfaces(data)\n self.facts['interfaces'] = self.populate_interfaces(interfaces)\n\n data = self.runner.get_command('show ipv6 interface')\n if len(data) > 0:\n data = self.parse_interfaces(data)\n self.populate_ipv6_interfaces(data)\n\n if 'LLDP is not enabled' not in 
self.runner.get_command('show lldp'):\n neighbors = self.runner.get_command('show lldp neighbors detail')\n self.facts['neighbors'] = self.parse_neighbors(neighbors)\n\n def populate_interfaces(self, interfaces):\n facts = dict()\n for key, value in interfaces.iteritems():\n intf = dict()\n intf['description'] = self.parse_description(value)\n intf['macaddress'] = self.parse_macaddress(value)\n\n ipv4 = self.parse_ipv4(value)\n intf['ipv4'] = self.parse_ipv4(value)\n if ipv4:\n self.add_ip_address(ipv4['address'], 'ipv4')\n\n intf['mtu'] = self.parse_mtu(value)\n intf['bandwidth'] = self.parse_bandwidth(value)\n intf['mediatype'] = self.parse_mediatype(value)\n intf['duplex'] = self.parse_duplex(value)\n intf['lineprotocol'] = self.parse_lineprotocol(value)\n intf['operstatus'] = self.parse_operstatus(value)\n intf['type'] = self.parse_type(value)\n\n facts[key] = intf\n return facts\n\n def populate_ipv6_interfaces(self, data):\n for key, value in data.iteritems():\n self.facts['interfaces'][key]['ipv6'] = list()\n addresses = re.findall(r'\\s+(.+), subnet', value, re.M)\n subnets = re.findall(r', subnet is (.+)$', value, re.M)\n for addr, subnet in itertools.izip(addresses, subnets):\n ipv6 = dict(address=addr.strip(), subnet=subnet.strip())\n self.add_ip_address(addr.strip(), 'ipv6')\n self.facts['interfaces'][key]['ipv6'].append(ipv6)\n\n def add_ip_address(self, address, family):\n if family == 'ipv4':\n self.facts['all_ipv4_addresses'].append(address)\n else:\n self.facts['all_ipv6_addresses'].append(address)\n\n def parse_neighbors(self, neighbors):\n facts = dict()\n for entry in neighbors.split('------------------------------------------------'):\n if entry == '':\n continue\n intf = self.parse_lldp_intf(entry)\n if intf not in facts:\n facts[intf] = list()\n fact = dict()\n fact['host'] = self.parse_lldp_host(entry)\n fact['port'] = self.parse_lldp_port(entry)\n facts[intf].append(fact)\n return facts\n\n def parse_interfaces(self, data):\n parsed = dict()\n for line in data.split('\\n'):\n if len(line) == 0:\n continue\n elif line[0] == ' ':\n parsed[key] += '\\n%s' % line\n else:\n match = re.match(r'^(\\S+)', line)\n if match:\n key = match.group(1)\n parsed[key] = line\n return parsed\n\n def parse_description(self, data):\n match = re.search(r'Description: (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_macaddress(self, data):\n match = re.search(r'address is (\\S+)', data)\n if match:\n return match.group(1)\n\n def parse_ipv4(self, data):\n match = re.search(r'Internet address is (\\S+)', data)\n if match:\n addr, masklen = match.group(1).split('/')\n return dict(address=addr, masklen=int(masklen))\n\n def parse_mtu(self, data):\n match = re.search(r'MTU (\\d+)', data)\n if match:\n return int(match.group(1))\n\n def parse_bandwidth(self, data):\n match = re.search(r'BW (\\d+)', data)\n if match:\n return int(match.group(1))\n\n def parse_duplex(self, data):\n match = re.search(r'(\\w+) Duplex', data, re.M)\n if match:\n return match.group(1)\n\n def parse_mediatype(self, data):\n match = re.search(r'media type is (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_type(self, data):\n match = re.search(r'Hardware is (.+),', data, re.M)\n if match:\n return match.group(1)\n\n def parse_lineprotocol(self, data):\n match = re.search(r'line protocol is (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_operstatus(self, data):\n match = re.search(r'^(?:.+) is (.+),', data, re.M)\n if match:\n return match.group(1)\n\n 
def parse_lldp_intf(self, data):\n match = re.search(r'^Local Intf: (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_lldp_host(self, data):\n match = re.search(r'System Name: (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_lldp_port(self, data):\n match = re.search(r'Port id: (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n\nFACT_SUBSETS = dict(\n default=Default,\n hardware=Hardware,\n interfaces=Interfaces,\n config=Config,\n)\n\nVALID_SUBSETS = frozenset(FACT_SUBSETS.keys())\n\ndef main():\n spec = dict(\n gather_subset=dict(default=['!config'], type='list')\n )\n\n module = NetworkModule(argument_spec=spec, supports_check_mode=True)\n\n gather_subset = module.params['gather_subset']\n\n runable_subsets = set()\n exclude_subsets = set()\n\n for subset in gather_subset:\n if subset == 'all':\n runable_subsets.update(VALID_SUBSETS)\n continue\n\n if subset.startswith('!'):\n subset = subset[1:]\n if subset == 'all':\n exclude_subsets.update(VALID_SUBSETS)\n continue\n exclude = True\n else:\n exclude = False\n\n if subset not in VALID_SUBSETS:\n module.fail_json(msg='Bad subset')\n\n if exclude:\n exclude_subsets.add(subset)\n else:\n runable_subsets.add(subset)\n\n if not runable_subsets:\n runable_subsets.update(VALID_SUBSETS)\n\n runable_subsets.difference_update(exclude_subsets)\n runable_subsets.add('default')\n\n facts = dict()\n facts['gather_subset'] = list(runable_subsets)\n\n runner = CommandRunner(module)\n\n instances = list()\n for key in runable_subsets:\n instances.append(FACT_SUBSETS[key](runner))\n\n runner.run()\n\n try:\n for inst in instances:\n inst.populate()\n facts.update(inst.facts)\n except Exception:\n module.exit_json(out=module.from_json(runner.items))\n\n ansible_facts = dict()\n for key, value in facts.iteritems():\n key = 'ansible_net_%s' % key\n ansible_facts[key] = value\n\n module.exit_json(ansible_facts=ansible_facts)\n\n\nif __name__ == '__main__':\n main()\n", "path": "network/ios/ios_facts.py" } ]
[ { "content": "#!/usr/bin/python\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. If not, see <http://www.gnu.org/licenses/>.\n#\nDOCUMENTATION = \"\"\"\n---\nmodule: ios_facts\nversion_added: \"2.2\"\nauthor: \"Peter Sprygada (@privateip)\"\nshort_description: Collect facts from remote devices running IOS\ndescription:\n - Collects a base set of device facts from a remote device that\n is running IOS. This module prepends all of the\n base network fact keys with C(ansible_net_<fact>). The facts\n module will always collect a base set of facts from the device\n and can enable or disable collection of additional facts.\nextends_documentation_fragment: ios\noptions:\n gather_subset:\n description:\n - When supplied, this argument will restrict the facts collected\n to a given subset. Possible values for this argument include\n all, hardware, config, and interfaces. Can specify a list of\n values to include a larger subset. Values can also be used\n with an initial M(!) to specify that a specific subset should\n not be collected.\n required: false\n default: '!config'\n\"\"\"\n\nEXAMPLES = \"\"\"\n# Collect all facts from the device\n- ios_facts:\n gather_subset: all\n\n# Collect only the config and default facts\n- ios_facts:\n gather_subset:\n - config\n\n# Do not collect hardware facts\n- ios_facts:\n gather_subset:\n - \"!hardware\"\n\"\"\"\n\nRETURN = \"\"\"\nansible_net_gather_subset:\n description: The list of fact subsets collected from the device\n returned: always\n type: list\n\n# default\nansible_net_model:\n description: The model name returned from the device\n returned: always\n type: str\nansible_net_serialnum:\n description: The serial number of the remote device\n returned: always\n type: str\nansible_net_version:\n description: The operating system version running on the remote device\n returned: always\n type: str\nansible_net_hostname:\n description: The configured hostname of the device\n returned: always\n type: string\nansible_net_image:\n description: The image file the device is running\n returned: always\n type: string\n\n# hardware\nansible_net_filesystems:\n description: All file system names availabe on the device\n returned: when hardware is configured\n type: list\nansible_net_memfree_mb:\n description: The available free memory on the remote device in Mb\n returned: when hardware is configured\n type: int\nansible_net_memtotal_mb:\n description: The total memory on the remote device in Mb\n returned: when hardware is configured\n type: int\n\n# config\nansible_net_config:\n description: The current active config from the device\n returned: when config is configured\n type: str\n\n# interfaces\nansible_net_all_ipv4_addresses:\n description: All IPv4 addresses configured on the device\n returned: when interfaces is configured\n type: list\nansible_net_all_ipv6_addresses:\n description: All IPv6 addresses configured on the device\n returned: when interfaces is configured\n type: 
list\nansible_net_interfaces:\n description: A hash of all interfaces running on the system\n returned: when interfaces is configured\n type: dict\nansible_net_neighbors:\n description: The list of LLDP neighbors from the remote device\n returned: when interfaces is configured\n type: dict\n\"\"\"\nimport re\nimport itertools\n\nfrom ansible.module_utils.basic import get_exception\nfrom ansible.module_utils.netcli import CommandRunner, AddCommandError\nfrom ansible.module_utils.ios import NetworkModule\n\n\ndef add_command(runner, command):\n try:\n runner.add_command(command)\n except AddCommandError:\n # AddCommandError is raised for any issue adding a command to\n # the runner. Silently ignore the exception in this case\n pass\n\nclass FactsBase(object):\n\n def __init__(self, runner):\n self.runner = runner\n self.facts = dict()\n\n self.commands()\n\nclass Default(FactsBase):\n\n def commands(self):\n add_command(self.runner, 'show version')\n\n def populate(self):\n data = self.runner.get_command('show version')\n\n self.facts['version'] = self.parse_version(data)\n self.facts['serialnum'] = self.parse_serialnum(data)\n self.facts['model'] = self.parse_model(data)\n self.facts['image'] = self.parse_image(data)\n self.facts['hostname'] = self.parse_hostname(data)\n\n def parse_version(self, data):\n match = re.search(r'Version (\\S+),', data)\n if match:\n return match.group(1)\n\n def parse_hostname(self, data):\n match = re.search(r'^(.+) uptime', data, re.M)\n if match:\n return match.group(1)\n\n def parse_model(self, data):\n match = re.search(r'^Cisco (.+) \\(revision', data, re.M)\n if match:\n return match.group(1)\n\n def parse_image(self, data):\n match = re.search(r'image file is \"(.+)\"', data)\n if match:\n return match.group(1)\n\n def parse_serialnum(self, data):\n match = re.search(r'board ID (\\S+)', data)\n if match:\n return match.group(1)\n\n\nclass Hardware(FactsBase):\n\n def commands(self):\n add_command(self.runner, 'dir all-filesystems | include Directory')\n add_command(self.runner, 'show version')\n add_command(self.runner, 'show memory statistics | include Processor')\n\n def populate(self):\n data = self.runner.get_command('dir all-filesystems | include Directory')\n self.facts['filesystems'] = self.parse_filesystems(data)\n\n data = self.runner.get_command('show memory statistics | include Processor')\n match = re.findall('\\s(\\d+)\\s', data)\n if match:\n self.facts['memtotal_mb'] = int(match[0]) / 1024\n self.facts['memfree_mb'] = int(match[1]) / 1024\n\n def parse_filesystems(self, data):\n return re.findall(r'^Directory of (\\S+)/', data, re.M)\n\n\nclass Config(FactsBase):\n\n def commands(self):\n add_command(self.runner, 'show running-config')\n\n def populate(self):\n self.facts['config'] = self.runner.get_command('show running-config')\n\n\nclass Interfaces(FactsBase):\n\n def commands(self):\n add_command(self.runner, 'show interfaces')\n add_command(self.runner, 'show ipv6 interface')\n add_command(self.runner, 'show lldp')\n add_command(self.runner, 'show lldp neighbors detail')\n\n def populate(self):\n self.facts['all_ipv4_addresses'] = list()\n self.facts['all_ipv6_addresses'] = list()\n\n data = self.runner.get_command('show interfaces')\n interfaces = self.parse_interfaces(data)\n self.facts['interfaces'] = self.populate_interfaces(interfaces)\n\n data = self.runner.get_command('show ipv6 interface')\n if len(data) > 0:\n data = self.parse_interfaces(data)\n self.populate_ipv6_interfaces(data)\n\n if 'LLDP is not enabled' not in 
self.runner.get_command('show lldp'):\n neighbors = self.runner.get_command('show lldp neighbors detail')\n self.facts['neighbors'] = self.parse_neighbors(neighbors)\n\n def populate_interfaces(self, interfaces):\n facts = dict()\n for key, value in interfaces.iteritems():\n intf = dict()\n intf['description'] = self.parse_description(value)\n intf['macaddress'] = self.parse_macaddress(value)\n\n ipv4 = self.parse_ipv4(value)\n intf['ipv4'] = self.parse_ipv4(value)\n if ipv4:\n self.add_ip_address(ipv4['address'], 'ipv4')\n\n intf['mtu'] = self.parse_mtu(value)\n intf['bandwidth'] = self.parse_bandwidth(value)\n intf['mediatype'] = self.parse_mediatype(value)\n intf['duplex'] = self.parse_duplex(value)\n intf['lineprotocol'] = self.parse_lineprotocol(value)\n intf['operstatus'] = self.parse_operstatus(value)\n intf['type'] = self.parse_type(value)\n\n facts[key] = intf\n return facts\n\n def populate_ipv6_interfaces(self, data):\n for key, value in data.iteritems():\n self.facts['interfaces'][key]['ipv6'] = list()\n addresses = re.findall(r'\\s+(.+), subnet', value, re.M)\n subnets = re.findall(r', subnet is (.+)$', value, re.M)\n for addr, subnet in itertools.izip(addresses, subnets):\n ipv6 = dict(address=addr.strip(), subnet=subnet.strip())\n self.add_ip_address(addr.strip(), 'ipv6')\n self.facts['interfaces'][key]['ipv6'].append(ipv6)\n\n def add_ip_address(self, address, family):\n if family == 'ipv4':\n self.facts['all_ipv4_addresses'].append(address)\n else:\n self.facts['all_ipv6_addresses'].append(address)\n\n def parse_neighbors(self, neighbors):\n facts = dict()\n for entry in neighbors.split('------------------------------------------------'):\n if entry == '':\n continue\n intf = self.parse_lldp_intf(entry)\n if intf not in facts:\n facts[intf] = list()\n fact = dict()\n fact['host'] = self.parse_lldp_host(entry)\n fact['port'] = self.parse_lldp_port(entry)\n facts[intf].append(fact)\n return facts\n\n def parse_interfaces(self, data):\n parsed = dict()\n for line in data.split('\\n'):\n if len(line) == 0:\n continue\n elif line[0] == ' ':\n parsed[key] += '\\n%s' % line\n else:\n match = re.match(r'^(\\S+)', line)\n if match:\n key = match.group(1)\n parsed[key] = line\n return parsed\n\n def parse_description(self, data):\n match = re.search(r'Description: (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_macaddress(self, data):\n match = re.search(r'address is (\\S+)', data)\n if match:\n return match.group(1)\n\n def parse_ipv4(self, data):\n match = re.search(r'Internet address is (\\S+)', data)\n if match:\n addr, masklen = match.group(1).split('/')\n return dict(address=addr, masklen=int(masklen))\n\n def parse_mtu(self, data):\n match = re.search(r'MTU (\\d+)', data)\n if match:\n return int(match.group(1))\n\n def parse_bandwidth(self, data):\n match = re.search(r'BW (\\d+)', data)\n if match:\n return int(match.group(1))\n\n def parse_duplex(self, data):\n match = re.search(r'(\\w+) Duplex', data, re.M)\n if match:\n return match.group(1)\n\n def parse_mediatype(self, data):\n match = re.search(r'media type is (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_type(self, data):\n match = re.search(r'Hardware is (.+),', data, re.M)\n if match:\n return match.group(1)\n\n def parse_lineprotocol(self, data):\n match = re.search(r'line protocol is (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_operstatus(self, data):\n match = re.search(r'^(?:.+) is (.+),', data, re.M)\n if match:\n return match.group(1)\n\n 
def parse_lldp_intf(self, data):\n match = re.search(r'^Local Intf: (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_lldp_host(self, data):\n match = re.search(r'System Name: (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n def parse_lldp_port(self, data):\n match = re.search(r'Port id: (.+)$', data, re.M)\n if match:\n return match.group(1)\n\n\nFACT_SUBSETS = dict(\n default=Default,\n hardware=Hardware,\n interfaces=Interfaces,\n config=Config,\n)\n\nVALID_SUBSETS = frozenset(FACT_SUBSETS.keys())\n\ndef main():\n spec = dict(\n gather_subset=dict(default=['!config'], type='list')\n )\n\n module = NetworkModule(argument_spec=spec, supports_check_mode=True)\n\n gather_subset = module.params['gather_subset']\n\n runable_subsets = set()\n exclude_subsets = set()\n\n for subset in gather_subset:\n if subset == 'all':\n runable_subsets.update(VALID_SUBSETS)\n continue\n\n if subset.startswith('!'):\n subset = subset[1:]\n if subset == 'all':\n exclude_subsets.update(VALID_SUBSETS)\n continue\n exclude = True\n else:\n exclude = False\n\n if subset not in VALID_SUBSETS:\n module.fail_json(msg='Bad subset')\n\n if exclude:\n exclude_subsets.add(subset)\n else:\n runable_subsets.add(subset)\n\n if not runable_subsets:\n runable_subsets.update(VALID_SUBSETS)\n\n runable_subsets.difference_update(exclude_subsets)\n runable_subsets.add('default')\n\n facts = dict()\n facts['gather_subset'] = list(runable_subsets)\n\n runner = CommandRunner(module)\n\n instances = list()\n for key in runable_subsets:\n instances.append(FACT_SUBSETS[key](runner))\n\n runner.run()\n\n try:\n for inst in instances:\n inst.populate()\n facts.update(inst.facts)\n except Exception:\n module.exit_json(out=module.from_json(runner.items))\n\n ansible_facts = dict()\n for key, value in facts.iteritems():\n key = 'ansible_net_%s' % key\n ansible_facts[key] = value\n\n module.exit_json(ansible_facts=ansible_facts)\n\n\nif __name__ == '__main__':\n main()\n", "path": "network/ios/ios_facts.py" } ]
diff --git a/network/ios/ios_facts.py b/network/ios/ios_facts.py
index d842c2b4c09..884e9b5b296 100644
--- a/network/ios/ios_facts.py
+++ b/network/ios/ios_facts.py
@@ -124,6 +124,7 @@
   type: dict
 """
 import re
+import itertools
 
 from ansible.module_utils.basic import get_exception
 from ansible.module_utils.netcli import CommandRunner, AddCommandError
fossasia__open-event-server-395
list_events url is inconsistent in API v2
The url is `/events/` whereas it should be `/events` to be consistent with other urls.
[ { "content": "from flask.ext.restplus import Resource, Namespace, fields\n\nfrom open_event.models.event import Event as EventModel\nfrom .helpers import get_object_list, get_object_or_404\n\napi = Namespace('events', description='Events')\n\nEVENT = api.model('Event', {\n 'id': fields.Integer(required=True),\n 'name': fields.String,\n 'email': fields.String,\n 'color': fields.String,\n 'logo': fields.String,\n 'start_time': fields.DateTime,\n 'end_time': fields.DateTime,\n 'latitude': fields.Float,\n 'longitude': fields.Float,\n 'slogan': fields.String,\n 'url': fields.String,\n 'location_name': fields.String,\n})\n\n\[email protected]('/<int:event_id>')\[email protected]('event_id')\[email protected](404, 'Event not found')\nclass Event(Resource):\n @api.doc('get_event')\n @api.marshal_with(EVENT)\n def get(self, event_id):\n \"\"\"Fetch an event given its id\"\"\"\n return get_object_or_404(EventModel, event_id)\n\n\[email protected]('/')\nclass EventList(Resource):\n @api.doc('list_events')\n @api.marshal_list_with(EVENT)\n def get(self):\n \"\"\"List all events\"\"\"\n return get_object_list(EventModel)\n", "path": "open_event/api/events.py" } ]
[ { "content": "from flask.ext.restplus import Resource, Namespace, fields\n\nfrom open_event.models.event import Event as EventModel\nfrom .helpers import get_object_list, get_object_or_404\n\napi = Namespace('events', description='Events')\n\nEVENT = api.model('Event', {\n 'id': fields.Integer(required=True),\n 'name': fields.String,\n 'email': fields.String,\n 'color': fields.String,\n 'logo': fields.String,\n 'start_time': fields.DateTime,\n 'end_time': fields.DateTime,\n 'latitude': fields.Float,\n 'longitude': fields.Float,\n 'slogan': fields.String,\n 'url': fields.String,\n 'location_name': fields.String,\n})\n\n\[email protected]('/<int:event_id>')\[email protected]('event_id')\[email protected](404, 'Event not found')\nclass Event(Resource):\n @api.doc('get_event')\n @api.marshal_with(EVENT)\n def get(self, event_id):\n \"\"\"Fetch an event given its id\"\"\"\n return get_object_or_404(EventModel, event_id)\n\n\[email protected]('')\nclass EventList(Resource):\n @api.doc('list_events')\n @api.marshal_list_with(EVENT)\n def get(self):\n \"\"\"List all events\"\"\"\n return get_object_list(EventModel)\n", "path": "open_event/api/events.py" } ]
diff --git a/open_event/api/events.py b/open_event/api/events.py
index 86b3ed133b..ef679005aa 100644
--- a/open_event/api/events.py
+++ b/open_event/api/events.py
@@ -32,7 +32,7 @@ def get(self, event_id):
         return get_object_or_404(EventModel, event_id)
 
 
-@api.route('/')
+@api.route('')
 class EventList(Resource):
     @api.doc('list_events')
     @api.marshal_list_with(EVENT)
ansible__ansible-modules-core-3683
docker_service module does not work
##### ISSUE TYPE
- Bug Report

##### COMPONENT NAME
docker_service

##### ANSIBLE VERSION
```
ansible 2.2.0 (devel 9ad5a32208) last updated 2016/05/17 15:58:35 (GMT +000)
  lib/ansible/modules/core: (detached HEAD 127d518011) last updated 2016/05/17 13:42:30 (GMT +000)
  lib/ansible/modules/extras: (detached HEAD f953d5dc0c) last updated 2016/05/17 13:42:40 (GMT +000)
  config file =
  configured module search path = Default w/o overrides
```

##### CONFIGURATION
None

##### OS / ENVIRONMENT
Fedora 23

##### SUMMARY
docker_service does not work the way it is documented, and throws a traceback as posted under

##### STEPS TO REPRODUCE
The [docker-compose.yaml](https://github.com/rafabene/devops-demo/blob/master/compose/docker-compose.yml) file I am using is -

```
version: "2"

networks:
  mynet:

services:
  db:
    container_name: "db"
    image: postgres
    networks:
      - mynet
    ports:
      - "5432:5432"
    environment:
      - POSTGRES_USER=ticketmonster
      - POSTGRES_PASSWORD=ticketmonster-docker

  modcluster:
    container_name: "modcluster"
    networks:
      - mynet
    image: karm/mod_cluster-master-dockerhub
    environment:
      - MODCLUSTER_NET=192. 172. 10. 179. 213.
      - MODCLUSTER_PORT=80
    ports:
      - "80:80"

  wildfly:
    image: rafabene/wildfly-ticketmonster-ha
    #build: ../Dockerfiles/ticketmonster-ha/
    networks:
      - mynet
```

The ansible playbook I created -

```
- name: deploy docker compose artifacts
  hosts: localhost
  connection: local
  tasks:
    - name: compose_up
      docker_service:
        project_src: /root/ticket_monster/
        project_name: Ticket Monster
        state: present

    - name: scale_3
      docker_service:
        project_src: /root/ticket_monster/
        state: present
        scale: {'wildfly': 3}

    - name: scale_2
      docker_service:
        project_src: /root/ticket_monster/
        state: present
        scale: {'wildfly': 2}

    - name: compose_down
      docker_service:
        project_src: /root/ticket_monster/
        state: absent
```

To reproduce, run - `ansible-playbook compose_playbook.yaml`

##### EXPECTED RESULTS
```bash
# ansible-playbook compose_playbook.yaml
[WARNING]: Host file not found: /etc/ansible/hosts
[WARNING]: provided hosts list is empty, only localhost is available

PLAY [deploy docker compose artifacts] *****************************************

TASK [setup] *******************************************************************
ok: [localhost]

TASK [compose_up] **************************************************************
changed: [localhost]

TASK [scale_3] *****************************************************************
changed: [localhost]

TASK [scale_2] *****************************************************************
changed: [localhost]

TASK [compose_down] ************************************************************
changed: [localhost]

PLAY RECAP *********************************************************************
localhost : ok=5 changed=4 unreachable=0 failed=0
```

##### ACTUAL RESULTS
```bash
# ansible-playbook --step -vvv compose_playbook.yaml
No config file found; using defaults
[WARNING]: Host file not found: /etc/ansible/hosts
[WARNING]: provided hosts list is empty, only localhost is available

PLAYBOOK: compose_playbook.yaml ************************************************
1 plays in compose_playbook.yaml

PLAY [deploy docker compose artifacts] *****************************************
Perform task: TASK: setup (N)o/(y)es/(c)ontinue: y

Perform task: TASK: setup (N)o/(y)es/(c)ontinue: *******************************

TASK [setup] *******************************************************************
<127.0.0.1> ESTABLISH LOCAL CONNECTION FOR USER: root
<127.0.0.1> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo $HOME/.ansible/tmp/ansible-tmp-1463502948.45-35859527533704 `" && echo ansible-tmp-1463502948.45-35859527533704="` echo $HOME/.ansible/tmp/ansible-tmp-1463502948.45-35859527533704 `" )'
<127.0.0.1> PUT /tmp/tmp549TTU TO /root/.ansible/tmp/ansible-tmp-1463502948.45-35859527533704/setup.py
<127.0.0.1> EXEC /bin/sh -c 'chmod -R u+x /root/.ansible/tmp/ansible-tmp-1463502948.45-35859527533704/'
<127.0.0.1> EXEC /bin/sh -c 'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /bin/python /root/.ansible/tmp/ansible-tmp-1463502948.45-35859527533704/setup.py; rm -rf "/root/.ansible/tmp/ansible-tmp-1463502948.45-35859527533704/" > /dev/null 2>&1'
ok: [localhost]
Perform task: TASK: compose_up (N)o/(y)es/(c)ontinue: y

Perform task: TASK: compose_up (N)o/(y)es/(c)ontinue: **************************

TASK [compose_up] **************************************************************
task path: /root/compose_env/compose_playbook.yaml:5
<127.0.0.1> ESTABLISH LOCAL CONNECTION FOR USER: root
<127.0.0.1> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo $HOME/.ansible/tmp/ansible-tmp-1463502951.99-46439211651353 `" && echo ansible-tmp-1463502951.99-46439211651353="` echo $HOME/.ansible/tmp/ansible-tmp-1463502951.99-46439211651353 `" )'
<127.0.0.1> PUT /tmp/tmpbxDxDC TO /root/.ansible/tmp/ansible-tmp-1463502951.99-46439211651353/docker_service.py
<127.0.0.1> EXEC /bin/sh -c 'chmod -R u+x /root/.ansible/tmp/ansible-tmp-1463502951.99-46439211651353/'
<127.0.0.1> EXEC /bin/sh -c 'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /bin/python /root/.ansible/tmp/ansible-tmp-1463502951.99-46439211651353/docker_service.py; rm -rf "/root/.ansible/tmp/ansible-tmp-1463502951.99-46439211651353/" > /dev/null 2>&1'
An exception occurred during task execution. The full traceback is:
Traceback (most recent call last):
  File "/tmp/ansible_iVSEP_/ansible_module_docker_service.py", line 760, in <module>
    main()
  File "/tmp/ansible_iVSEP_/ansible_module_docker_service.py", line 755, in main
    result = ContainerManager(client).exec_module()
  File "/tmp/ansible_iVSEP_/ansible_module_docker_service.py", line 437, in __init__
    super(ContainerManager, self).__init__(module=client.module)
TypeError: __init__() got an unexpected keyword argument 'module'
fatal: [localhost]: FAILED! => {"changed": false, "failed": true, "invocation": {"module_name": "docker_service"}, "module_stderr": "Traceback (most recent call last):\n File \"/tmp/ansible_iVSEP_/ansible_module_docker_service.py\", line 760, in <module>\n main()\n File \"/tmp/ansible_iVSEP_/ansible_module_docker_service.py\", line 755, in main\n result = ContainerManager(client).exec_module()\n File \"/tmp/ansible_iVSEP_/ansible_module_docker_service.py\", line 437, in __init__\n super(ContainerManager, self).__init__(module=client.module)\nTypeError: __init__() got an unexpected keyword argument 'module'\n", "module_stdout": "", "msg": "MODULE FAILURE", "parsed": false}

NO MORE HOSTS LEFT *************************************************************
to retry, use: --limit @compose_playbook.retry

PLAY RECAP *********************************************************************
localhost : ok=1 changed=0 unreachable=0 failed=1
```
[ { "content": "#!/usr/bin/python\n#\n# Copyright 2016 Red Hat | Ansible\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. If not, see <http://www.gnu.org/licenses/>.\n\nDOCUMENTATION = '''\n\nmodule: docker_service\n\nshort_description: Manage docker services and containers.\n\nversion_added: \"2.1\"\n\nauthor: \"Chris Houseknecht (@chouseknecht)\"\n\ndescription:\n - Consumes docker compose to start, shutdown and scale services.\n - Works with compose versions 1 and 2.\n - Compose can be read from a docker-compose.yml (or .yaml) file or inline using the C(definition) option.\n - See the examples for more details.\n - Supports check mode.\n\noptions:\n project_src:\n description:\n - Path to a directory containing a docker-compose.yml or docker-compose.yaml file.\n - Mutually exclusive with C(definition).\n - Required when no C(definition) is provided.\n type: path\n required: false\n project_name:\n description:\n - Provide a project name. If not provided, the project name is taken from the basename of C(project_src).\n - Required when no C(definition) is provided.\n type: str\n required: false\n files:\n description:\n - List of file names relative to C(project_src). Overrides docker-compose.yml or docker-compose.yaml.\n - Files are loaded and merged in the order given.\n type: list\n required: false\n state:\n description:\n - Desired state of the project.\n - Specifying I(present) is the same as running I(docker-compose up).\n - Specifying I(absent) is the same as running I(docker-compose down).\n choices:\n - absent\n - present\n default: present\n type: str\n required: false\n services:\n description:\n - When C(state) is I(present) run I(docker-compose up) on a subset of services.\n type: list\n required: false\n scale:\n description:\n - When C(sate) is I(present) scale services. 
Provide a dictionary of key/value pairs where the key\n is the name of the service and the value is an integer count for the number of containers.\n type: complex\n required: false\n dependencies:\n description:\n - When C(state) is I(present) specify whether or not to include linked services.\n type: bool\n required: false\n default: true\n definition:\n description:\n - Provide docker-compose yaml describing one or more services, networks and volumes.\n - Mutually exclusive with C(project_src) and C(project_files).\n type: complex\n required: false\n hostname_check:\n description:\n - Whether or not to check the Docker daemon's hostname against the name provided in the client certificate.\n type: bool\n required: false\n default: false\n recreate:\n description:\n - By default containers will be recreated when their configuration differs from the service definition.\n - Setting to I(never) ignores configuration differences and leaves existing containers unchanged.\n - Setting to I(always) forces recreation of all existing containers.\n type: str\n required: false\n choices:\n - always\n - never\n - smart\n default: smart\n build:\n description:\n - Whether or not to build images before starting containers.\n - Missing images will always be built.\n - If an image is present and C(build) is false, the image will not be built.\n - If an image is present and C(build) is true, the image will be built.\n type: bool\n required: false\n default: true\n remove_images:\n description:\n - Use with state I(absent) to remove the all images or only local images.\n type: str\n required: false\n default: null\n remove_volumes:\n description:\n - Use with state I(absent) to remove data volumes.\n required: false\n type: bool\n default: false\n stopped:\n description:\n - Use with state I(present) to leave the containers in an exited or non-running state.\n required: false\n type: bool\n default: false\n restarted:\n description:\n - Use with state I(present) to restart all containers.\n required: false\n type: bool\n default: false\n debug:\n description:\n - Include I(actions) in the return values.\n required: false\n type: bool\n default: false\n\nextends_documentation_fragment:\n - docker\n\nrequirements:\n - \"python >= 2.6\"\n - \"docker-compose >= 1.7.0\"\n - \"Docker API >= 1.20\"\n'''\n\nEXAMPLES = '''\n# Examples use the django example at U(https://docs.docker.com/compose/django/). 
Follow it to create the flask\n# directory\n\n- name: Run using a project directory\n hosts: localhost\n connection: local\n gather_facts: no\n tasks:\n - docker_service:\n project_src: flask\n state: absent\n\n - docker_service:\n project_src: flask\n register: output\n\n - debug: var=output\n\n - docker_service:\n project_src: flask\n build: no\n register: output\n\n - debug: var=output\n\n - assert:\n that: \"not output.changed \"\n\n - docker_service:\n project_src: flask\n build: no\n stopped: true\n register: output\n\n - debug: var=output\n\n - assert:\n that:\n - \"not web.flask_web_1.state.running\"\n - \"not db.flask_db_1.state.running\"\n\n - docker_service:\n project_src: flask\n build: no\n restarted: true\n register: output\n\n - debug: var=output\n\n - assert:\n that:\n - \"web.flask_web_1.state.running\"\n - \"db.flask_db_1.state.running\"\n\n- name: Scale the web service to 2\n hosts: localhost\n connection: local\n gather_facts: no\n tasks:\n - docker_service:\n project_src: flask\n scale:\n web: 2\n register: output\n\n - debug: var=output\n\n- name: Run with inline v2 compose\n hosts: localhost\n connection: local\n gather_facts: no\n tasks:\n - docker_service:\n project_src: flask\n state: absent\n\n - docker_service:\n project_name: flask\n definition:\n version: '2'\n services:\n db:\n image: postgres\n web:\n build: \"{{ playbook_dir }}/flask\"\n command: \"python manage.py runserver 0.0.0.0:8000\"\n volumes:\n - \"{{ playbook_dir }}/flask:/code\"\n ports:\n - \"8000:8000\"\n depends_on:\n - db\n register: output\n\n - debug: var=output\n\n - assert:\n that:\n - \"web.flask_web_1.state.running\"\n - \"db.flask_db_1.state.running\"\n\n- name: Run with inline v1 compose\n hosts: localhost\n connection: local\n gather_facts: no\n tasks:\n - docker_service:\n project_src: flask\n state: absent\n\n - docker_service:\n project_name: flask\n definition:\n db:\n image: postgres\n web:\n build: \"{{ playbook_dir }}/flask\"\n command: \"python manage.py runserver 0.0.0.0:8000\"\n volumes:\n - \"{{ playbook_dir }}/flask:/code\"\n ports:\n - \"8000:8000\"\n links:\n - db\n register: output\n\n - debug: var=output\n\n - assert:\n that:\n - \"web.flask_web_1.state.running\"\n - \"db.flask_db_1.state.running\"\n'''\n\nRETURN = '''\nservice:\n description: Name of the service.\n returned: success\n type: complex\n contains:\n container_name:\n description: Name of the container. 
Format is I(project_service_#).\n returned: success\n type: complex\n contains:\n cmd:\n description: One or more commands to be executed in the container.\n returned: success\n type: list\n example: [\"postgres\"]\n image:\n description: Name of the image from which the container was built.\n returned: success\n type: str\n example: postgres\n labels:\n description: Meta data assigned to the container.\n returned: success\n type: complex\n example: {...}\n networks:\n description: Contains a dictionary for each network to which the container is a member.\n returned: success\n type: complex\n contains:\n IPAddress:\n description: The IP address assigned to the container.\n returned: success\n type: string\n example: 172.17.0.2\n IPPrefixLen:\n description: Number of bits used by the subnet.\n returned: success\n type: int\n example: 16\n aliases:\n description: Aliases assigned to the container by the network.\n returned: success\n type: list\n example: ['db']\n globalIPv6:\n description: IPv6 address assigned to the container.\n returned: success\n type: str\n example: ''\n globalIPv6PrefixLen:\n description: IPv6 subnet length.\n returned: success\n type: int\n example: 0\n links:\n description: List of container names to which this container is linked.\n returned: success\n type: list\n example: null\n macAddress:\n description: Mac Address assigned to the virtual NIC.\n returned: success\n type: str\n example: \"02:42:ac:11:00:02\"\n state:\n description: Information regarding the current disposition of the container.\n returned: success\n type: complex\n contains:\n running:\n description: Whether or not the container is up with a running process.\n returned: success\n type: bool\n example: true\n status:\n description: Description of the running state.\n returned: success\n type: str\n example: running\n\nactions:\n description: Provides the actions to be taken on each service as determined by compose.\n returned: when in check mode or I(debug) true\n type: complex\n contains:\n service_name:\n description: Name of the service.\n returned: always\n type: complex\n contains:\n action:\n description: A descriptive name of the action to be performed on the set of containers\n within the service.\n returned: always\n type: list\n contains:\n id:\n description: the container's long ID\n returned: always\n type: string\n name:\n description: the container's name\n returned: always\n type: string\n short_id:\n description: the container's short ID\n returned: always\n type: string\n'''\n\nHAS_COMPOSE = True\nHAS_COMPOSE_EXC = None\n\nimport yaml\n\nfrom ansible.module_utils.basic import *\n\ntry:\n from compose.cli.command import project_from_options\n from compose.service import ConvergenceStrategy\n from compose.cli.main import convergence_strategy_from_opts, build_action_from_opts, image_type_from_opt\nexcept ImportError as exc:\n HAS_COMPOSE = False\n HAS_COMPOSE_EXC = str(exc)\n\nfrom ansible.module_utils.docker_common import *\n\n\nAUTH_PARAM_MAPPING = {\n u'docker_host': u'--host',\n u'tls': u'--tls',\n u'cacert_path': u'--tlscacert',\n u'cert_path': u'--tlscert',\n u'key_path': u'--tlskey',\n u'tls_verify': u'--tlsverify'\n}\n\n\nclass ContainerManager(DockerBaseClass):\n\n def __init__(self, client):\n\n super(ContainerManager, self).__init__(module=client.module)\n\n self.client = client\n self.project_src = None\n self.files = None\n self.project_name = None\n self.state = None\n self.definition = None\n self.hostname_check = None\n self.timeout = None\n self.remove_images = 
None\n self.remove_orphans = None\n self.remove_volumes = None\n self.stopped = None\n self.restarted = None\n self.recreate = None\n self.build = None\n self.dependencies = None\n self.services = None\n self.scale = None\n self.debug = None\n\n for key, value in client.module.params.items():\n setattr(self, key, value)\n\n self.check_mode = client.check_mode\n\n if not self.debug:\n self.debug = client.module._debug\n\n self.options = dict()\n self.options.update(self._get_auth_options())\n self.options[u'--skip-hostname-check'] = (not self.hostname_check)\n\n if self.project_name:\n self.options[u'--project-name'] = self.project_name\n\n if self.files:\n self.options[u'--file'] = self.files\n\n if not HAS_COMPOSE:\n self.client.fail(\"Unable to load docker-compose. Try `pip install docker-compose`. Error: %s\" % HAS_COMPOSE_EXC)\n\n self.log(\"options: \")\n self.log(self.options, pretty_print=True)\n\n if self.definition:\n if not self.project_name:\n self.client.fail(\"Parameter error - project_name required when providing definition.\")\n\n self.project_src = tempfile.mkdtemp(prefix=\"ansible\")\n compose_file = os.path.join(self.project_src, \"docker-compose.yml\")\n try:\n self.log('writing: ')\n self.log(yaml.dump(self.definition, default_flow_style=False))\n with open(compose_file, 'w') as f:\n f.write(yaml.dump(self.definition, default_flow_style=False))\n except Exception as exc:\n self.client.fail(\"Error writing to %s - %s\" % (compose_file, str(exc)))\n else:\n if not self.project_src:\n self.client.fail(\"Parameter error - project_src required.\")\n\n try:\n self.log(\"project_src: %s\" % self.project_src)\n self.project = project_from_options(self.project_src, self.options)\n except Exception as exc:\n self.client.fail(\"Configuration error - %s\" % str(exc))\n\n def exec_module(self):\n result = dict()\n\n if self.state == 'present':\n result = self.cmd_up()\n elif self.state == 'absent':\n result = self.cmd_down()\n\n if self.definition:\n compose_file = os.path.join(self.project_src, \"docker-compose.yml\")\n self.log(\"removing %s\" % compose_file)\n os.remove(compose_file)\n self.log(\"removing %s\" % self.project_src)\n os.rmdir(self.project_src)\n\n if not self.check_mode and not self.debug and result.get('actions'):\n result.pop('actions')\n\n return result\n\n def _get_auth_options(self):\n options = dict()\n for key, value in self.client.auth_params.items():\n if value is not None:\n option = AUTH_PARAM_MAPPING.get(key)\n if option:\n options[option] = value\n return options\n\n def cmd_up(self):\n\n start_deps = self.dependencies\n service_names = self.services\n detached = True\n result = dict(changed=False, actions=dict(), ansible_facts=dict())\n\n up_options = {\n u'--no-recreate': False,\n u'--build': self.build,\n u'--no-build': False,\n u'--no-deps': False,\n u'--force-recreate': False,\n }\n\n if self.recreate == 'never':\n up_options[u'--no-recreate'] = True\n elif self.recreate == 'always':\n up_options[u'--force-recreate'] = True\n\n if self.remove_orphans:\n up_options[u'--remove-orphans'] = True\n\n converge = convergence_strategy_from_opts(up_options)\n self.log(\"convergence strategy: %s\" % converge)\n\n for service in self.project.services:\n if not service_names or service.name in service_names:\n plan = service.convergence_plan(strategy=converge)\n if plan.action != 'noop':\n result['changed'] = True\n if self.debug or self.check_mode:\n result['actions'][service.name] = dict()\n result['actions'][service.name][plan.action] = []\n for container 
in plan.containers:\n result['actions'][service.name][plan.action].append(dict(\n id=container.id,\n name=container.name,\n short_id=container.short_id,\n ))\n\n if not self.check_mode and result['changed']:\n try:\n self.project.up(\n service_names=service_names,\n start_deps=start_deps,\n strategy=converge,\n do_build=build_action_from_opts(up_options),\n detached=detached,\n remove_orphans=self.remove_orphans)\n except Exception as exc:\n self.client.fail(\"Error bring %s up - %s\" % (self.project.name, str(exc)))\n\n if self.stopped:\n result.update(self.cmd_stop(service_names))\n\n if self.restarted:\n result.update(self.cmd_restart(service_names))\n\n if self.scale:\n result.update(self.cmd_scale())\n\n for service in self.project.services:\n result['ansible_facts'][service.name] = dict()\n for container in service.containers(stopped=True):\n inspection = container.inspect()\n # pare down the inspection data to the most useful bits\n facts = dict()\n facts['cmd'] = inspection['Config']['Cmd']\n facts['labels'] = inspection['Config']['Labels']\n facts['image'] = inspection['Config']['Image']\n facts['state'] = dict(\n running=inspection['State']['Running'],\n status=inspection['State']['Status'],\n )\n facts['networks'] = dict()\n for key, value in inspection['NetworkSettings']['Networks'].items():\n facts['networks'][key] = dict(\n aliases=inspection['NetworkSettings']['Networks'][key]['Aliases'],\n globalIPv6=inspection['NetworkSettings']['Networks'][key]['GlobalIPv6Address'],\n globalIPv6PrefixLen=inspection['NetworkSettings']['Networks'][key]['GlobalIPv6PrefixLen'],\n IPAddress=inspection['NetworkSettings']['Networks'][key]['IPAddress'],\n IPPrefixLen=inspection['NetworkSettings']['Networks'][key]['IPPrefixLen'],\n links=inspection['NetworkSettings']['Networks'][key]['Links'],\n macAddress=inspection['NetworkSettings']['Networks'][key]['MacAddress'],\n )\n result['ansible_facts'][service.name][container.name] = facts\n\n return result\n\n def cmd_down(self):\n result = dict(\n changed=False,\n actions=dict(),\n )\n\n for service in self.project.services:\n containers = service.containers(stopped=True)\n if len(containers):\n result['changed'] = True\n if self.debug or self.check_mode:\n result['actions'][service.name] = dict()\n result['actions'][service.name]['deleted'] = [container.name for container in containers]\n\n if not self.check_mode and result['changed']:\n image_type = image_type_from_opt('--rmi', self.remove_images)\n try:\n self.project.down(image_type, self.remove_volumes, self.remove_orphans)\n except Exception as exc:\n self.client.fail(\"Error bringing %s down - %s\" % (self.project.name, str(exc)))\n\n return result\n\n def cmd_stop(self, service_names):\n result = dict(\n changed=False,\n actions=dict()\n )\n for service in self.project.services:\n if not service_names or service.name in service_names:\n result['actions'][service.name] = dict()\n result['actions'][service.name]['stop'] = []\n for container in service.containers(stopped=False):\n result['changed'] = True\n if self.debug:\n result['actions'][service.name]['stop'].append(dict(\n id=container.id,\n name=container.name,\n short_id=container.short_id,\n ))\n\n if not self.check_mode and result['changed']:\n try:\n self.project.stop(service_names=service_names)\n except Exception as exc:\n self.client.fail(\"Error stopping services for %s - %s\" % (self.project.name, str(exc)))\n\n return result\n\n def cmd_restart(self, service_names):\n result = dict(\n changed=False,\n actions=dict()\n )\n\n for 
service in self.project.services:\n if not service_names or service.name in service_names:\n result['actions'][service.name] = dict()\n result['actions'][service.name]['restart'] = []\n for container in service.containers(stopped=True):\n result['changed'] = True\n if self.debug or self.check_mode:\n result['actions'][service.name]['restart'].append(dict(\n id=container.id,\n name=container.name,\n short_id=container.short_id,\n ))\n\n if not self.check_mode and result['changed']:\n try:\n self.project.restart(service_names=service_names)\n except Exception as exc:\n self.client.fail(\"Error restarting services for %s - %s\" % (self.project.name, str(exc)))\n\n return result\n\n def cmd_scale(self):\n result = dict(\n changed=False,\n actions=dict()\n )\n\n for service in self.project.services:\n if service.name in self.scale:\n result['actions'][service.name] = dict()\n containers = service.containers(stopped=True)\n if len(containers) != self.scale[service.name]:\n result['changed'] = True\n if self.debug or self.check_mode:\n result['actions'][service.name]['scale'] = self.scale[service.name] - len(containers)\n if not self.check_mode:\n try:\n service.scale(self.scale[service.name])\n except Exception as exc:\n self.client.fail(\"Error scaling %s - %s\" % (service.name, str(exc)))\n return result\n\n\ndef main():\n argument_spec = dict(\n project_src=dict(type='path'),\n project_name=dict(type='str',),\n files=dict(type='list'),\n state=dict(type='str', choices=['absent', 'present'], default='present'),\n definition=dict(type='dict'),\n hostname_check=dict(type='bool', default=False),\n recreate=dict(type='str', choices=['always','never','smart'], default='smart'),\n build=dict(type='bool', default=True),\n remove_images=dict(type='str', choices=['all', 'local']),\n remove_volumes=dict(type='bool', default=False),\n remove_orphans=dict(type='bool', default=False),\n stopped=dict(type='bool', default=False),\n restarted=dict(type='bool', default=False),\n scale=dict(type='dict'),\n services=dict(type='list'),\n dependencies=dict(type='bool', default=True),\n debug=dict(type='bool', default=False)\n )\n\n mutually_exclusive = [\n ('definition', 'project_src'),\n ('definition', 'files')\n ]\n\n client = AnsibleDockerClient(\n argument_spec=argument_spec,\n mutually_exclusive=mutually_exclusive,\n supports_check_mode=True\n )\n\n result = ContainerManager(client).exec_module()\n client.module.exit_json(**result)\n\n\nif __name__ == '__main__':\n main()\n", "path": "cloud/docker/docker_service.py" } ]
[ { "content": "#!/usr/bin/python\n#\n# Copyright 2016 Red Hat | Ansible\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. If not, see <http://www.gnu.org/licenses/>.\n\nDOCUMENTATION = '''\n\nmodule: docker_service\n\nshort_description: Manage docker services and containers.\n\nversion_added: \"2.1\"\n\nauthor: \"Chris Houseknecht (@chouseknecht)\"\n\ndescription:\n - Consumes docker compose to start, shutdown and scale services.\n - Works with compose versions 1 and 2.\n - Compose can be read from a docker-compose.yml (or .yaml) file or inline using the C(definition) option.\n - See the examples for more details.\n - Supports check mode.\n\noptions:\n project_src:\n description:\n - Path to a directory containing a docker-compose.yml or docker-compose.yaml file.\n - Mutually exclusive with C(definition).\n - Required when no C(definition) is provided.\n type: path\n required: false\n project_name:\n description:\n - Provide a project name. If not provided, the project name is taken from the basename of C(project_src).\n - Required when no C(definition) is provided.\n type: str\n required: false\n files:\n description:\n - List of file names relative to C(project_src). Overrides docker-compose.yml or docker-compose.yaml.\n - Files are loaded and merged in the order given.\n type: list\n required: false\n state:\n description:\n - Desired state of the project.\n - Specifying I(present) is the same as running I(docker-compose up).\n - Specifying I(absent) is the same as running I(docker-compose down).\n choices:\n - absent\n - present\n default: present\n type: str\n required: false\n services:\n description:\n - When C(state) is I(present) run I(docker-compose up) on a subset of services.\n type: list\n required: false\n scale:\n description:\n - When C(sate) is I(present) scale services. 
Provide a dictionary of key/value pairs where the key\n is the name of the service and the value is an integer count for the number of containers.\n type: complex\n required: false\n dependencies:\n description:\n - When C(state) is I(present) specify whether or not to include linked services.\n type: bool\n required: false\n default: true\n definition:\n description:\n - Provide docker-compose yaml describing one or more services, networks and volumes.\n - Mutually exclusive with C(project_src) and C(project_files).\n type: complex\n required: false\n hostname_check:\n description:\n - Whether or not to check the Docker daemon's hostname against the name provided in the client certificate.\n type: bool\n required: false\n default: false\n recreate:\n description:\n - By default containers will be recreated when their configuration differs from the service definition.\n - Setting to I(never) ignores configuration differences and leaves existing containers unchanged.\n - Setting to I(always) forces recreation of all existing containers.\n type: str\n required: false\n choices:\n - always\n - never\n - smart\n default: smart\n build:\n description:\n - Whether or not to build images before starting containers.\n - Missing images will always be built.\n - If an image is present and C(build) is false, the image will not be built.\n - If an image is present and C(build) is true, the image will be built.\n type: bool\n required: false\n default: true\n remove_images:\n description:\n - Use with state I(absent) to remove the all images or only local images.\n type: str\n required: false\n default: null\n remove_volumes:\n description:\n - Use with state I(absent) to remove data volumes.\n required: false\n type: bool\n default: false\n stopped:\n description:\n - Use with state I(present) to leave the containers in an exited or non-running state.\n required: false\n type: bool\n default: false\n restarted:\n description:\n - Use with state I(present) to restart all containers.\n required: false\n type: bool\n default: false\n debug:\n description:\n - Include I(actions) in the return values.\n required: false\n type: bool\n default: false\n\nextends_documentation_fragment:\n - docker\n\nrequirements:\n - \"python >= 2.6\"\n - \"docker-compose >= 1.7.0\"\n - \"Docker API >= 1.20\"\n'''\n\nEXAMPLES = '''\n# Examples use the django example at U(https://docs.docker.com/compose/django/). 
Follow it to create the flask\n# directory\n\n- name: Run using a project directory\n hosts: localhost\n connection: local\n gather_facts: no\n tasks:\n - docker_service:\n project_src: flask\n state: absent\n\n - docker_service:\n project_src: flask\n register: output\n\n - debug: var=output\n\n - docker_service:\n project_src: flask\n build: no\n register: output\n\n - debug: var=output\n\n - assert:\n that: \"not output.changed \"\n\n - docker_service:\n project_src: flask\n build: no\n stopped: true\n register: output\n\n - debug: var=output\n\n - assert:\n that:\n - \"not web.flask_web_1.state.running\"\n - \"not db.flask_db_1.state.running\"\n\n - docker_service:\n project_src: flask\n build: no\n restarted: true\n register: output\n\n - debug: var=output\n\n - assert:\n that:\n - \"web.flask_web_1.state.running\"\n - \"db.flask_db_1.state.running\"\n\n- name: Scale the web service to 2\n hosts: localhost\n connection: local\n gather_facts: no\n tasks:\n - docker_service:\n project_src: flask\n scale:\n web: 2\n register: output\n\n - debug: var=output\n\n- name: Run with inline v2 compose\n hosts: localhost\n connection: local\n gather_facts: no\n tasks:\n - docker_service:\n project_src: flask\n state: absent\n\n - docker_service:\n project_name: flask\n definition:\n version: '2'\n services:\n db:\n image: postgres\n web:\n build: \"{{ playbook_dir }}/flask\"\n command: \"python manage.py runserver 0.0.0.0:8000\"\n volumes:\n - \"{{ playbook_dir }}/flask:/code\"\n ports:\n - \"8000:8000\"\n depends_on:\n - db\n register: output\n\n - debug: var=output\n\n - assert:\n that:\n - \"web.flask_web_1.state.running\"\n - \"db.flask_db_1.state.running\"\n\n- name: Run with inline v1 compose\n hosts: localhost\n connection: local\n gather_facts: no\n tasks:\n - docker_service:\n project_src: flask\n state: absent\n\n - docker_service:\n project_name: flask\n definition:\n db:\n image: postgres\n web:\n build: \"{{ playbook_dir }}/flask\"\n command: \"python manage.py runserver 0.0.0.0:8000\"\n volumes:\n - \"{{ playbook_dir }}/flask:/code\"\n ports:\n - \"8000:8000\"\n links:\n - db\n register: output\n\n - debug: var=output\n\n - assert:\n that:\n - \"web.flask_web_1.state.running\"\n - \"db.flask_db_1.state.running\"\n'''\n\nRETURN = '''\nservice:\n description: Name of the service.\n returned: success\n type: complex\n contains:\n container_name:\n description: Name of the container. 
Format is I(project_service_#).\n returned: success\n type: complex\n contains:\n cmd:\n description: One or more commands to be executed in the container.\n returned: success\n type: list\n example: [\"postgres\"]\n image:\n description: Name of the image from which the container was built.\n returned: success\n type: str\n example: postgres\n labels:\n description: Meta data assigned to the container.\n returned: success\n type: complex\n example: {...}\n networks:\n description: Contains a dictionary for each network to which the container is a member.\n returned: success\n type: complex\n contains:\n IPAddress:\n description: The IP address assigned to the container.\n returned: success\n type: string\n example: 172.17.0.2\n IPPrefixLen:\n description: Number of bits used by the subnet.\n returned: success\n type: int\n example: 16\n aliases:\n description: Aliases assigned to the container by the network.\n returned: success\n type: list\n example: ['db']\n globalIPv6:\n description: IPv6 address assigned to the container.\n returned: success\n type: str\n example: ''\n globalIPv6PrefixLen:\n description: IPv6 subnet length.\n returned: success\n type: int\n example: 0\n links:\n description: List of container names to which this container is linked.\n returned: success\n type: list\n example: null\n macAddress:\n description: Mac Address assigned to the virtual NIC.\n returned: success\n type: str\n example: \"02:42:ac:11:00:02\"\n state:\n description: Information regarding the current disposition of the container.\n returned: success\n type: complex\n contains:\n running:\n description: Whether or not the container is up with a running process.\n returned: success\n type: bool\n example: true\n status:\n description: Description of the running state.\n returned: success\n type: str\n example: running\n\nactions:\n description: Provides the actions to be taken on each service as determined by compose.\n returned: when in check mode or I(debug) true\n type: complex\n contains:\n service_name:\n description: Name of the service.\n returned: always\n type: complex\n contains:\n action:\n description: A descriptive name of the action to be performed on the set of containers\n within the service.\n returned: always\n type: list\n contains:\n id:\n description: the container's long ID\n returned: always\n type: string\n name:\n description: the container's name\n returned: always\n type: string\n short_id:\n description: the container's short ID\n returned: always\n type: string\n'''\n\nHAS_COMPOSE = True\nHAS_COMPOSE_EXC = None\n\nimport yaml\n\nfrom ansible.module_utils.basic import *\n\ntry:\n from compose.cli.command import project_from_options\n from compose.service import ConvergenceStrategy\n from compose.cli.main import convergence_strategy_from_opts, build_action_from_opts, image_type_from_opt\nexcept ImportError as exc:\n HAS_COMPOSE = False\n HAS_COMPOSE_EXC = str(exc)\n\nfrom ansible.module_utils.docker_common import *\n\n\nAUTH_PARAM_MAPPING = {\n u'docker_host': u'--host',\n u'tls': u'--tls',\n u'cacert_path': u'--tlscacert',\n u'cert_path': u'--tlscert',\n u'key_path': u'--tlskey',\n u'tls_verify': u'--tlsverify'\n}\n\n\nclass ContainerManager(DockerBaseClass):\n\n def __init__(self, client):\n\n super(ContainerManager, self).__init__()\n\n self.client = client\n self.project_src = None\n self.files = None\n self.project_name = None\n self.state = None\n self.definition = None\n self.hostname_check = None\n self.timeout = None\n self.remove_images = None\n 
self.remove_orphans = None\n self.remove_volumes = None\n self.stopped = None\n self.restarted = None\n self.recreate = None\n self.build = None\n self.dependencies = None\n self.services = None\n self.scale = None\n self.debug = None\n\n for key, value in client.module.params.items():\n setattr(self, key, value)\n\n self.check_mode = client.check_mode\n\n if not self.debug:\n self.debug = client.module._debug\n\n self.options = dict()\n self.options.update(self._get_auth_options())\n self.options[u'--skip-hostname-check'] = (not self.hostname_check)\n\n if self.project_name:\n self.options[u'--project-name'] = self.project_name\n\n if self.files:\n self.options[u'--file'] = self.files\n\n if not HAS_COMPOSE:\n self.client.fail(\"Unable to load docker-compose. Try `pip install docker-compose`. Error: %s\" % HAS_COMPOSE_EXC)\n\n self.log(\"options: \")\n self.log(self.options, pretty_print=True)\n\n if self.definition:\n if not self.project_name:\n self.client.fail(\"Parameter error - project_name required when providing definition.\")\n\n self.project_src = tempfile.mkdtemp(prefix=\"ansible\")\n compose_file = os.path.join(self.project_src, \"docker-compose.yml\")\n try:\n self.log('writing: ')\n self.log(yaml.dump(self.definition, default_flow_style=False))\n with open(compose_file, 'w') as f:\n f.write(yaml.dump(self.definition, default_flow_style=False))\n except Exception as exc:\n self.client.fail(\"Error writing to %s - %s\" % (compose_file, str(exc)))\n else:\n if not self.project_src:\n self.client.fail(\"Parameter error - project_src required.\")\n\n try:\n self.log(\"project_src: %s\" % self.project_src)\n self.project = project_from_options(self.project_src, self.options)\n except Exception as exc:\n self.client.fail(\"Configuration error - %s\" % str(exc))\n\n def exec_module(self):\n result = dict()\n\n if self.state == 'present':\n result = self.cmd_up()\n elif self.state == 'absent':\n result = self.cmd_down()\n\n if self.definition:\n compose_file = os.path.join(self.project_src, \"docker-compose.yml\")\n self.log(\"removing %s\" % compose_file)\n os.remove(compose_file)\n self.log(\"removing %s\" % self.project_src)\n os.rmdir(self.project_src)\n\n if not self.check_mode and not self.debug and result.get('actions'):\n result.pop('actions')\n\n return result\n\n def _get_auth_options(self):\n options = dict()\n for key, value in self.client.auth_params.items():\n if value is not None:\n option = AUTH_PARAM_MAPPING.get(key)\n if option:\n options[option] = value\n return options\n\n def cmd_up(self):\n\n start_deps = self.dependencies\n service_names = self.services\n detached = True\n result = dict(changed=False, actions=dict(), ansible_facts=dict())\n\n up_options = {\n u'--no-recreate': False,\n u'--build': self.build,\n u'--no-build': False,\n u'--no-deps': False,\n u'--force-recreate': False,\n }\n\n if self.recreate == 'never':\n up_options[u'--no-recreate'] = True\n elif self.recreate == 'always':\n up_options[u'--force-recreate'] = True\n\n if self.remove_orphans:\n up_options[u'--remove-orphans'] = True\n\n converge = convergence_strategy_from_opts(up_options)\n self.log(\"convergence strategy: %s\" % converge)\n\n for service in self.project.services:\n if not service_names or service.name in service_names:\n plan = service.convergence_plan(strategy=converge)\n if plan.action != 'noop':\n result['changed'] = True\n if self.debug or self.check_mode:\n result['actions'][service.name] = dict()\n result['actions'][service.name][plan.action] = []\n for container in 
plan.containers:\n result['actions'][service.name][plan.action].append(dict(\n id=container.id,\n name=container.name,\n short_id=container.short_id,\n ))\n\n if not self.check_mode and result['changed']:\n try:\n self.project.up(\n service_names=service_names,\n start_deps=start_deps,\n strategy=converge,\n do_build=build_action_from_opts(up_options),\n detached=detached,\n remove_orphans=self.remove_orphans)\n except Exception as exc:\n self.client.fail(\"Error bring %s up - %s\" % (self.project.name, str(exc)))\n\n if self.stopped:\n result.update(self.cmd_stop(service_names))\n\n if self.restarted:\n result.update(self.cmd_restart(service_names))\n\n if self.scale:\n result.update(self.cmd_scale())\n\n for service in self.project.services:\n result['ansible_facts'][service.name] = dict()\n for container in service.containers(stopped=True):\n inspection = container.inspect()\n # pare down the inspection data to the most useful bits\n facts = dict()\n facts['cmd'] = inspection['Config']['Cmd']\n facts['labels'] = inspection['Config']['Labels']\n facts['image'] = inspection['Config']['Image']\n facts['state'] = dict(\n running=inspection['State']['Running'],\n status=inspection['State']['Status'],\n )\n facts['networks'] = dict()\n for key, value in inspection['NetworkSettings']['Networks'].items():\n facts['networks'][key] = dict(\n aliases=inspection['NetworkSettings']['Networks'][key]['Aliases'],\n globalIPv6=inspection['NetworkSettings']['Networks'][key]['GlobalIPv6Address'],\n globalIPv6PrefixLen=inspection['NetworkSettings']['Networks'][key]['GlobalIPv6PrefixLen'],\n IPAddress=inspection['NetworkSettings']['Networks'][key]['IPAddress'],\n IPPrefixLen=inspection['NetworkSettings']['Networks'][key]['IPPrefixLen'],\n links=inspection['NetworkSettings']['Networks'][key]['Links'],\n macAddress=inspection['NetworkSettings']['Networks'][key]['MacAddress'],\n )\n result['ansible_facts'][service.name][container.name] = facts\n\n return result\n\n def cmd_down(self):\n result = dict(\n changed=False,\n actions=dict(),\n )\n\n for service in self.project.services:\n containers = service.containers(stopped=True)\n if len(containers):\n result['changed'] = True\n if self.debug or self.check_mode:\n result['actions'][service.name] = dict()\n result['actions'][service.name]['deleted'] = [container.name for container in containers]\n\n if not self.check_mode and result['changed']:\n image_type = image_type_from_opt('--rmi', self.remove_images)\n try:\n self.project.down(image_type, self.remove_volumes, self.remove_orphans)\n except Exception as exc:\n self.client.fail(\"Error bringing %s down - %s\" % (self.project.name, str(exc)))\n\n return result\n\n def cmd_stop(self, service_names):\n result = dict(\n changed=False,\n actions=dict()\n )\n for service in self.project.services:\n if not service_names or service.name in service_names:\n result['actions'][service.name] = dict()\n result['actions'][service.name]['stop'] = []\n for container in service.containers(stopped=False):\n result['changed'] = True\n if self.debug:\n result['actions'][service.name]['stop'].append(dict(\n id=container.id,\n name=container.name,\n short_id=container.short_id,\n ))\n\n if not self.check_mode and result['changed']:\n try:\n self.project.stop(service_names=service_names)\n except Exception as exc:\n self.client.fail(\"Error stopping services for %s - %s\" % (self.project.name, str(exc)))\n\n return result\n\n def cmd_restart(self, service_names):\n result = dict(\n changed=False,\n actions=dict()\n )\n\n for 
service in self.project.services:\n if not service_names or service.name in service_names:\n result['actions'][service.name] = dict()\n result['actions'][service.name]['restart'] = []\n for container in service.containers(stopped=True):\n result['changed'] = True\n if self.debug or self.check_mode:\n result['actions'][service.name]['restart'].append(dict(\n id=container.id,\n name=container.name,\n short_id=container.short_id,\n ))\n\n if not self.check_mode and result['changed']:\n try:\n self.project.restart(service_names=service_names)\n except Exception as exc:\n self.client.fail(\"Error restarting services for %s - %s\" % (self.project.name, str(exc)))\n\n return result\n\n def cmd_scale(self):\n result = dict(\n changed=False,\n actions=dict()\n )\n\n for service in self.project.services:\n if service.name in self.scale:\n result['actions'][service.name] = dict()\n containers = service.containers(stopped=True)\n if len(containers) != self.scale[service.name]:\n result['changed'] = True\n if self.debug or self.check_mode:\n result['actions'][service.name]['scale'] = self.scale[service.name] - len(containers)\n if not self.check_mode:\n try:\n service.scale(self.scale[service.name])\n except Exception as exc:\n self.client.fail(\"Error scaling %s - %s\" % (service.name, str(exc)))\n return result\n\n\ndef main():\n argument_spec = dict(\n project_src=dict(type='path'),\n project_name=dict(type='str',),\n files=dict(type='list'),\n state=dict(type='str', choices=['absent', 'present'], default='present'),\n definition=dict(type='dict'),\n hostname_check=dict(type='bool', default=False),\n recreate=dict(type='str', choices=['always','never','smart'], default='smart'),\n build=dict(type='bool', default=True),\n remove_images=dict(type='str', choices=['all', 'local']),\n remove_volumes=dict(type='bool', default=False),\n remove_orphans=dict(type='bool', default=False),\n stopped=dict(type='bool', default=False),\n restarted=dict(type='bool', default=False),\n scale=dict(type='dict'),\n services=dict(type='list'),\n dependencies=dict(type='bool', default=True),\n debug=dict(type='bool', default=False)\n )\n\n mutually_exclusive = [\n ('definition', 'project_src'),\n ('definition', 'files')\n ]\n\n client = AnsibleDockerClient(\n argument_spec=argument_spec,\n mutually_exclusive=mutually_exclusive,\n supports_check_mode=True\n )\n\n result = ContainerManager(client).exec_module()\n client.module.exit_json(**result)\n\n\nif __name__ == '__main__':\n main()\n", "path": "cloud/docker/docker_service.py" } ]
diff --git a/cloud/docker/docker_service.py b/cloud/docker/docker_service.py index 315657acf80..266ab372a5c 100644 --- a/cloud/docker/docker_service.py +++ b/cloud/docker/docker_service.py @@ -434,7 +434,7 @@ class ContainerManager(DockerBaseClass): def __init__(self, client): - super(ContainerManager, self).__init__(module=client.module) + super(ContainerManager, self).__init__() self.client = client self.project_src = None
django-json-api__django-rest-framework-json-api-1105
django-admin loaddata drf_example failed

(venv) PS C:\django-rest-framework-json-api>
(venv) PS C:\django-rest-framework-json-api> django-admin loaddata drf_example --settings=example.settings
System check identified some issues:

WARNINGS:
example.Author: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.AuthorBio: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.AuthorBioMetadata: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.AuthorType: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.Blog: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.Comment: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.Company: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.Entry: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.LabResults: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.Project: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.ProjectType: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
example.TaggedItem: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
tests.BasicModel: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
tests.ForeignKeySource: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
tests.ForeignKeyTarget: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
tests.ManyToManySource: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
tests.ManyToManyTarget: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.
tests.NestedRelatedSource: (models.W042) Auto-created primary key used when not defining a primary key type, by default 'django.db.models.AutoField'.
        HINT: Configure the DEFAULT_AUTO_FIELD setting or the AppConfig.default_auto_field attribute to point to a subclass of AutoField, e.g. 'django.db.models.BigAutoField'.

Traceback (most recent call last):
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\db\models\options.py", line 668, in get_field
    return self.fields_map[field_name]
KeyError: 'type'

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\serializers\json.py", line 70, in Deserializer
    yield from PythonDeserializer(objects, **options)
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\serializers\python.py", line 131, in Deserializer
    field = Model._meta.get_field(field_name)
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\db\models\options.py", line 670, in get_field
    raise FieldDoesNotExist(
django.core.exceptions.FieldDoesNotExist: Author has no field named 'type'

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "c:\python39\lib\runpy.py", line 197, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "c:\python39\lib\runpy.py", line 87, in _run_code
    exec(code, run_globals)
  File "C:\django-rest-framework-json-api\venv\Scripts\django-admin.exe\__main__.py", line 7, in <module>
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\management\__init__.py", line 446, in execute_from_command_line
    utility.execute()
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\management\__init__.py", line 440, in execute
    self.fetch_command(subcommand).run_from_argv(self.argv)
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\management\base.py", line 402, in run_from_argv
    self.execute(*args, **cmd_options)
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\management\base.py", line 448, in execute
    output = self.handle(*args, **options)
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\management\commands\loaddata.py", line 102, in handle
    self.loaddata(fixture_labels)
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\management\commands\loaddata.py", line 163, in loaddata
    self.load_label(fixture_label)
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\management\commands\loaddata.py", line 251, in load_label
    for obj in objects:
  File "C:\django-rest-framework-json-api\venv\lib\site-packages\django\core\serializers\json.py", line 74, in Deserializer
    raise DeserializationError() from exc
django.core.serializers.base.DeserializationError: Problem installing fixture 'C:\django-rest-framework-json-api\example\fixtures\drf_example.json':
(venv) PS C:\django-rest-framework-json-api>
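The warnings and the traceback above describe two separate problems: the example models rely on auto-created primary keys while no DEFAULT_AUTO_FIELD is configured (models.W042), and the bundled fixture still populates a `type` field that the Author model no longer defines, which is why the JSON deserializer raises FieldDoesNotExist. The settings side of the fix is the one-liner sketched below for example/settings/dev.py; the fixture additionally needs its `type` entries renamed to `author_type` (and a `full_name` value supplied), as the patch further down applies.

    # example/settings/dev.py
    # Keep implicit primary keys on the historic AutoField so Django 3.2+
    # stops emitting models.W042 for the example and tests apps.
    DEFAULT_AUTO_FIELD = "django.db.models.AutoField"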
[ { "content": "import os\n\nSITE_ID = 1\nDEBUG = True\n\nMEDIA_ROOT = os.path.normcase(os.path.dirname(os.path.abspath(__file__)))\nMEDIA_URL = \"/media/\"\nUSE_TZ = False\n\nDATABASE_ENGINE = \"sqlite3\"\n\nDATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.sqlite3\",\n \"NAME\": \"drf_example\",\n }\n}\n\nINSTALLED_APPS = [\n \"django.contrib.contenttypes\",\n \"django.contrib.staticfiles\",\n \"django.contrib.sites\",\n \"django.contrib.sessions\",\n \"django.contrib.auth\",\n \"rest_framework_json_api\",\n \"rest_framework\",\n \"polymorphic\",\n \"example\",\n \"django_filters\",\n \"tests\",\n]\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [\n # insert your TEMPLATE_DIRS here\n ],\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n # Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this\n # list if you haven't customized them:\n \"django.contrib.auth.context_processors.auth\",\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.i18n\",\n \"django.template.context_processors.media\",\n \"django.template.context_processors.static\",\n \"django.template.context_processors.tz\",\n \"django.contrib.messages.context_processors.messages\",\n ],\n },\n },\n]\n\nSTATIC_URL = \"/static/\"\n\nROOT_URLCONF = \"example.urls\"\n\nSECRET_KEY = \"abc123\"\n\nPASSWORD_HASHERS = (\"django.contrib.auth.hashers.UnsaltedMD5PasswordHasher\",)\n\nINTERNAL_IPS = (\"127.0.0.1\",)\n\nJSON_API_FORMAT_FIELD_NAMES = \"camelize\"\nJSON_API_FORMAT_TYPES = \"camelize\"\nREST_FRAMEWORK = {\n \"PAGE_SIZE\": 5,\n \"EXCEPTION_HANDLER\": \"rest_framework_json_api.exceptions.exception_handler\",\n \"DEFAULT_PAGINATION_CLASS\": \"rest_framework_json_api.pagination.JsonApiPageNumberPagination\", # noqa: B950\n \"DEFAULT_PARSER_CLASSES\": (\n \"rest_framework_json_api.parsers.JSONParser\",\n \"rest_framework.parsers.FormParser\",\n \"rest_framework.parsers.MultiPartParser\",\n ),\n \"DEFAULT_RENDERER_CLASSES\": (\n \"rest_framework_json_api.renderers.JSONRenderer\",\n # If you're performance testing, you will want to use the browseable API\n # without forms, as the forms can generate their own queries.\n # If performance testing, enable:\n # 'example.utils.BrowsableAPIRendererWithoutForms',\n # Otherwise, to play around with the browseable API, enable:\n \"rest_framework_json_api.renderers.BrowsableAPIRenderer\",\n ),\n \"DEFAULT_METADATA_CLASS\": \"rest_framework_json_api.metadata.JSONAPIMetadata\",\n \"DEFAULT_SCHEMA_CLASS\": \"rest_framework_json_api.schemas.openapi.AutoSchema\",\n \"DEFAULT_FILTER_BACKENDS\": (\n \"rest_framework_json_api.filters.OrderingFilter\",\n \"rest_framework_json_api.django_filters.DjangoFilterBackend\",\n \"rest_framework.filters.SearchFilter\",\n ),\n \"SEARCH_PARAM\": \"filter[search]\",\n \"TEST_REQUEST_RENDERER_CLASSES\": (\n \"rest_framework_json_api.renderers.JSONRenderer\",\n ),\n \"TEST_REQUEST_DEFAULT_FORMAT\": \"vnd.api+json\",\n}\n", "path": "example/settings/dev.py" } ]
[ { "content": "import os\n\nSITE_ID = 1\nDEBUG = True\n\nMEDIA_ROOT = os.path.normcase(os.path.dirname(os.path.abspath(__file__)))\nMEDIA_URL = \"/media/\"\nUSE_TZ = False\nDEFAULT_AUTO_FIELD = \"django.db.models.AutoField\"\n\nDATABASE_ENGINE = \"sqlite3\"\n\nDATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.sqlite3\",\n \"NAME\": \"drf_example\",\n }\n}\n\nINSTALLED_APPS = [\n \"django.contrib.contenttypes\",\n \"django.contrib.staticfiles\",\n \"django.contrib.sites\",\n \"django.contrib.sessions\",\n \"django.contrib.auth\",\n \"rest_framework_json_api\",\n \"rest_framework\",\n \"polymorphic\",\n \"example\",\n \"django_filters\",\n \"tests\",\n]\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [\n # insert your TEMPLATE_DIRS here\n ],\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n # Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this\n # list if you haven't customized them:\n \"django.contrib.auth.context_processors.auth\",\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.i18n\",\n \"django.template.context_processors.media\",\n \"django.template.context_processors.static\",\n \"django.template.context_processors.tz\",\n \"django.contrib.messages.context_processors.messages\",\n ],\n },\n },\n]\n\nSTATIC_URL = \"/static/\"\n\nROOT_URLCONF = \"example.urls\"\n\nSECRET_KEY = \"abc123\"\n\nPASSWORD_HASHERS = (\"django.contrib.auth.hashers.UnsaltedMD5PasswordHasher\",)\n\nINTERNAL_IPS = (\"127.0.0.1\",)\n\nJSON_API_FORMAT_FIELD_NAMES = \"camelize\"\nJSON_API_FORMAT_TYPES = \"camelize\"\nREST_FRAMEWORK = {\n \"PAGE_SIZE\": 5,\n \"EXCEPTION_HANDLER\": \"rest_framework_json_api.exceptions.exception_handler\",\n \"DEFAULT_PAGINATION_CLASS\": \"rest_framework_json_api.pagination.JsonApiPageNumberPagination\", # noqa: B950\n \"DEFAULT_PARSER_CLASSES\": (\n \"rest_framework_json_api.parsers.JSONParser\",\n \"rest_framework.parsers.FormParser\",\n \"rest_framework.parsers.MultiPartParser\",\n ),\n \"DEFAULT_RENDERER_CLASSES\": (\n \"rest_framework_json_api.renderers.JSONRenderer\",\n # If you're performance testing, you will want to use the browseable API\n # without forms, as the forms can generate their own queries.\n # If performance testing, enable:\n # 'example.utils.BrowsableAPIRendererWithoutForms',\n # Otherwise, to play around with the browseable API, enable:\n \"rest_framework_json_api.renderers.BrowsableAPIRenderer\",\n ),\n \"DEFAULT_METADATA_CLASS\": \"rest_framework_json_api.metadata.JSONAPIMetadata\",\n \"DEFAULT_SCHEMA_CLASS\": \"rest_framework_json_api.schemas.openapi.AutoSchema\",\n \"DEFAULT_FILTER_BACKENDS\": (\n \"rest_framework_json_api.filters.OrderingFilter\",\n \"rest_framework_json_api.django_filters.DjangoFilterBackend\",\n \"rest_framework.filters.SearchFilter\",\n ),\n \"SEARCH_PARAM\": \"filter[search]\",\n \"TEST_REQUEST_RENDERER_CLASSES\": (\n \"rest_framework_json_api.renderers.JSONRenderer\",\n ),\n \"TEST_REQUEST_DEFAULT_FORMAT\": \"vnd.api+json\",\n}\n", "path": "example/settings/dev.py" } ]
diff --git a/example/fixtures/drf_example.json b/example/fixtures/drf_example.json index 498c0d1c..944f502c 100644 --- a/example/fixtures/drf_example.json +++ b/example/fixtures/drf_example.json @@ -26,8 +26,9 @@ "created_at": "2016-05-02T10:09:48.277", "modified_at": "2016-05-02T10:09:48.277", "name": "Alice", + "full_name": "Alice Test", "email": "[email protected]", - "type": null + "author_type": null } }, { @@ -37,8 +38,9 @@ "created_at": "2016-05-02T10:09:57.133", "modified_at": "2016-05-02T10:09:57.133", "name": "Bob", + "full_name": "Bob Test", "email": "[email protected]", - "type": null + "author_type": null } }, { diff --git a/example/settings/dev.py b/example/settings/dev.py index 8e13ec15..c5405338 100644 --- a/example/settings/dev.py +++ b/example/settings/dev.py @@ -6,6 +6,7 @@ MEDIA_ROOT = os.path.normcase(os.path.dirname(os.path.abspath(__file__))) MEDIA_URL = "/media/" USE_TZ = False +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" DATABASE_ENGINE = "sqlite3"
pandas-dev__pandas-5411
BLD: plot failures in master

This started after I merged #5375 (which passed cleanly before merging)

https://travis-ci.org/pydata/pandas/jobs/13376953
[ { "content": "from datetime import datetime, timedelta\nimport re\nimport sys\n\nimport numpy as np\n\nimport pandas.lib as lib\nimport pandas.tslib as tslib\nimport pandas.core.common as com\nfrom pandas.compat import StringIO, callable\nimport pandas.compat as compat\n\ntry:\n import dateutil\n from dateutil.parser import parse, DEFAULTPARSER\n from dateutil.relativedelta import relativedelta\n\n # raise exception if dateutil 2.0 install on 2.x platform\n if (sys.version_info[0] == 2 and\n dateutil.__version__ == '2.0'): # pragma: no cover\n raise Exception('dateutil 2.0 incompatible with Python 2.x, you must '\n 'install version 1.5 or 2.1+!')\nexcept ImportError: # pragma: no cover\n print('Please install python-dateutil via easy_install or some method!')\n raise # otherwise a 2nd import won't show the message\n\n\ndef _infer_tzinfo(start, end):\n def _infer(a, b):\n tz = a.tzinfo\n if b and b.tzinfo:\n if not (tslib.get_timezone(tz) == tslib.get_timezone(b.tzinfo)):\n raise AssertionError('Inputs must both have the same timezone,'\n ' {0} != {1}'.format(tz, b.tzinfo))\n return tz\n tz = None\n if start is not None:\n tz = _infer(start, end)\n elif end is not None:\n tz = _infer(end, start)\n return tz\n\n\ndef _maybe_get_tz(tz):\n if isinstance(tz, compat.string_types):\n import pytz\n tz = pytz.timezone(tz)\n if com.is_integer(tz):\n import pytz\n tz = pytz.FixedOffset(tz / 60)\n return tz\n\n\ndef to_datetime(arg, errors='ignore', dayfirst=False, utc=None, box=True,\n format=None, coerce=False, unit='ns'):\n \"\"\"\n Convert argument to datetime\n\n Parameters\n ----------\n arg : string, datetime, array of strings (with possible NAs)\n errors : {'ignore', 'raise'}, default 'ignore'\n Errors are ignored by default (values left untouched)\n dayfirst : boolean, default False\n If True parses dates with the day first, eg 20/01/2005\n Warning: dayfirst=True is not strict, but will prefer to parse\n with day first (this is a known bug).\n utc : boolean, default None\n Return UTC DatetimeIndex if True (converting any tz-aware\n datetime.datetime objects as well)\n box : boolean, default True\n If True returns a DatetimeIndex, if False returns ndarray of values\n format : string, default None\n strftime to parse time, eg \"%d/%m/%Y\"\n coerce : force errors to NaT (False by default)\n unit : unit of the arg (D,s,ms,us,ns) denote the unit in epoch\n (e.g. 
a unix timestamp), which is an integer/float number\n\n Returns\n -------\n ret : datetime if parsing succeeded\n \"\"\"\n from pandas import Timestamp\n from pandas.core.series import Series\n from pandas.tseries.index import DatetimeIndex\n\n def _convert_listlike(arg, box):\n\n if isinstance(arg, (list,tuple)):\n arg = np.array(arg, dtype='O')\n\n if com.is_datetime64_ns_dtype(arg):\n if box and not isinstance(arg, DatetimeIndex):\n try:\n return DatetimeIndex(arg, tz='utc' if utc else None)\n except ValueError:\n pass\n\n return arg\n\n arg = com._ensure_object(arg)\n try:\n if format is not None:\n result = None\n\n # shortcut formatting here\n if format == '%Y%m%d':\n try:\n result = _attempt_YYYYMMDD(arg)\n except:\n raise ValueError(\"cannot convert the input to '%Y%m%d' date format\")\n\n # fallback\n if result is None:\n result = tslib.array_strptime(arg, format, coerce=coerce)\n else:\n result = tslib.array_to_datetime(arg, raise_=errors == 'raise',\n utc=utc, dayfirst=dayfirst,\n coerce=coerce, unit=unit)\n if com.is_datetime64_dtype(result) and box:\n result = DatetimeIndex(result, tz='utc' if utc else None)\n return result\n\n except ValueError as e:\n try:\n values, tz = tslib.datetime_to_datetime64(arg)\n return DatetimeIndex._simple_new(values, None, tz=tz)\n except (ValueError, TypeError):\n raise e\n\n if arg is None:\n return arg\n elif isinstance(arg, Timestamp):\n return arg\n elif isinstance(arg, Series):\n values = _convert_listlike(arg.values, box=False)\n return Series(values, index=arg.index, name=arg.name)\n elif com.is_list_like(arg):\n return _convert_listlike(arg, box=box)\n\n return _convert_listlike(np.array([ arg ]), box=box)[0]\n\nclass DateParseError(ValueError):\n pass\n\ndef _attempt_YYYYMMDD(arg):\n \"\"\" try to parse the YYYYMMDD/%Y%m%d format, try to deal with NaT-like,\n arg is a passed in as an object dtype, but could really be ints/strings with nan-like/or floats (e.g. 
with nan) \"\"\"\n\n def calc(carg):\n # calculate the actual result\n carg = carg.astype(object)\n return lib.try_parse_year_month_day(carg/10000,carg/100 % 100, carg % 100)\n\n def calc_with_mask(carg,mask):\n result = np.empty(carg.shape, dtype='M8[ns]')\n iresult = result.view('i8')\n iresult[-mask] = tslib.iNaT\n result[mask] = calc(carg[mask].astype(np.float64).astype(np.int64)).astype('M8[ns]')\n return result\n\n # try intlike / strings that are ints\n try:\n return calc(arg.astype(np.int64))\n except:\n pass\n\n # a float with actual np.nan\n try:\n carg = arg.astype(np.float64)\n return calc_with_mask(carg,com.notnull(carg))\n except:\n pass\n\n # string with NaN-like\n try:\n mask = ~lib.ismember(arg, tslib._nat_strings)\n return calc_with_mask(arg,mask)\n except:\n pass\n\n return None\n\n# patterns for quarters like '4Q2005', '05Q1'\nqpat1full = re.compile(r'(\\d)Q(\\d\\d\\d\\d)')\nqpat2full = re.compile(r'(\\d\\d\\d\\d)Q(\\d)')\nqpat1 = re.compile(r'(\\d)Q(\\d\\d)')\nqpat2 = re.compile(r'(\\d\\d)Q(\\d)')\nypat = re.compile(r'(\\d\\d\\d\\d)$')\nhas_time = re.compile('(.+)([\\s]|T)+(.+)')\n\n\ndef parse_time_string(arg, freq=None, dayfirst=None, yearfirst=None):\n \"\"\"\n Try hard to parse datetime string, leveraging dateutil plus some extra\n goodies like quarter recognition.\n\n Parameters\n ----------\n arg : compat.string_types\n freq : str or DateOffset, default None\n Helps with interpreting time string if supplied\n dayfirst : bool, default None\n If None uses default from print_config\n yearfirst : bool, default None\n If None uses default from print_config\n\n Returns\n -------\n datetime, datetime/dateutil.parser._result, str\n \"\"\"\n from pandas.core.config import get_option\n from pandas.tseries.offsets import DateOffset\n from pandas.tseries.frequencies import (_get_rule_month, _month_numbers,\n _get_freq_str)\n\n if not isinstance(arg, compat.string_types):\n return arg\n\n arg = arg.upper()\n\n default = datetime(1, 1, 1).replace(hour=0, minute=0,\n second=0, microsecond=0)\n\n # special handling for possibilities eg, 2Q2005, 2Q05, 2005Q1, 05Q1\n if len(arg) in [4, 6]:\n m = ypat.match(arg)\n if m:\n ret = default.replace(year=int(m.group(1)))\n return ret, ret, 'year'\n\n add_century = False\n if len(arg) == 4:\n add_century = True\n qpats = [(qpat1, 1), (qpat2, 0)]\n else:\n qpats = [(qpat1full, 1), (qpat2full, 0)]\n\n for pat, yfirst in qpats:\n qparse = pat.match(arg)\n if qparse is not None:\n if yfirst:\n yi, qi = 1, 2\n else:\n yi, qi = 2, 1\n q = int(qparse.group(yi))\n y_str = qparse.group(qi)\n y = int(y_str)\n if add_century:\n y += 2000\n\n if freq is not None:\n # hack attack, #1228\n mnum = _month_numbers[_get_rule_month(freq)] + 1\n month = (mnum + (q - 1) * 3) % 12 + 1\n if month > mnum:\n y -= 1\n else:\n month = (q - 1) * 3 + 1\n\n ret = default.replace(year=y, month=month)\n return ret, ret, 'quarter'\n\n is_mo_str = freq is not None and freq == 'M'\n is_mo_off = getattr(freq, 'rule_code', None) == 'M'\n is_monthly = is_mo_str or is_mo_off\n if len(arg) == 6 and is_monthly:\n try:\n ret = _try_parse_monthly(arg)\n if ret is not None:\n return ret, ret, 'month'\n except Exception:\n pass\n\n # montly f7u12\n mresult = _attempt_monthly(arg)\n if mresult:\n return mresult\n\n if dayfirst is None:\n dayfirst = get_option(\"display.date_dayfirst\")\n if yearfirst is None:\n yearfirst = get_option(\"display.date_yearfirst\")\n\n try:\n parsed, reso = dateutil_parse(arg, default, dayfirst=dayfirst,\n yearfirst=yearfirst)\n except Exception as e:\n 
# TODO: allow raise of errors within instead\n raise DateParseError(e)\n\n if parsed is None:\n raise DateParseError(\"Could not parse %s\" % arg)\n\n return parsed, parsed, reso # datetime, resolution\n\n\ndef dateutil_parse(timestr, default,\n ignoretz=False, tzinfos=None,\n **kwargs):\n \"\"\" lifted from dateutil to get resolution\"\"\"\n from dateutil import tz\n import time\n fobj = StringIO(str(timestr))\n\n res = DEFAULTPARSER._parse(fobj, **kwargs)\n\n if res is None:\n raise ValueError(\"unknown string format\")\n\n repl = {}\n reso = None\n for attr in [\"year\", \"month\", \"day\", \"hour\",\n \"minute\", \"second\", \"microsecond\"]:\n value = getattr(res, attr)\n if value is not None:\n repl[attr] = value\n reso = attr\n\n if reso is None:\n raise ValueError(\"Cannot parse date.\")\n\n if reso == 'microsecond' and repl['microsecond'] == 0:\n reso = 'second'\n\n ret = default.replace(**repl)\n if res.weekday is not None and not res.day:\n ret = ret + relativedelta.relativedelta(weekday=res.weekday)\n if not ignoretz:\n if callable(tzinfos) or tzinfos and res.tzname in tzinfos:\n if callable(tzinfos):\n tzdata = tzinfos(res.tzname, res.tzoffset)\n else:\n tzdata = tzinfos.get(res.tzname)\n if isinstance(tzdata, datetime.tzinfo):\n tzinfo = tzdata\n elif isinstance(tzdata, compat.string_types):\n tzinfo = tz.tzstr(tzdata)\n elif isinstance(tzdata, int):\n tzinfo = tz.tzoffset(res.tzname, tzdata)\n else:\n raise ValueError(\"offset must be tzinfo subclass, \"\n \"tz string, or int offset\")\n ret = ret.replace(tzinfo=tzinfo)\n elif res.tzname and res.tzname in time.tzname:\n ret = ret.replace(tzinfo=tz.tzlocal())\n elif res.tzoffset == 0:\n ret = ret.replace(tzinfo=tz.tzutc())\n elif res.tzoffset:\n ret = ret.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset))\n return ret, reso\n\n\ndef _attempt_monthly(val):\n pats = ['%Y-%m', '%m-%Y', '%b %Y', '%b-%Y']\n for pat in pats:\n try:\n ret = datetime.strptime(val, pat)\n return ret, ret, 'month'\n except Exception:\n pass\n\n\ndef _try_parse_monthly(arg):\n base = 2000\n add_base = False\n default = datetime(1, 1, 1).replace(hour=0, minute=0, second=0,\n microsecond=0)\n\n if len(arg) == 4:\n add_base = True\n y = int(arg[:2])\n m = int(arg[2:4])\n elif len(arg) >= 6: # 201201\n y = int(arg[:4])\n m = int(arg[4:6])\n if add_base:\n y += base\n ret = default.replace(year=y, month=m)\n return ret\n\n\nnormalize_date = tslib.normalize_date\n\n\ndef format(dt):\n \"\"\"Returns date in YYYYMMDD format.\"\"\"\n return dt.strftime('%Y%m%d')\n\nOLE_TIME_ZERO = datetime(1899, 12, 30, 0, 0, 0)\n\n\ndef ole2datetime(oledt):\n \"\"\"function for converting excel date to normal date format\"\"\"\n val = float(oledt)\n\n # Excel has a bug where it thinks the date 2/29/1900 exists\n # we just reject any date before 3/1/1900.\n if val < 61:\n raise ValueError(\"Value is outside of acceptable range: %s \" % val)\n\n return OLE_TIME_ZERO + timedelta(days=val)\n", "path": "pandas/tseries/tools.py" } ]
[ { "content": "from datetime import datetime, timedelta\nimport re\nimport sys\n\nimport numpy as np\n\nimport pandas.lib as lib\nimport pandas.tslib as tslib\nimport pandas.core.common as com\nfrom pandas.compat import StringIO, callable\nimport pandas.compat as compat\n\ntry:\n import dateutil\n from dateutil.parser import parse, DEFAULTPARSER\n from dateutil.relativedelta import relativedelta\n\n # raise exception if dateutil 2.0 install on 2.x platform\n if (sys.version_info[0] == 2 and\n dateutil.__version__ == '2.0'): # pragma: no cover\n raise Exception('dateutil 2.0 incompatible with Python 2.x, you must '\n 'install version 1.5 or 2.1+!')\nexcept ImportError: # pragma: no cover\n print('Please install python-dateutil via easy_install or some method!')\n raise # otherwise a 2nd import won't show the message\n\n\ndef _infer_tzinfo(start, end):\n def _infer(a, b):\n tz = a.tzinfo\n if b and b.tzinfo:\n if not (tslib.get_timezone(tz) == tslib.get_timezone(b.tzinfo)):\n raise AssertionError('Inputs must both have the same timezone,'\n ' {0} != {1}'.format(tz, b.tzinfo))\n return tz\n tz = None\n if start is not None:\n tz = _infer(start, end)\n elif end is not None:\n tz = _infer(end, start)\n return tz\n\n\ndef _maybe_get_tz(tz):\n if isinstance(tz, compat.string_types):\n import pytz\n tz = pytz.timezone(tz)\n if com.is_integer(tz):\n import pytz\n tz = pytz.FixedOffset(tz / 60)\n return tz\n\n\ndef to_datetime(arg, errors='ignore', dayfirst=False, utc=None, box=True,\n format=None, coerce=False, unit='ns'):\n \"\"\"\n Convert argument to datetime\n\n Parameters\n ----------\n arg : string, datetime, array of strings (with possible NAs)\n errors : {'ignore', 'raise'}, default 'ignore'\n Errors are ignored by default (values left untouched)\n dayfirst : boolean, default False\n If True parses dates with the day first, eg 20/01/2005\n Warning: dayfirst=True is not strict, but will prefer to parse\n with day first (this is a known bug).\n utc : boolean, default None\n Return UTC DatetimeIndex if True (converting any tz-aware\n datetime.datetime objects as well)\n box : boolean, default True\n If True returns a DatetimeIndex, if False returns ndarray of values\n format : string, default None\n strftime to parse time, eg \"%d/%m/%Y\"\n coerce : force errors to NaT (False by default)\n unit : unit of the arg (D,s,ms,us,ns) denote the unit in epoch\n (e.g. 
a unix timestamp), which is an integer/float number\n\n Returns\n -------\n ret : datetime if parsing succeeded\n \"\"\"\n from pandas import Timestamp\n from pandas.core.series import Series\n from pandas.tseries.index import DatetimeIndex\n\n def _convert_listlike(arg, box):\n\n if isinstance(arg, (list,tuple)):\n arg = np.array(arg, dtype='O')\n\n if com.is_datetime64_ns_dtype(arg):\n if box and not isinstance(arg, DatetimeIndex):\n try:\n return DatetimeIndex(arg, tz='utc' if utc else None)\n except ValueError:\n pass\n\n return arg\n\n arg = com._ensure_object(arg)\n try:\n if format is not None:\n result = None\n\n # shortcut formatting here\n if format == '%Y%m%d':\n try:\n result = _attempt_YYYYMMDD(arg)\n except:\n raise ValueError(\"cannot convert the input to '%Y%m%d' date format\")\n\n # fallback\n if result is None:\n result = tslib.array_strptime(arg, format, coerce=coerce)\n else:\n result = tslib.array_to_datetime(arg, raise_=errors == 'raise',\n utc=utc, dayfirst=dayfirst,\n coerce=coerce, unit=unit)\n if com.is_datetime64_dtype(result) and box:\n result = DatetimeIndex(result, tz='utc' if utc else None)\n return result\n\n except ValueError as e:\n try:\n values, tz = tslib.datetime_to_datetime64(arg)\n return DatetimeIndex._simple_new(values, None, tz=tz)\n except (ValueError, TypeError):\n raise e\n\n if arg is None:\n return arg\n elif isinstance(arg, Timestamp):\n return arg\n elif isinstance(arg, Series):\n values = _convert_listlike(arg.values, box=False)\n return Series(values, index=arg.index, name=arg.name)\n elif com.is_list_like(arg):\n return _convert_listlike(arg, box=box)\n\n return _convert_listlike(np.array([ arg ]), box=box)[0]\n\nclass DateParseError(ValueError):\n pass\n\ndef _attempt_YYYYMMDD(arg):\n \"\"\" try to parse the YYYYMMDD/%Y%m%d format, try to deal with NaT-like,\n arg is a passed in as an object dtype, but could really be ints/strings with nan-like/or floats (e.g. 
with nan) \"\"\"\n\n def calc(carg):\n # calculate the actual result\n carg = carg.astype(object)\n return lib.try_parse_year_month_day(carg/10000,carg/100 % 100, carg % 100)\n\n def calc_with_mask(carg,mask):\n result = np.empty(carg.shape, dtype='M8[ns]')\n iresult = result.view('i8')\n iresult[-mask] = tslib.iNaT\n result[mask] = calc(carg[mask].astype(np.float64).astype(np.int64)).astype('M8[ns]')\n return result\n\n # try intlike / strings that are ints\n try:\n return calc(arg.astype(np.int64))\n except:\n pass\n\n # a float with actual np.nan\n try:\n carg = arg.astype(np.float64)\n return calc_with_mask(carg,com.notnull(carg))\n except:\n pass\n\n # string with NaN-like\n try:\n mask = ~lib.ismember(arg, tslib._nat_strings)\n return calc_with_mask(arg,mask)\n except:\n pass\n\n return None\n\n# patterns for quarters like '4Q2005', '05Q1'\nqpat1full = re.compile(r'(\\d)Q(\\d\\d\\d\\d)')\nqpat2full = re.compile(r'(\\d\\d\\d\\d)Q(\\d)')\nqpat1 = re.compile(r'(\\d)Q(\\d\\d)')\nqpat2 = re.compile(r'(\\d\\d)Q(\\d)')\nypat = re.compile(r'(\\d\\d\\d\\d)$')\nhas_time = re.compile('(.+)([\\s]|T)+(.+)')\n\n\ndef parse_time_string(arg, freq=None, dayfirst=None, yearfirst=None):\n \"\"\"\n Try hard to parse datetime string, leveraging dateutil plus some extra\n goodies like quarter recognition.\n\n Parameters\n ----------\n arg : compat.string_types\n freq : str or DateOffset, default None\n Helps with interpreting time string if supplied\n dayfirst : bool, default None\n If None uses default from print_config\n yearfirst : bool, default None\n If None uses default from print_config\n\n Returns\n -------\n datetime, datetime/dateutil.parser._result, str\n \"\"\"\n from pandas.core.config import get_option\n from pandas.tseries.offsets import DateOffset\n from pandas.tseries.frequencies import (_get_rule_month, _month_numbers,\n _get_freq_str)\n\n if not isinstance(arg, compat.string_types):\n return arg\n\n arg = arg.upper()\n\n default = datetime(1, 1, 1).replace(hour=0, minute=0,\n second=0, microsecond=0)\n\n # special handling for possibilities eg, 2Q2005, 2Q05, 2005Q1, 05Q1\n if len(arg) in [4, 6]:\n m = ypat.match(arg)\n if m:\n ret = default.replace(year=int(m.group(1)))\n return ret, ret, 'year'\n\n add_century = False\n if len(arg) == 4:\n add_century = True\n qpats = [(qpat1, 1), (qpat2, 0)]\n else:\n qpats = [(qpat1full, 1), (qpat2full, 0)]\n\n for pat, yfirst in qpats:\n qparse = pat.match(arg)\n if qparse is not None:\n if yfirst:\n yi, qi = 1, 2\n else:\n yi, qi = 2, 1\n q = int(qparse.group(yi))\n y_str = qparse.group(qi)\n y = int(y_str)\n if add_century:\n y += 2000\n\n if freq is not None:\n # hack attack, #1228\n mnum = _month_numbers[_get_rule_month(freq)] + 1\n month = (mnum + (q - 1) * 3) % 12 + 1\n if month > mnum:\n y -= 1\n else:\n month = (q - 1) * 3 + 1\n\n ret = default.replace(year=y, month=month)\n return ret, ret, 'quarter'\n\n is_mo_str = freq is not None and freq == 'M'\n is_mo_off = getattr(freq, 'rule_code', None) == 'M'\n is_monthly = is_mo_str or is_mo_off\n if len(arg) == 6 and is_monthly:\n try:\n ret = _try_parse_monthly(arg)\n if ret is not None:\n return ret, ret, 'month'\n except Exception:\n pass\n\n # montly f7u12\n mresult = _attempt_monthly(arg)\n if mresult:\n return mresult\n\n if dayfirst is None:\n dayfirst = get_option(\"display.date_dayfirst\")\n if yearfirst is None:\n yearfirst = get_option(\"display.date_yearfirst\")\n\n try:\n parsed, reso = dateutil_parse(arg, default, dayfirst=dayfirst,\n yearfirst=yearfirst)\n except Exception as e:\n 
# TODO: allow raise of errors within instead\n raise DateParseError(e)\n\n if parsed is None:\n raise DateParseError(\"Could not parse %s\" % arg)\n\n return parsed, parsed, reso # datetime, resolution\n\n\ndef dateutil_parse(timestr, default,\n ignoretz=False, tzinfos=None,\n **kwargs):\n \"\"\" lifted from dateutil to get resolution\"\"\"\n from dateutil import tz\n import time\n fobj = StringIO(str(timestr))\n\n res = DEFAULTPARSER._parse(fobj, **kwargs)\n\n # dateutil 2.2 compat\n if isinstance(res, tuple):\n res, _ = res\n\n if res is None:\n raise ValueError(\"unknown string format\")\n\n repl = {}\n reso = None\n for attr in [\"year\", \"month\", \"day\", \"hour\",\n \"minute\", \"second\", \"microsecond\"]:\n value = getattr(res, attr)\n if value is not None:\n repl[attr] = value\n reso = attr\n\n if reso is None:\n raise ValueError(\"Cannot parse date.\")\n\n if reso == 'microsecond' and repl['microsecond'] == 0:\n reso = 'second'\n\n ret = default.replace(**repl)\n if res.weekday is not None and not res.day:\n ret = ret + relativedelta.relativedelta(weekday=res.weekday)\n if not ignoretz:\n if callable(tzinfos) or tzinfos and res.tzname in tzinfos:\n if callable(tzinfos):\n tzdata = tzinfos(res.tzname, res.tzoffset)\n else:\n tzdata = tzinfos.get(res.tzname)\n if isinstance(tzdata, datetime.tzinfo):\n tzinfo = tzdata\n elif isinstance(tzdata, compat.string_types):\n tzinfo = tz.tzstr(tzdata)\n elif isinstance(tzdata, int):\n tzinfo = tz.tzoffset(res.tzname, tzdata)\n else:\n raise ValueError(\"offset must be tzinfo subclass, \"\n \"tz string, or int offset\")\n ret = ret.replace(tzinfo=tzinfo)\n elif res.tzname and res.tzname in time.tzname:\n ret = ret.replace(tzinfo=tz.tzlocal())\n elif res.tzoffset == 0:\n ret = ret.replace(tzinfo=tz.tzutc())\n elif res.tzoffset:\n ret = ret.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset))\n return ret, reso\n\n\ndef _attempt_monthly(val):\n pats = ['%Y-%m', '%m-%Y', '%b %Y', '%b-%Y']\n for pat in pats:\n try:\n ret = datetime.strptime(val, pat)\n return ret, ret, 'month'\n except Exception:\n pass\n\n\ndef _try_parse_monthly(arg):\n base = 2000\n add_base = False\n default = datetime(1, 1, 1).replace(hour=0, minute=0, second=0,\n microsecond=0)\n\n if len(arg) == 4:\n add_base = True\n y = int(arg[:2])\n m = int(arg[2:4])\n elif len(arg) >= 6: # 201201\n y = int(arg[:4])\n m = int(arg[4:6])\n if add_base:\n y += base\n ret = default.replace(year=y, month=m)\n return ret\n\n\nnormalize_date = tslib.normalize_date\n\n\ndef format(dt):\n \"\"\"Returns date in YYYYMMDD format.\"\"\"\n return dt.strftime('%Y%m%d')\n\nOLE_TIME_ZERO = datetime(1899, 12, 30, 0, 0, 0)\n\n\ndef ole2datetime(oledt):\n \"\"\"function for converting excel date to normal date format\"\"\"\n val = float(oledt)\n\n # Excel has a bug where it thinks the date 2/29/1900 exists\n # we just reject any date before 3/1/1900.\n if val < 61:\n raise ValueError(\"Value is outside of acceptable range: %s \" % val)\n\n return OLE_TIME_ZERO + timedelta(days=val)\n", "path": "pandas/tseries/tools.py" } ]
diff --git a/ci/requirements-3.3.txt b/ci/requirements-3.3.txt index 318030e733158..94a77bbc06024 100644 --- a/ci/requirements-3.3.txt +++ b/ci/requirements-3.3.txt @@ -1,4 +1,4 @@ -python-dateutil==2.1 +python-dateutil==2.2 pytz==2013b openpyxl==1.6.2 xlsxwriter==0.4.3 diff --git a/pandas/tseries/tools.py b/pandas/tseries/tools.py index 3d8803237931d..af1a31bcec311 100644 --- a/pandas/tseries/tools.py +++ b/pandas/tseries/tools.py @@ -305,6 +305,10 @@ def dateutil_parse(timestr, default, res = DEFAULTPARSER._parse(fobj, **kwargs) + # dateutil 2.2 compat + if isinstance(res, tuple): + res, _ = res + if res is None: raise ValueError("unknown string format")
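The requirements bump from python-dateutil 2.1 to 2.2 together with the "dateutil 2.2 compat" shim in the patch suggests the likely root cause of the failures: newer dateutil hands back a tuple from the private parser helper instead of a bare result object, so the old code ended up reading attributes off a tuple. A minimal sketch of the defensive pattern the patch adopts (assuming only that a tuple may now come back from `_parse`):

    # dateutil <= 2.1 returns the result object directly; 2.2 returns a
    # (result, extra) tuple, so unpack only when a tuple is seen.
    res = DEFAULTPARSER._parse(fobj, **kwargs)
    if isinstance(res, tuple):
        res, _ = res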
comic__grand-challenge.org-3363
Mismatch in evaluation jobs when the challenge admin pre-runs the algorithm on cases from the phase's archive

`create_algorithm_jobs_for_evaluation` exits successfully but the evaluation remains in the "executing algorithm" state. This occurs when the challenge admin uses the try-out algorithm page and selects an image from the archive linked to the phase. Probably the jobs need filtering by creator?
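The task code below builds its set of already-run jobs from every Job for the algorithm image, regardless of who created it, so a job the challenge admin started from the try-out page can make `filter_civs_for_algorithm` skip that archive item, and the evaluation is left waiting for a job that was never created. One plausible direction, sketched here as a guess rather than the merged fix, is to let only system jobs (creator=None) suppress new jobs:

    # Hypothetical tweak inside filter_civs_for_algorithm: only system jobs
    # (creator=None) should count as "already run" for archive/evaluation runs.
    existing_jobs = {
        frozenset(j.inputs.all())
        for j in Job.objects.filter(
            algorithm_image=algorithm_image, creator=None
        )
        .annotate(
            inputs_match_count=Count(
                "inputs",
                filter=Q(
                    inputs__in={civ for civ_set in civ_sets for civ in civ_set}
                ),
            )
        )
        .filter(inputs_match_count=len(input_interfaces))
        .prefetch_related("inputs")
    }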
[ { "content": "import logging\nfrom tempfile import TemporaryDirectory\nfrom typing import NamedTuple\n\nimport boto3\nfrom botocore.exceptions import ClientError\nfrom celery import chain, group, shared_task\nfrom django.conf import settings\nfrom django.core.cache import cache\nfrom django.core.files.base import File\nfrom django.db import transaction\nfrom django.db.models import Count, Q\nfrom django.db.transaction import on_commit\nfrom django.utils._os import safe_join\nfrom redis.exceptions import LockError\n\nfrom grandchallenge.algorithms.exceptions import TooManyJobsScheduled\nfrom grandchallenge.algorithms.models import Algorithm, AlgorithmImage, Job\nfrom grandchallenge.archives.models import Archive\nfrom grandchallenge.cases.tasks import build_images\nfrom grandchallenge.components.tasks import (\n _retry,\n add_file_to_component_interface_value,\n add_image_to_component_interface_value,\n)\nfrom grandchallenge.core.cache import _cache_key_from_method\nfrom grandchallenge.core.templatetags.remove_whitespace import oxford_comma\nfrom grandchallenge.credits.models import Credit\nfrom grandchallenge.notifications.models import Notification, NotificationType\nfrom grandchallenge.subdomains.utils import reverse\n\nlogger = logging.getLogger(__name__)\n\n\n@shared_task(**settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-micro-short\"])\ndef run_algorithm_job_for_inputs(\n *, job_pk, upload_session_pks, user_upload_pks\n):\n with transaction.atomic():\n job = Job.objects.get(pk=job_pk)\n\n assignment_tasks = []\n\n if upload_session_pks:\n assignment_tasks.extend(\n chain(\n build_images.signature(\n kwargs={\"upload_session_pk\": upload_session_pk},\n immutable=True,\n ),\n add_image_to_component_interface_value.signature(\n kwargs={\n \"component_interface_value_pk\": civ_pk,\n \"upload_session_pk\": upload_session_pk,\n },\n immutable=True,\n ),\n )\n for civ_pk, upload_session_pk in upload_session_pks.items()\n )\n\n if user_upload_pks:\n assignment_tasks.extend(\n add_file_to_component_interface_value.signature(\n kwargs={\n \"component_interface_value_pk\": civ_pk,\n \"user_upload_pk\": user_upload_pk,\n \"target_pk\": job.algorithm_image.algorithm.pk,\n \"target_app\": \"algorithms\",\n \"target_model\": \"algorithm\",\n },\n immutable=True,\n )\n for civ_pk, user_upload_pk in user_upload_pks.items()\n )\n\n canvas = chain(\n group(assignment_tasks),\n execute_algorithm_job_for_inputs.signature(\n kwargs={\"job_pk\": job_pk}, immutable=True\n ),\n )\n\n on_commit(canvas.apply_async)\n\n\n@shared_task(**settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-micro-short\"])\ndef execute_algorithm_job_for_inputs(*, job_pk):\n with transaction.atomic():\n job = Job.objects.get(pk=job_pk)\n\n # Notify the job creator on failure\n linked_task = send_failed_job_notification.signature(\n kwargs={\"job_pk\": str(job.pk)}, immutable=True\n )\n\n # check if all ComponentInterfaceValue's have a value.\n missing_inputs = list(\n civ for civ in job.inputs.all() if not civ.has_value\n )\n\n if missing_inputs:\n job.update_status(\n status=job.CANCELLED,\n error_message=(\n f\"Job can't be started, input is missing for \"\n f\"{oxford_comma([c.interface.title for c in missing_inputs])}\"\n ),\n )\n on_commit(linked_task.apply_async)\n else:\n job.task_on_success = linked_task\n job.save()\n on_commit(\n execute_algorithm_job.signature(\n kwargs={\"job_pk\": job_pk}, immutable=True\n ).apply_async\n )\n\n\n@shared_task(\n **settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-micro-short\"],\n 
throws=(TooManyJobsScheduled,),\n)\ndef execute_algorithm_job(*, job_pk, retries=0):\n def retry_with_delay():\n _retry(\n task=execute_algorithm_job,\n signature_kwargs={\n \"kwargs\": {\n \"job_pk\": job_pk,\n },\n \"immutable\": True,\n },\n retries=retries,\n )\n\n with transaction.atomic():\n if Job.objects.active().count() >= settings.ALGORITHMS_MAX_ACTIVE_JOBS:\n logger.info(\"Retrying task as too many jobs scheduled\")\n retry_with_delay()\n raise TooManyJobsScheduled\n\n job = Job.objects.get(pk=job_pk)\n on_commit(job.execute)\n\n\n@shared_task(\n **settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-2xlarge\"],\n throws=(\n TooManyJobsScheduled,\n LockError,\n ),\n)\ndef create_algorithm_jobs_for_archive(\n *, archive_pks, archive_item_pks=None, algorithm_pks=None, retries=0\n):\n def retry_with_delay():\n _retry(\n task=create_algorithm_jobs_for_archive,\n signature_kwargs={\n \"kwargs\": {\n \"archive_pks\": archive_pks,\n \"archive_item_pks\": archive_item_pks,\n \"algorithm_pks\": algorithm_pks,\n },\n \"immutable\": True,\n },\n retries=retries,\n )\n\n if Job.objects.active().count() >= settings.ALGORITHMS_MAX_ACTIVE_JOBS:\n logger.info(\"Retrying task as too many jobs scheduled\")\n retry_with_delay()\n raise TooManyJobsScheduled\n\n for archive in Archive.objects.filter(pk__in=archive_pks).all():\n # Only the archive groups should be able to view the job\n # Can be shared with the algorithm editor if needed\n archive_groups = [\n archive.editors_group,\n archive.uploaders_group,\n archive.users_group,\n ]\n\n if algorithm_pks is not None:\n algorithms = Algorithm.objects.filter(pk__in=algorithm_pks).all()\n else:\n algorithms = archive.algorithms.all()\n\n if archive_item_pks is not None:\n archive_items = archive.items.filter(pk__in=archive_item_pks)\n else:\n archive_items = archive.items.all()\n\n for algorithm in algorithms:\n try:\n with cache.lock(\n _cache_key_from_method(create_algorithm_jobs),\n timeout=settings.CELERY_TASK_TIME_LIMIT,\n blocking_timeout=10,\n ):\n create_algorithm_jobs(\n algorithm_image=algorithm.active_image,\n civ_sets=[\n {*ai.values.all()}\n for ai in archive_items.prefetch_related(\n \"values__interface\"\n )\n ],\n extra_viewer_groups=archive_groups,\n # NOTE: no emails in case the logs leak data\n # to the algorithm editors\n task_on_success=None,\n )\n except (TooManyJobsScheduled, LockError) as error:\n logger.info(f\"Retrying task due to: {error}\")\n retry_with_delay()\n raise\n\n\ndef create_algorithm_jobs(\n *,\n algorithm_image,\n civ_sets,\n extra_viewer_groups=None,\n extra_logs_viewer_groups=None,\n max_jobs=None,\n task_on_success=None,\n task_on_failure=None,\n time_limit=None,\n):\n \"\"\"\n Creates algorithm jobs for sets of component interface values\n\n Parameters\n ----------\n algorithm_image\n The algorithm image to use\n civ_sets\n The sets of component interface values that will be used as input\n for the algorithm image\n extra_viewer_groups\n The groups that will also get permission to view the jobs\n extra_logs_viewer_groups\n The groups that will also get permission to view the logs for\n the jobs\n max_jobs\n The maximum number of jobs to schedule\n task_on_success\n Celery task that is run on job success. 
This must be able\n to handle being called more than once, and in parallel.\n task_on_failure\n Celery task that is run on job failure\n time_limit\n The time limit for the Job\n \"\"\"\n civ_sets = filter_civs_for_algorithm(\n civ_sets=civ_sets, algorithm_image=algorithm_image\n )\n\n if max_jobs is not None:\n civ_sets = civ_sets[:max_jobs]\n\n if time_limit is None:\n time_limit = settings.ALGORITHMS_JOB_DEFAULT_TIME_LIMIT_SECONDS\n\n jobs = []\n\n for civ_set in civ_sets:\n\n if len(jobs) >= settings.ALGORITHMS_JOB_BATCH_LIMIT:\n raise TooManyJobsScheduled\n\n with transaction.atomic():\n job = Job.objects.create(\n creator=None, # System jobs, so no creator\n algorithm_image=algorithm_image,\n task_on_success=task_on_success,\n task_on_failure=task_on_failure,\n time_limit=time_limit,\n extra_viewer_groups=extra_viewer_groups,\n extra_logs_viewer_groups=extra_logs_viewer_groups,\n input_civ_set=civ_set,\n )\n on_commit(job.execute)\n\n jobs.append(job)\n\n return jobs\n\n\ndef filter_civs_for_algorithm(*, civ_sets, algorithm_image):\n \"\"\"\n Removes sets of civs that are invalid for new jobs\n\n Parameters\n ----------\n civ_sets\n Iterable of sets of ComponentInterfaceValues that are candidate for\n new Jobs\n algorithm_image\n The algorithm image to use for new job\n\n Returns\n -------\n Filtered set of ComponentInterfaceValues\n \"\"\"\n input_interfaces = {*algorithm_image.algorithm.inputs.all()}\n\n existing_jobs = {\n frozenset(j.inputs.all())\n for j in Job.objects.filter(algorithm_image=algorithm_image)\n .annotate(\n inputs_match_count=Count(\n \"inputs\",\n filter=Q(\n inputs__in={civ for civ_set in civ_sets for civ in civ_set}\n ),\n )\n )\n .filter(inputs_match_count=len(input_interfaces))\n .prefetch_related(\"inputs\")\n }\n\n valid_job_inputs = []\n\n for civ_set in civ_sets:\n # Check interfaces are complete\n civ_interfaces = {civ.interface for civ in civ_set}\n if input_interfaces.issubset(civ_interfaces):\n # If the algorithm works with a subset of the interfaces\n # present in the set then only feed these through to the algorithm\n valid_input = {\n civ for civ in civ_set if civ.interface in input_interfaces\n }\n else:\n continue\n\n # Check job has not been run\n if frozenset(valid_input) in existing_jobs:\n continue\n\n valid_job_inputs.append(valid_input)\n\n return valid_job_inputs\n\n\n@shared_task\ndef send_failed_job_notification(*, job_pk):\n job = Job.objects.get(pk=job_pk)\n\n if job.status == Job.FAILURE and job.creator is not None:\n algorithm = job.algorithm_image.algorithm\n url = reverse(\"algorithms:job-list\", kwargs={\"slug\": algorithm.slug})\n Notification.send(\n kind=NotificationType.NotificationTypeChoices.JOB_STATUS,\n actor=job.creator,\n message=f\"Unfortunately one of the jobs for algorithm {algorithm.title} \"\n f\"failed with an error\",\n target=algorithm,\n description=url,\n )\n\n\nclass ChallengeNameAndUrl(NamedTuple):\n short_name: str\n get_absolute_url: str\n\n\n@shared_task\ndef update_associated_challenges():\n from grandchallenge.challenges.models import Challenge\n\n challenge_list = {}\n for algorithm in Algorithm.objects.all():\n challenge_list[algorithm.pk] = [\n ChallengeNameAndUrl(\n short_name=challenge.short_name,\n get_absolute_url=challenge.get_absolute_url(),\n )\n for challenge in Challenge.objects.filter(\n phase__submission__algorithm_image__algorithm=algorithm\n ).distinct()\n ]\n cache.set(\"challenges_for_algorithms\", challenge_list, 
timeout=None)\n\n\n@shared_task(**settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-2xlarge\"])\ndef import_remote_algorithm_image(*, remote_bucket_name, algorithm_image_pk):\n algorithm_image = AlgorithmImage.objects.get(pk=algorithm_image_pk)\n\n if (\n algorithm_image.import_status\n != AlgorithmImage.ImportStatusChoices.INITIALIZED\n ):\n raise RuntimeError(\"Algorithm image is not initialized\")\n\n s3_client = boto3.client(\"s3\")\n\n try:\n response = s3_client.list_objects_v2(\n Bucket=remote_bucket_name,\n Prefix=algorithm_image.image.field.upload_to(algorithm_image, \"-\")[\n :-1\n ],\n )\n except ClientError as error:\n algorithm_image.import_status = (\n AlgorithmImage.ImportStatusChoices.FAILED\n )\n algorithm_image.status = str(error)\n algorithm_image.save()\n raise\n\n output_files = response.get(\"Contents\", [])\n if len(output_files) != 1:\n algorithm_image.import_status = (\n AlgorithmImage.ImportStatusChoices.FAILED\n )\n algorithm_image.status = \"Unique algorithm image file not found\"\n algorithm_image.save()\n raise RuntimeError(algorithm_image.status)\n\n output_file = output_files[0]\n\n # We cannot copy objects directly here as this is likely a cross-region\n # request, so download it then upload\n with TemporaryDirectory() as tmp_dir:\n filename = output_file[\"Key\"].split(\"/\")[-1]\n dest = safe_join(tmp_dir, filename)\n\n s3_client.download_file(\n Filename=dest,\n Bucket=remote_bucket_name,\n Key=output_file[\"Key\"],\n )\n\n with open(dest, \"rb\") as f:\n algorithm_image.image.save(filename, File(f))\n\n\n@shared_task(**settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-2xlarge\"])\ndef set_credits_per_job():\n default_credits_per_month = Credit._meta.get_field(\"credits\").get_default()\n default_credits_per_job = Algorithm._meta.get_field(\n \"credits_per_job\"\n ).get_default()\n min_credits_per_job = (\n default_credits_per_month\n / settings.ALGORITHMS_MAX_DEFAULT_JOBS_PER_MONTH\n )\n\n for algorithm in Algorithm.objects.all().iterator():\n if algorithm.average_duration and algorithm.active_image:\n executor = Job(\n algorithm_image=algorithm.active_image\n ).get_executor(backend=settings.COMPONENTS_DEFAULT_BACKEND)\n\n cents_per_job = (\n executor.usd_cents_per_hour\n * algorithm.average_duration.total_seconds()\n / 3600\n )\n\n algorithm.credits_per_job = max(\n int(\n round(\n cents_per_job\n * default_credits_per_month\n / settings.ALGORITHMS_USER_CENTS_PER_MONTH,\n -1,\n )\n ),\n min_credits_per_job,\n )\n else:\n algorithm.credits_per_job = default_credits_per_job\n\n algorithm.save(update_fields=(\"credits_per_job\",))\n", "path": "app/grandchallenge/algorithms/tasks.py" } ]
[ { "content": "import logging\nfrom tempfile import TemporaryDirectory\nfrom typing import NamedTuple\n\nimport boto3\nfrom botocore.exceptions import ClientError\nfrom celery import chain, group, shared_task\nfrom django.conf import settings\nfrom django.core.cache import cache\nfrom django.core.files.base import File\nfrom django.db import transaction\nfrom django.db.models import Count, Q\nfrom django.db.transaction import on_commit\nfrom django.utils._os import safe_join\nfrom redis.exceptions import LockError\n\nfrom grandchallenge.algorithms.exceptions import TooManyJobsScheduled\nfrom grandchallenge.algorithms.models import Algorithm, AlgorithmImage, Job\nfrom grandchallenge.archives.models import Archive\nfrom grandchallenge.cases.tasks import build_images\nfrom grandchallenge.components.tasks import (\n _retry,\n add_file_to_component_interface_value,\n add_image_to_component_interface_value,\n)\nfrom grandchallenge.core.cache import _cache_key_from_method\nfrom grandchallenge.core.templatetags.remove_whitespace import oxford_comma\nfrom grandchallenge.credits.models import Credit\nfrom grandchallenge.notifications.models import Notification, NotificationType\nfrom grandchallenge.subdomains.utils import reverse\n\nlogger = logging.getLogger(__name__)\n\n\n@shared_task(**settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-micro-short\"])\ndef run_algorithm_job_for_inputs(\n *, job_pk, upload_session_pks, user_upload_pks\n):\n with transaction.atomic():\n job = Job.objects.get(pk=job_pk)\n\n assignment_tasks = []\n\n if upload_session_pks:\n assignment_tasks.extend(\n chain(\n build_images.signature(\n kwargs={\"upload_session_pk\": upload_session_pk},\n immutable=True,\n ),\n add_image_to_component_interface_value.signature(\n kwargs={\n \"component_interface_value_pk\": civ_pk,\n \"upload_session_pk\": upload_session_pk,\n },\n immutable=True,\n ),\n )\n for civ_pk, upload_session_pk in upload_session_pks.items()\n )\n\n if user_upload_pks:\n assignment_tasks.extend(\n add_file_to_component_interface_value.signature(\n kwargs={\n \"component_interface_value_pk\": civ_pk,\n \"user_upload_pk\": user_upload_pk,\n \"target_pk\": job.algorithm_image.algorithm.pk,\n \"target_app\": \"algorithms\",\n \"target_model\": \"algorithm\",\n },\n immutable=True,\n )\n for civ_pk, user_upload_pk in user_upload_pks.items()\n )\n\n canvas = chain(\n group(assignment_tasks),\n execute_algorithm_job_for_inputs.signature(\n kwargs={\"job_pk\": job_pk}, immutable=True\n ),\n )\n\n on_commit(canvas.apply_async)\n\n\n@shared_task(**settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-micro-short\"])\ndef execute_algorithm_job_for_inputs(*, job_pk):\n with transaction.atomic():\n job = Job.objects.get(pk=job_pk)\n\n # Notify the job creator on failure\n linked_task = send_failed_job_notification.signature(\n kwargs={\"job_pk\": str(job.pk)}, immutable=True\n )\n\n # check if all ComponentInterfaceValue's have a value.\n missing_inputs = list(\n civ for civ in job.inputs.all() if not civ.has_value\n )\n\n if missing_inputs:\n job.update_status(\n status=job.CANCELLED,\n error_message=(\n f\"Job can't be started, input is missing for \"\n f\"{oxford_comma([c.interface.title for c in missing_inputs])}\"\n ),\n )\n on_commit(linked_task.apply_async)\n else:\n job.task_on_success = linked_task\n job.save()\n on_commit(\n execute_algorithm_job.signature(\n kwargs={\"job_pk\": job_pk}, immutable=True\n ).apply_async\n )\n\n\n@shared_task(\n **settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-micro-short\"],\n 
throws=(TooManyJobsScheduled,),\n)\ndef execute_algorithm_job(*, job_pk, retries=0):\n def retry_with_delay():\n _retry(\n task=execute_algorithm_job,\n signature_kwargs={\n \"kwargs\": {\n \"job_pk\": job_pk,\n },\n \"immutable\": True,\n },\n retries=retries,\n )\n\n with transaction.atomic():\n if Job.objects.active().count() >= settings.ALGORITHMS_MAX_ACTIVE_JOBS:\n logger.info(\"Retrying task as too many jobs scheduled\")\n retry_with_delay()\n raise TooManyJobsScheduled\n\n job = Job.objects.get(pk=job_pk)\n on_commit(job.execute)\n\n\n@shared_task(\n **settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-2xlarge\"],\n throws=(\n TooManyJobsScheduled,\n LockError,\n ),\n)\ndef create_algorithm_jobs_for_archive(\n *, archive_pks, archive_item_pks=None, algorithm_pks=None, retries=0\n):\n def retry_with_delay():\n _retry(\n task=create_algorithm_jobs_for_archive,\n signature_kwargs={\n \"kwargs\": {\n \"archive_pks\": archive_pks,\n \"archive_item_pks\": archive_item_pks,\n \"algorithm_pks\": algorithm_pks,\n },\n \"immutable\": True,\n },\n retries=retries,\n )\n\n if Job.objects.active().count() >= settings.ALGORITHMS_MAX_ACTIVE_JOBS:\n logger.info(\"Retrying task as too many jobs scheduled\")\n retry_with_delay()\n raise TooManyJobsScheduled\n\n for archive in Archive.objects.filter(pk__in=archive_pks).all():\n # Only the archive groups should be able to view the job\n # Can be shared with the algorithm editor if needed\n archive_groups = [\n archive.editors_group,\n archive.uploaders_group,\n archive.users_group,\n ]\n\n if algorithm_pks is not None:\n algorithms = Algorithm.objects.filter(pk__in=algorithm_pks).all()\n else:\n algorithms = archive.algorithms.all()\n\n if archive_item_pks is not None:\n archive_items = archive.items.filter(pk__in=archive_item_pks)\n else:\n archive_items = archive.items.all()\n\n for algorithm in algorithms:\n try:\n with cache.lock(\n _cache_key_from_method(create_algorithm_jobs),\n timeout=settings.CELERY_TASK_TIME_LIMIT,\n blocking_timeout=10,\n ):\n create_algorithm_jobs(\n algorithm_image=algorithm.active_image,\n civ_sets=[\n {*ai.values.all()}\n for ai in archive_items.prefetch_related(\n \"values__interface\"\n )\n ],\n extra_viewer_groups=archive_groups,\n # NOTE: no emails in case the logs leak data\n # to the algorithm editors\n task_on_success=None,\n )\n except (TooManyJobsScheduled, LockError) as error:\n logger.info(f\"Retrying task due to: {error}\")\n retry_with_delay()\n raise\n\n\ndef create_algorithm_jobs(\n *,\n algorithm_image,\n civ_sets,\n extra_viewer_groups=None,\n extra_logs_viewer_groups=None,\n max_jobs=None,\n task_on_success=None,\n task_on_failure=None,\n time_limit=None,\n):\n \"\"\"\n Creates algorithm jobs for sets of component interface values\n\n Parameters\n ----------\n algorithm_image\n The algorithm image to use\n civ_sets\n The sets of component interface values that will be used as input\n for the algorithm image\n extra_viewer_groups\n The groups that will also get permission to view the jobs\n extra_logs_viewer_groups\n The groups that will also get permission to view the logs for\n the jobs\n max_jobs\n The maximum number of jobs to schedule\n task_on_success\n Celery task that is run on job success. 
This must be able\n to handle being called more than once, and in parallel.\n task_on_failure\n Celery task that is run on job failure\n time_limit\n The time limit for the Job\n \"\"\"\n civ_sets = filter_civs_for_algorithm(\n civ_sets=civ_sets, algorithm_image=algorithm_image\n )\n\n if max_jobs is not None:\n civ_sets = civ_sets[:max_jobs]\n\n if time_limit is None:\n time_limit = settings.ALGORITHMS_JOB_DEFAULT_TIME_LIMIT_SECONDS\n\n jobs = []\n\n for civ_set in civ_sets:\n\n if len(jobs) >= settings.ALGORITHMS_JOB_BATCH_LIMIT:\n raise TooManyJobsScheduled\n\n with transaction.atomic():\n job = Job.objects.create(\n creator=None, # System jobs, so no creator\n algorithm_image=algorithm_image,\n task_on_success=task_on_success,\n task_on_failure=task_on_failure,\n time_limit=time_limit,\n extra_viewer_groups=extra_viewer_groups,\n extra_logs_viewer_groups=extra_logs_viewer_groups,\n input_civ_set=civ_set,\n )\n on_commit(job.execute)\n\n jobs.append(job)\n\n return jobs\n\n\ndef filter_civs_for_algorithm(*, civ_sets, algorithm_image):\n \"\"\"\n Removes sets of civs that are invalid for new jobs\n\n Parameters\n ----------\n civ_sets\n Iterable of sets of ComponentInterfaceValues that are candidate for\n new Jobs\n algorithm_image\n The algorithm image to use for new job\n\n Returns\n -------\n Filtered set of ComponentInterfaceValues\n \"\"\"\n input_interfaces = {*algorithm_image.algorithm.inputs.all()}\n\n existing_jobs = {\n frozenset(j.inputs.all())\n for j in Job.objects.filter(algorithm_image=algorithm_image)\n .annotate(\n inputs_match_count=Count(\n \"inputs\",\n filter=Q(\n inputs__in={civ for civ_set in civ_sets for civ in civ_set}\n ),\n )\n )\n .filter(inputs_match_count=len(input_interfaces), creator=None)\n .prefetch_related(\"inputs\")\n }\n\n valid_job_inputs = []\n\n for civ_set in civ_sets:\n # Check interfaces are complete\n civ_interfaces = {civ.interface for civ in civ_set}\n if input_interfaces.issubset(civ_interfaces):\n # If the algorithm works with a subset of the interfaces\n # present in the set then only feed these through to the algorithm\n valid_input = {\n civ for civ in civ_set if civ.interface in input_interfaces\n }\n else:\n continue\n\n # Check job has not been run\n if frozenset(valid_input) in existing_jobs:\n continue\n\n valid_job_inputs.append(valid_input)\n\n return valid_job_inputs\n\n\n@shared_task\ndef send_failed_job_notification(*, job_pk):\n job = Job.objects.get(pk=job_pk)\n\n if job.status == Job.FAILURE and job.creator is not None:\n algorithm = job.algorithm_image.algorithm\n url = reverse(\"algorithms:job-list\", kwargs={\"slug\": algorithm.slug})\n Notification.send(\n kind=NotificationType.NotificationTypeChoices.JOB_STATUS,\n actor=job.creator,\n message=f\"Unfortunately one of the jobs for algorithm {algorithm.title} \"\n f\"failed with an error\",\n target=algorithm,\n description=url,\n )\n\n\nclass ChallengeNameAndUrl(NamedTuple):\n short_name: str\n get_absolute_url: str\n\n\n@shared_task\ndef update_associated_challenges():\n from grandchallenge.challenges.models import Challenge\n\n challenge_list = {}\n for algorithm in Algorithm.objects.all():\n challenge_list[algorithm.pk] = [\n ChallengeNameAndUrl(\n short_name=challenge.short_name,\n get_absolute_url=challenge.get_absolute_url(),\n )\n for challenge in Challenge.objects.filter(\n phase__submission__algorithm_image__algorithm=algorithm\n ).distinct()\n ]\n cache.set(\"challenges_for_algorithms\", challenge_list, 
timeout=None)\n\n\n@shared_task(**settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-2xlarge\"])\ndef import_remote_algorithm_image(*, remote_bucket_name, algorithm_image_pk):\n algorithm_image = AlgorithmImage.objects.get(pk=algorithm_image_pk)\n\n if (\n algorithm_image.import_status\n != AlgorithmImage.ImportStatusChoices.INITIALIZED\n ):\n raise RuntimeError(\"Algorithm image is not initialized\")\n\n s3_client = boto3.client(\"s3\")\n\n try:\n response = s3_client.list_objects_v2(\n Bucket=remote_bucket_name,\n Prefix=algorithm_image.image.field.upload_to(algorithm_image, \"-\")[\n :-1\n ],\n )\n except ClientError as error:\n algorithm_image.import_status = (\n AlgorithmImage.ImportStatusChoices.FAILED\n )\n algorithm_image.status = str(error)\n algorithm_image.save()\n raise\n\n output_files = response.get(\"Contents\", [])\n if len(output_files) != 1:\n algorithm_image.import_status = (\n AlgorithmImage.ImportStatusChoices.FAILED\n )\n algorithm_image.status = \"Unique algorithm image file not found\"\n algorithm_image.save()\n raise RuntimeError(algorithm_image.status)\n\n output_file = output_files[0]\n\n # We cannot copy objects directly here as this is likely a cross-region\n # request, so download it then upload\n with TemporaryDirectory() as tmp_dir:\n filename = output_file[\"Key\"].split(\"/\")[-1]\n dest = safe_join(tmp_dir, filename)\n\n s3_client.download_file(\n Filename=dest,\n Bucket=remote_bucket_name,\n Key=output_file[\"Key\"],\n )\n\n with open(dest, \"rb\") as f:\n algorithm_image.image.save(filename, File(f))\n\n\n@shared_task(**settings.CELERY_TASK_DECORATOR_KWARGS[\"acks-late-2xlarge\"])\ndef set_credits_per_job():\n default_credits_per_month = Credit._meta.get_field(\"credits\").get_default()\n default_credits_per_job = Algorithm._meta.get_field(\n \"credits_per_job\"\n ).get_default()\n min_credits_per_job = (\n default_credits_per_month\n / settings.ALGORITHMS_MAX_DEFAULT_JOBS_PER_MONTH\n )\n\n for algorithm in Algorithm.objects.all().iterator():\n if algorithm.average_duration and algorithm.active_image:\n executor = Job(\n algorithm_image=algorithm.active_image\n ).get_executor(backend=settings.COMPONENTS_DEFAULT_BACKEND)\n\n cents_per_job = (\n executor.usd_cents_per_hour\n * algorithm.average_duration.total_seconds()\n / 3600\n )\n\n algorithm.credits_per_job = max(\n int(\n round(\n cents_per_job\n * default_credits_per_month\n / settings.ALGORITHMS_USER_CENTS_PER_MONTH,\n -1,\n )\n ),\n min_credits_per_job,\n )\n else:\n algorithm.credits_per_job = default_credits_per_job\n\n algorithm.save(update_fields=(\"credits_per_job\",))\n", "path": "app/grandchallenge/algorithms/tasks.py" } ]
diff --git a/app/grandchallenge/algorithms/tasks.py b/app/grandchallenge/algorithms/tasks.py index e3b98744de..e5af0269fd 100644 --- a/app/grandchallenge/algorithms/tasks.py +++ b/app/grandchallenge/algorithms/tasks.py @@ -319,7 +319,7 @@ def filter_civs_for_algorithm(*, civ_sets, algorithm_image): ), ) ) - .filter(inputs_match_count=len(input_interfaces)) + .filter(inputs_match_count=len(input_interfaces), creator=None) .prefetch_related("inputs") } diff --git a/app/tests/algorithms_tests/test_tasks.py b/app/tests/algorithms_tests/test_tasks.py index 58aa09b8eb..c8629c574a 100644 --- a/app/tests/algorithms_tests/test_tasks.py +++ b/app/tests/algorithms_tests/test_tasks.py @@ -639,13 +639,19 @@ def test_existing_jobs(self): cis = ComponentInterfaceFactory.create_batch(2) ai.algorithm.inputs.set(cis) - civs = [ComponentInterfaceValueFactory(interface=c) for c in cis] + civs1 = [ComponentInterfaceValueFactory(interface=c) for c in cis] + civs2 = [ComponentInterfaceValueFactory(interface=c) for c in cis] - j = AlgorithmJobFactory(algorithm_image=ai) - j.inputs.set(civs) + j1 = AlgorithmJobFactory(creator=None, algorithm_image=ai) + j1.inputs.set(civs1) + j2 = AlgorithmJobFactory(algorithm_image=ai) + j2.inputs.set(civs2) civ_sets = [ - civs, # Job already exists + {civ for civ in civs1}, # Job already exists (system job) + { + civ for civ in civs2 + }, # Job already exists but with a creator set and hence should be ignored { # New values ComponentInterfaceValueFactory(interface=cis[0]), @@ -653,7 +659,7 @@ def test_existing_jobs(self): }, { # Changed values - civs[0], + civs1[0], ComponentInterfaceValueFactory(interface=cis[1]), }, ] @@ -662,7 +668,7 @@ def test_existing_jobs(self): civ_sets=civ_sets, algorithm_image=ai ) - assert filtered_civ_sets == civ_sets[1:] + assert sorted(filtered_civ_sets) == sorted(civ_sets[1:]) @pytest.mark.django_db
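The one-line change in the diff above narrows the "existing jobs" lookup to system jobs (`creator=None`), so a user-created job with the same inputs no longer suppresses scheduling of a new system job. Below is a minimal, self-contained sketch of that filtering idea, using plain Python tuples as hypothetical stand-ins for the real Job and ComponentInterfaceValue objects:

```python
# Hypothetical stand-ins for jobs: each job is (creator, frozenset_of_inputs).
existing_jobs = [
    (None, frozenset({"civ-a", "civ-b"})),     # system job -> should block re-runs
    ("alice", frozenset({"civ-c", "civ-d"})),  # user job -> should NOT block system runs
]

# Mirror of the fixed query: only jobs with creator=None count as "already run".
system_job_inputs = {inputs for creator, inputs in existing_jobs if creator is None}

candidate_civ_sets = [
    {"civ-a", "civ-b"},  # duplicate of a system job -> filtered out
    {"civ-c", "civ-d"},  # matches only a user-created job -> kept
]

valid = [s for s in candidate_civ_sets if frozenset(s) not in system_job_inputs]
print(valid)  # [{'civ-c', 'civ-d'}]
```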
enthought__chaco-424
Demo quiver.py not working

**Problem Description**
Zooming in ends with the traceback below and a blank plot.

**Reproduction Steps:**
Run the file and zoom in until the plot breaks.

**Expected behavior:**
The plot disappears if you keep zooming in, and it ends with the following traceback.

```
Traceback (most recent call last):
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/enable/qt4/base_window.py", line 202, in paintEvent
    self.handler.paintEvent(event)
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/enable/qt4/base_window.py", line 54, in paintEvent
    self._enable_window._paint(event)
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/enable/abstract_window.py", line 468, in _paint
    self.component.draw(gc, view_bounds=(0, 0, size[0], size[1]))
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/enable/component.py", line 427, in draw
    self._draw(gc, view_bounds, mode)
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/enable/component.py", line 779, in _draw
    self._dispatch_draw(layer, gc, view_bounds, mode)
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/enable/container.py", line 272, in _dispatch_draw
    component._dispatch_draw(layer, gc, new_bounds, mode)
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/enable/container.py", line 272, in _dispatch_draw
    component._dispatch_draw(layer, gc, new_bounds, mode)
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/enable/component.py", line 799, in _dispatch_draw
    handler(gc, view_bounds, mode)
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/chaco/base_xy_plot.py", line 466, in _draw_plot
    self._draw_component(gc, view_bounds, mode)
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/chaco/base_xy_plot.py", line 474, in _draw_component
    self._render(gc, pts)
  File "/Users/ktakami/.edm/envs/chaco-dev/lib/python2.7/site-packages/chaco/quiverplot.py", line 80, in _render
    ends = points + self._cached_vector_data
ValueError: operands could not be broadcast together with shapes (0,) (0,2)
```

**OS, Python version:** OSX, Python 2.7

Split from #385
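For context, here is a minimal standalone reproduction of the NumPy error at the bottom of the traceback. The array shapes are copied from the error message; that the plot arrives at them because zooming far enough in leaves no visible points is an assumption, not something stated in the report.

```python
import numpy as np

# Shapes taken from the error message; empty arrays stand in for a fully
# zoomed-in view where no points survive clipping (assumed trigger).
points = np.empty((0,))      # screen-space points handed to _render
vectors = np.empty((0, 2))   # cached (dx, dy) vector data

try:
    ends = points + vectors  # same operation as quiverplot.py line 80
except ValueError as exc:
    print(exc)  # operands could not be broadcast together with shapes (0,) (0,2)
```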
[ { "content": "\nfrom __future__ import with_statement\n\nfrom numpy import array, compress, matrix, newaxis, sqrt, zeros\n\n# Enthought library imports\nfrom enable.api import ColorTrait\nfrom traits.api import Array, Enum, Float, Instance, Int\n\n# Chaco relative imports\nfrom .abstract_data_source import AbstractDataSource\nfrom .scatterplot import ScatterPlot\n\nclass QuiverPlot(ScatterPlot):\n\n # Determines how to interpret the data in the **vectors** data source.\n # \"vector\": each tuple is a (dx, dy)\n # \"radial\": each tuple is an (r, theta)\n data_type = Enum(\"vector\", \"radial\") # TODO: implement \"radial\"\n\n # A datasource that returns an Nx2 array array indicating directions\n # of the vectors. The interpretation of this array is dependent on\n # the setting of the **data_type** attribute.\n #\n # Usually this will be a MultiArrayDataSource.\n vectors = Instance(AbstractDataSource)\n\n #------------------------------------------------------------------------\n # Visual attributes of the vector\n #------------------------------------------------------------------------\n\n # The color of the lines\n line_color = ColorTrait(\"black\")\n\n # The width of the lines\n line_width = Float(1.0)\n\n # The length, in pixels, of the arrowhead\n arrow_size = Int(5)\n\n #------------------------------------------------------------------------\n # Private traits\n #------------------------------------------------------------------------\n\n _cached_vector_data = Array\n _selected_vector_data = Array\n\n def _gather_points_old(self):\n # In addition to the standard scatterplot _gather_points, we need\n # to also grab the vectors that fall inside the view range\n super(QuiverPlot, self)._gather_points_old()\n\n if not self.index or not self.value:\n return\n\n if len(self._cached_point_mask) == 0:\n self._cached_vector_data = []\n return\n\n vectors = self.vectors.get_data()\n self._cached_vector_data = compress(self._cached_point_mask, vectors, axis=0)\n\n if self._cached_selected_pts is not None:\n indices = self._cached_selection_point_mask\n self._selected_vector_data = compress(indices, vectors, axis=0)\n else:\n self._selected_vector_data = None\n return\n\n\n def _render(self, gc, points, icon_mode=False):\n with gc:\n gc.clip_to_rect(self.x, self.y, self.width, self.height)\n\n gc.set_stroke_color(self.line_color_)\n gc.set_line_width(self.line_width)\n\n # Draw the body of the arrow\n starts = points\n ends = points + self._cached_vector_data\n gc.begin_path()\n gc.line_set(starts, ends)\n gc.stroke_path()\n\n if self.arrow_size > 0:\n vec = self._cached_vector_data\n unit_vec = vec / sqrt(vec[:,0] ** 2 + vec[:,1] ** 2)[:, newaxis]\n a = 0.707106781 # sqrt(2)/2\n\n # Draw the left arrowhead (for an arrow pointing straight up)\n arrow_ends = ends - array(unit_vec * matrix([[a, a], [-a, a]])) * self.arrow_size\n gc.begin_path()\n gc.line_set(ends, arrow_ends)\n gc.stroke_path()\n\n # Draw the left arrowhead (for an arrow pointing straight up)\n arrow_ends = ends - array(unit_vec * matrix([[a, -a], [a, a]])) * self.arrow_size\n gc.begin_path()\n gc.line_set(ends, arrow_ends)\n gc.stroke_path()\n", "path": "chaco/quiverplot.py" } ]
[ { "content": "\nfrom __future__ import with_statement\n\nfrom numpy import array, compress, matrix, newaxis, sqrt, zeros\n\n# Enthought library imports\nfrom enable.api import ColorTrait\nfrom traits.api import Array, Enum, Float, Instance, Int\n\n# Chaco relative imports\nfrom .abstract_data_source import AbstractDataSource\nfrom .scatterplot import ScatterPlot\n\nclass QuiverPlot(ScatterPlot):\n\n # Determines how to interpret the data in the **vectors** data source.\n # \"vector\": each tuple is a (dx, dy)\n # \"radial\": each tuple is an (r, theta)\n data_type = Enum(\"vector\", \"radial\") # TODO: implement \"radial\"\n\n # A datasource that returns an Nx2 array array indicating directions\n # of the vectors. The interpretation of this array is dependent on\n # the setting of the **data_type** attribute.\n #\n # Usually this will be a MultiArrayDataSource.\n vectors = Instance(AbstractDataSource)\n\n #------------------------------------------------------------------------\n # Visual attributes of the vector\n #------------------------------------------------------------------------\n\n # The color of the lines\n line_color = ColorTrait(\"black\")\n\n # The width of the lines\n line_width = Float(1.0)\n\n # The length, in pixels, of the arrowhead\n arrow_size = Int(5)\n\n #------------------------------------------------------------------------\n # Private traits\n #------------------------------------------------------------------------\n\n _cached_vector_data = Array\n _selected_vector_data = Array\n\n def _gather_points_old(self):\n # In addition to the standard scatterplot _gather_points, we need\n # to also grab the vectors that fall inside the view range\n super(QuiverPlot, self)._gather_points_old()\n\n if not self.index or not self.value:\n return\n\n if len(self._cached_point_mask) == 0:\n self._cached_vector_data = []\n return\n\n vectors = self.vectors.get_data()\n self._cached_vector_data = compress(self._cached_point_mask, vectors, axis=0)\n\n if self._cached_selected_pts is not None:\n indices = self._cached_selection_point_mask\n self._selected_vector_data = compress(indices, vectors, axis=0)\n else:\n self._selected_vector_data = None\n return\n\n\n def _render(self, gc, points, icon_mode=False):\n if len(points) < 1:\n return\n\n with gc:\n gc.clip_to_rect(self.x, self.y, self.width, self.height)\n\n gc.set_stroke_color(self.line_color_)\n gc.set_line_width(self.line_width)\n\n # Draw the body of the arrow\n starts = points\n ends = points + self._cached_vector_data\n gc.begin_path()\n gc.line_set(starts, ends)\n gc.stroke_path()\n\n if self.arrow_size > 0:\n vec = self._cached_vector_data\n unit_vec = vec / sqrt(vec[:,0] ** 2 + vec[:,1] ** 2)[:, newaxis]\n a = 0.707106781 # sqrt(2)/2\n\n # Draw the left arrowhead (for an arrow pointing straight up)\n arrow_ends = ends - array(unit_vec * matrix([[a, a], [-a, a]])) * self.arrow_size\n gc.begin_path()\n gc.line_set(ends, arrow_ends)\n gc.stroke_path()\n\n # Draw the left arrowhead (for an arrow pointing straight up)\n arrow_ends = ends - array(unit_vec * matrix([[a, -a], [a, a]])) * self.arrow_size\n gc.begin_path()\n gc.line_set(ends, arrow_ends)\n gc.stroke_path()\n", "path": "chaco/quiverplot.py" } ]
diff --git a/chaco/quiverplot.py b/chaco/quiverplot.py index 757c22a32..adf614741 100644 --- a/chaco/quiverplot.py +++ b/chaco/quiverplot.py @@ -69,6 +69,9 @@ def _gather_points_old(self): def _render(self, gc, points, icon_mode=False): + if len(points) < 1: + return + with gc: gc.clip_to_rect(self.x, self.y, self.width, self.height)
pymodbus-dev__pymodbus-1282
async serial server isn't explicitly started by StartAsyncSerialServer

Now my Python Modbus server isn't replying to the client talking to it on the serial port. It was working with 3.1.0 and fails with 3.1.1, and it's not because of the logging changes. I'll investigate...

Meanwhile, I found a typo in tcp.py line 213: `if self.params.host.startswith("unit:"):` Here "unit" should be "unix", since it's about Unix sockets...

===

I found the issue; it works now. In 3.1.0, StartAsyncSerialServer() calls server.start(). In 3.1.1 it does not, and it returns the server object instead. So I added .start() in my code on the return value of StartAsyncSerialServer(), and now the server serves.

I wonder if this is a bug in my code and perhaps I was supposed to call .start(), but it feels weird that a function called StartAsyncSerialServer() does not actually start the server. Hmm...

_Originally posted by @peufeu2 in https://github.com/pymodbus-dev/pymodbus/issues/1279#issuecomment-1400424302_
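A sketch of the workaround described in the report, for pymodbus 3.1.1 as released. The serial port path, datastore setup, and the surrounding event-loop handling are placeholder assumptions rather than a tested configuration; only the extra `.start()` call mirrors what the report describes.

```python
import asyncio

from pymodbus.datastore import ModbusServerContext, ModbusSlaveContext
from pymodbus.server import StartAsyncSerialServer


async def run_server():
    # Minimal default datastore; a real application would configure its blocks.
    context = ModbusServerContext(slaves=ModbusSlaveContext(), single=True)

    # Per the report: with 3.1.1 this returns the server object without
    # opening the serial port (3.1.0 started it internally).
    server = await StartAsyncSerialServer(
        context=context,
        port="/dev/ttyUSB0",  # placeholder device path
        baudrate=9600,
    )

    # The extra call that makes 3.1.1 behave like 3.1.0 did.
    await server.start()

    # A real application would now keep the event loop alive (its own tasks);
    # this sketch only demonstrates the explicit start() call.
    return server


if __name__ == "__main__":
    asyncio.run(run_server())
```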
[ { "content": "\"\"\"Implementation of a Threaded Modbus Server.\"\"\"\n# pylint: disable=missing-type-doc\nimport asyncio\nimport logging\nimport ssl\nimport traceback\nfrom binascii import b2a_hex\nfrom time import sleep\n\nfrom pymodbus.client.serial_asyncio import create_serial_connection\nfrom pymodbus.constants import Defaults\nfrom pymodbus.datastore import ModbusServerContext\nfrom pymodbus.device import ModbusControlBlock, ModbusDeviceIdentification\nfrom pymodbus.exceptions import NoSuchSlaveException, NotImplementedException\nfrom pymodbus.factory import ServerDecoder\nfrom pymodbus.pdu import ModbusExceptions as merror\nfrom pymodbus.transaction import (\n ModbusAsciiFramer,\n ModbusRtuFramer,\n ModbusSocketFramer,\n ModbusTlsFramer,\n)\nfrom pymodbus.utilities import hexlify_packets\n\n\ntry:\n import serial\nexcept ImportError:\n pass\n\n\n# --------------------------------------------------------------------------- #\n# Logging\n# --------------------------------------------------------------------------- #\n_logger = logging.getLogger(__name__)\n\n\ndef sslctx_provider(\n sslctx=None, certfile=None, keyfile=None, password=None, reqclicert=False\n):\n \"\"\"Provide the SSLContext for ModbusTlsServer.\n\n If the user defined SSLContext is not passed in, sslctx_provider will\n produce a default one.\n\n :param sslctx: The user defined SSLContext to use for TLS (default None and\n auto create)\n :param certfile: The cert file path for TLS (used if sslctx is None)\n :param keyfile: The key file path for TLS (used if sslctx is None)\n :param password: The password for for decrypting the private key file\n :param reqclicert: Force the sever request client's certificate\n \"\"\"\n if sslctx is None:\n # According to MODBUS/TCP Security Protocol Specification, it is\n # TLSv2 at least\n sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)\n sslctx.verify_mode = ssl.CERT_NONE\n sslctx.check_hostname = False\n sslctx.options |= ssl.OP_NO_TLSv1_1\n sslctx.options |= ssl.OP_NO_TLSv1\n sslctx.options |= ssl.OP_NO_SSLv3\n sslctx.options |= ssl.OP_NO_SSLv2\n sslctx.load_cert_chain(certfile=certfile, keyfile=keyfile, password=password)\n\n if reqclicert:\n sslctx.verify_mode = ssl.CERT_REQUIRED\n\n return sslctx\n\n\n# --------------------------------------------------------------------------- #\n# Protocol Handlers\n# --------------------------------------------------------------------------- #\n\n\nclass ModbusBaseRequestHandler(asyncio.BaseProtocol):\n \"\"\"Implements modbus slave wire protocol.\n\n This uses the asyncio.Protocol to implement the client handler.\n\n When a connection is established, the asyncio.Protocol.connection_made\n callback is called. 
This callback will setup the connection and\n create and schedule an asyncio.Task and assign it to running_task.\n\n running_task will be canceled upon connection_lost event.\n \"\"\"\n\n def __init__(self, owner):\n \"\"\"Initialize.\"\"\"\n self.server = owner\n self.running = False\n self.receive_queue = asyncio.Queue()\n self.handler_task = None # coroutine to be run on asyncio loop\n self._sent = b\"\" # for handle_local_echo\n\n def _log_exception(self):\n \"\"\"Show log exception.\"\"\"\n if isinstance(self, ModbusConnectedRequestHandler):\n txt = f\"Handler for stream [{self.client_address[:2]}] has been canceled\"\n _logger.debug(txt)\n elif isinstance(self, ModbusSingleRequestHandler):\n _logger.debug(\"Handler for serial port has been cancelled\")\n else:\n if hasattr(self, \"protocol\"):\n sock_name = (\n self.protocol._sock.getsockname() # pylint: disable=protected-access\n )\n else:\n sock_name = \"No socket\"\n txt = f\"Handler for UDP socket [{sock_name[1]}] has been canceled\"\n _logger.debug(txt)\n\n def connection_made(self, transport):\n \"\"\"Call for socket establish\n\n For streamed protocols (TCP) this will also correspond to an\n entire conversation; however for datagram protocols (UDP) this\n corresponds to the socket being opened\n \"\"\"\n try:\n if (\n hasattr(transport, \"get_extra_info\")\n and transport.get_extra_info(\"sockname\") is not None\n ):\n sockname = transport.get_extra_info(\"sockname\")[:2]\n txt = f\"Socket [{sockname}] opened\"\n _logger.debug(txt)\n elif hasattr(transport, \"serial\"):\n txt = f\"Serial connection opened on port: {transport.serial.port}\"\n _logger.debug(txt)\n else:\n txt = f\"Unable to get information about transport {transport}\"\n _logger.warning(txt)\n self.transport = transport # pylint: disable=attribute-defined-outside-init\n self.running = True\n self.framer = ( # pylint: disable=attribute-defined-outside-init\n self.server.framer(\n self.server.decoder,\n client=None,\n )\n )\n\n # schedule the connection handler on the event loop\n self.handler_task = asyncio.create_task(self.handle())\n except Exception as exc: # pragma: no cover pylint: disable=broad-except\n txt = (\n f\"Datastore unable to fulfill request: {exc}; {traceback.format_exc()}\"\n )\n _logger.error(txt)\n\n def connection_lost(self, call_exc):\n \"\"\"Call for socket tear down.\n\n For streamed protocols any break in the network connection will\n be reported here; for datagram protocols, only a teardown of the\n socket itself will result in this call.\n \"\"\"\n try:\n if self.handler_task:\n self.handler_task.cancel()\n if call_exc is None:\n self._log_exception()\n elif hasattr(self, \"client_address\"): # TCP connection\n txt = f\"Client Disconnection {self.client_address} due to {call_exc}\"\n _logger.debug(txt)\n\n self.running = False\n except Exception as exc: # pylint: disable=broad-except\n txt = (\n f\"Datastore unable to fulfill request: {exc}; {traceback.format_exc()}\"\n )\n _logger.error(txt)\n\n async def handle(self): # pylint: disable=too-complex\n \"\"\"Return Asyncio coroutine which represents a single conversation.\n\n between the modbus slave and master\n\n Once the client connection is established, the data chunks will be\n fed to this coroutine via the asyncio.Queue object which is fed by\n the ModbusBaseRequestHandler class's callback Future.\n\n This callback future gets data from either\n asyncio.DatagramProtocol.datagram_received or\n from asyncio.BaseProtocol.data_received.\n\n This function will execute without blocking 
in the while-loop and\n yield to the asyncio event loop when the frame is exhausted.\n As a result, multiple clients can be interleaved without any\n interference between them.\n\n For ModbusConnectedRequestHandler, each connection will be given an\n instance of the handle() coroutine and this instance will be put in the\n active_connections dict. Calling server_close will individually cancel\n each running handle() task.\n\n For ModbusDisconnectedRequestHandler, a single handle() coroutine will\n be started and maintained. Calling server_close will cancel that task.\n \"\"\"\n reset_frame = False\n while self.running:\n try:\n units = self.server.context.slaves()\n # this is an asyncio.Queue await, it will never fail\n data = await self._recv_()\n if isinstance(data, tuple):\n # addr is populated when talking over UDP\n data, *addr = data\n else:\n addr = (None,) # empty tuple\n\n if not isinstance(units, (list, tuple)):\n units = [units]\n # if broadcast is enabled make sure to\n # process requests to address 0\n if self.server.broadcast_enable: # pragma: no cover\n if 0 not in units:\n units.append(0)\n\n if _logger.isEnabledFor(logging.DEBUG):\n txt = f\"Handling data: {hexlify_packets(data)}\"\n _logger.debug(txt)\n\n single = self.server.context.single\n self.framer.processIncomingPacket(\n data=data,\n callback=lambda x: self.execute(x, *addr),\n unit=units,\n single=single,\n )\n\n except asyncio.CancelledError:\n # catch and ignore cancellation errors\n if self.running:\n self._log_exception()\n self.running = False\n except Exception as exc: # pylint: disable=broad-except\n # force TCP socket termination as processIncomingPacket\n # should handle application layer errors\n # for UDP sockets, simply reset the frame\n if isinstance(self, ModbusConnectedRequestHandler):\n client_addr = self.client_address[:2]\n txt = f'Unknown exception \"{exc}\" on stream {client_addr} forcing disconnect'\n _logger.error(txt)\n self.transport.close()\n else:\n txt = f\"Unknown error occurred {exc}\"\n _logger.error(exc)\n reset_frame = True # graceful recovery\n finally:\n if reset_frame:\n self.framer.resetFrame()\n reset_frame = False\n\n def execute(self, request, *addr):\n \"\"\"Call with the resulting message.\n\n :param request: The decoded request message\n :param addr: the address\n \"\"\"\n broadcast = False\n try:\n if self.server.broadcast_enable and not request.unit_id:\n broadcast = True\n # if broadcasting then execute on all slave contexts,\n # note response will be ignored\n for unit_id in self.server.context.slaves():\n response = request.execute(self.server.context[unit_id])\n else:\n context = self.server.context[request.unit_id]\n response = request.execute(context)\n except NoSuchSlaveException:\n txt = f\"requested slave does not exist: {request.unit_id}\"\n _logger.error(txt)\n if self.server.ignore_missing_slaves:\n return # the client will simply timeout waiting for a response\n response = request.doException(merror.GatewayNoResponse)\n except Exception as exc: # pylint: disable=broad-except\n txt = (\n f\"Datastore unable to fulfill request: {exc}; {traceback.format_exc()}\"\n )\n _logger.error(txt)\n response = request.doException(merror.SlaveFailure)\n # no response when broadcasting\n if not broadcast:\n response.transaction_id = request.transaction_id\n response.unit_id = request.unit_id\n skip_encoding = False\n if self.server.response_manipulator:\n response, skip_encoding = self.server.response_manipulator(response)\n self.send(response, *addr, 
skip_encoding=skip_encoding)\n\n def send(self, message, *addr, **kwargs):\n \"\"\"Send message.\"\"\"\n\n def __send(msg, *addr):\n if _logger.isEnabledFor(logging.DEBUG):\n txt = f\"send: [{message}]- {b2a_hex(msg)}\"\n _logger.debug(txt)\n if addr == (None,):\n self._send_(msg)\n else:\n self._send_(msg, *addr)\n\n if kwargs.get(\"skip_encoding\", False):\n __send(message, *addr)\n elif message.should_respond:\n # self.server.control.Counter.BusMessage += 1\n pdu = self.framer.buildPacket(message)\n __send(pdu, *addr)\n else:\n _logger.debug(\"Skipping sending response!!\")\n\n # ----------------------------------------------------------------------- #\n # Derived class implementations\n # ----------------------------------------------------------------------- #\n\n def _send_(self, data): # pragma: no cover\n \"\"\"Send a request (string) to the network.\n\n :param data: The unencoded modbus response\n :raises NotImplementedException:\n \"\"\"\n raise NotImplementedException(\"Method not implemented by derived class\")\n\n async def _recv_(self): # pragma: no cover\n \"\"\"Receive data from the network.\n\n :raises NotImplementedException:\n \"\"\"\n raise NotImplementedException(\"Method not implemented by derived class\")\n\n\nclass ModbusConnectedRequestHandler(ModbusBaseRequestHandler, asyncio.Protocol):\n \"\"\"Implements the modbus server protocol\n\n This uses asyncio.Protocol to implement\n the client handler for a connected protocol (TCP).\n \"\"\"\n\n def connection_made(self, transport):\n \"\"\"Call when a connection is made.\"\"\"\n super().connection_made(transport)\n\n self.client_address = ( # pylint: disable=attribute-defined-outside-init\n transport.get_extra_info(\"peername\")\n )\n self.server.active_connections[self.client_address] = self\n txt = f\"TCP client connection established [{self.client_address[:2]}]\"\n _logger.debug(txt)\n\n def connection_lost(self, call_exc):\n \"\"\"Call when the connection is lost or closed.\"\"\"\n super().connection_lost(call_exc)\n client_addr = self.client_address[:2]\n txt = f\"TCP client disconnected [{client_addr}]\"\n _logger.debug(txt)\n if self.client_address in self.server.active_connections:\n self.server.active_connections.pop(self.client_address)\n\n def data_received(self, data):\n \"\"\"Call when some data is received.\n\n data is a non-empty bytes object containing the incoming data.\n \"\"\"\n self.receive_queue.put_nowait(data)\n\n async def _recv_(self):\n try:\n result = await self.receive_queue.get()\n except RuntimeError:\n _logger.error(\"Event loop is closed\")\n result = None\n return result\n\n def _send_(self, data):\n \"\"\"Send tcp.\"\"\"\n self.transport.write(data)\n\n\nclass ModbusDisconnectedRequestHandler(\n ModbusBaseRequestHandler, asyncio.DatagramProtocol\n):\n \"\"\"Implements the modbus server protocol\n\n This uses the socketserver.BaseRequestHandler to implement\n the client handler for a disconnected protocol (UDP). 
The\n only difference is that we have to specify who to send the\n resulting packet data to.\n \"\"\"\n\n def __init__(self, owner):\n \"\"\"Initialize.\"\"\"\n super().__init__(owner)\n _future = asyncio.get_running_loop().create_future()\n self.server.on_connection_terminated = _future\n\n def connection_lost(self, call_exc):\n \"\"\"Handle connection lost.\"\"\"\n super().connection_lost(call_exc)\n self.server.on_connection_terminated.set_result(True)\n\n def datagram_received(self, data, addr):\n \"\"\"Call when a datagram is received.\n\n data is a bytes object containing the incoming data. addr\n is the address of the peer sending the data; the exact\n format depends on the transport.\n \"\"\"\n self.receive_queue.put_nowait((data, addr))\n\n def error_received(self, exc): # pragma: no cover\n \"\"\"Call when a previous send/receive raises an OSError.\n\n exc is the OSError instance.\n\n This method is called in rare conditions,\n when the transport (e.g. UDP) detects that a datagram could\n not be delivered to its recipient. In many conditions\n though, undeliverable datagrams will be silently dropped.\n \"\"\"\n txt = f\"datagram connection error [{exc}]\"\n _logger.error(txt)\n\n async def _recv_(self):\n return await self.receive_queue.get()\n\n def _send_(self, data, addr=None):\n self.transport.sendto(data, addr=addr)\n\n\nclass ModbusSingleRequestHandler(ModbusBaseRequestHandler, asyncio.Protocol):\n \"\"\"Implement the modbus server protocol.\n\n This uses asyncio.Protocol to implement\n the client handler for a serial connection.\n \"\"\"\n\n def connection_made(self, transport):\n \"\"\"Handle connect made.\"\"\"\n super().connection_made(transport)\n _logger.debug(\"Serial connection established\")\n\n def connection_lost(self, call_exc):\n \"\"\"Handle connection lost.\"\"\"\n super().connection_lost(call_exc)\n _logger.debug(\"Serial connection lost\")\n if hasattr(self.server, \"on_connection_lost\"):\n self.server.on_connection_lost()\n\n def data_received(self, data):\n \"\"\"Receive data.\"\"\"\n if (\n hasattr(self.server, \"handle_local_echo\")\n and self.server.handle_local_echo is True\n and self._sent\n ):\n if self._sent in data:\n data, self._sent = data.replace(self._sent, b\"\", 1), b\"\"\n elif self._sent.startswith(data):\n self._sent, data = self._sent.replace(data, b\"\", 1), b\"\"\n else:\n self._sent = b\"\"\n if not data:\n return\n self.receive_queue.put_nowait(data)\n\n async def _recv_(self):\n return await self.receive_queue.get()\n\n def _send_(self, data):\n if self.transport is not None:\n self.transport.write(data)\n if (\n hasattr(self.server, \"handle_local_echo\")\n and self.server.handle_local_echo is True\n ):\n self._sent = data\n\n\n# --------------------------------------------------------------------------- #\n# Server Implementations\n# --------------------------------------------------------------------------- #\n\n\nclass ModbusUnixServer:\n \"\"\"A modbus threaded Unix socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n def __init__(\n self,\n context,\n path,\n framer=None,\n identity=None,\n handler=None,\n **kwargs,\n ):\n \"\"\"Initialize the socket server.\n\n If the identify structure is not passed in, the ModbusControlBlock\n uses its own default structure.\n\n :param context: The ModbusServerContext datastore\n :param path: unix socket path\n :param framer: The framer strategy to use\n :param identity: An 
optional identify structure\n :param handler: A handler for each client session; default is\n ModbusConnectedRequestHandler. The handler class\n receives connection create/teardown events\n :param allow_reuse_address: Whether the server will allow the\n reuse of an address.\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param response_manipulator: Callback method for manipulating the\n response\n \"\"\"\n self.active_connections = {}\n self.loop = kwargs.get(\"loop\") or asyncio.get_event_loop()\n self.decoder = ServerDecoder()\n self.framer = framer or ModbusSocketFramer\n self.context = context or ModbusServerContext()\n self.control = ModbusControlBlock()\n self.path = path\n self.handler = handler or ModbusConnectedRequestHandler\n self.handler.server = self\n self.ignore_missing_slaves = kwargs.get(\n \"ignore_missing_slaves\", Defaults.IgnoreMissingSlaves\n )\n self.broadcast_enable = kwargs.get(\"broadcast_enable\", Defaults.BroadcastEnable)\n self.response_manipulator = kwargs.get(\"response_manipulator\", None)\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n # asyncio future that will be done once server has started\n self.serving = self.loop.create_future()\n # constructors cannot be declared async, so we have to\n # defer the initialization of the server\n self.server = None\n self.factory_parms = {}\n\n async def serve_forever(self):\n \"\"\"Start endless loop.\"\"\"\n if self.server is None:\n try:\n self.server = await self.loop.create_unix_server(\n lambda: self.handler(self),\n self.path,\n )\n self.serving.set_result(True)\n await self.server.serve_forever()\n except asyncio.exceptions.CancelledError:\n raise\n except Exception as exc: # pylint: disable=broad-except\n txt = f\"Server unexpected exception {exc}\"\n _logger.error(txt)\n else:\n raise RuntimeError(\n \"Can't call serve_forever on an already running server object\"\n )\n _logger.info(\"Server graceful shutdown.\")\n\n async def shutdown(self):\n \"\"\"Shutdown server.\"\"\"\n await self.server_close()\n\n async def server_close(self):\n \"\"\"Close server.\"\"\"\n for k_item, v_item in self.active_connections.items():\n txt = f\"aborting active session {k_item}\"\n _logger.warning(txt)\n v_item.handler_task.cancel()\n self.active_connections = {}\n if self.server is not None:\n self.server.close()\n await self.server.wait_closed()\n self.server = None\n\n\nclass ModbusTcpServer:\n \"\"\"A modbus threaded tcp socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n def __init__(\n self,\n context,\n framer=None,\n identity=None,\n address=None,\n handler=None,\n allow_reuse_address=False,\n defer_start=False,\n backlog=20,\n **kwargs,\n ):\n \"\"\"Initialize the socket server.\n\n If the identify structure is not passed in, the ModbusControlBlock\n uses its own empty structure.\n\n :param context: The ModbusServerContext datastore\n :param framer: The framer strategy to use\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param handler: A handler for each client session; default is\n ModbusConnectedRequestHandler. 
The handler class\n receives connection create/teardown events\n :param allow_reuse_address: Whether the server will allow the\n reuse of an address.\n :param backlog: is the maximum number of queued connections\n passed to listen(). Defaults to 20, increase if many\n connections are being made and broken to your Modbus slave\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param response_manipulator: Callback method for manipulating the\n response\n \"\"\"\n self.active_connections = {}\n self.loop = kwargs.get(\"loop\") or asyncio.get_event_loop()\n self.allow_reuse_address = allow_reuse_address\n self.decoder = ServerDecoder()\n self.framer = framer or ModbusSocketFramer\n self.context = context or ModbusServerContext()\n self.control = ModbusControlBlock()\n self.address = address or (\"\", Defaults.TcpPort)\n self.handler = handler or ModbusConnectedRequestHandler\n self.handler.server = self\n self.ignore_missing_slaves = kwargs.get(\n \"ignore_missing_slaves\", Defaults.IgnoreMissingSlaves\n )\n self.broadcast_enable = kwargs.get(\"broadcast_enable\", Defaults.BroadcastEnable)\n self.response_manipulator = kwargs.get(\"response_manipulator\", None)\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n # asyncio future that will be done once server has started\n self.serving = self.loop.create_future()\n # constructors cannot be declared async, so we have to\n # defer the initialization of the server\n self.server = None\n self.factory_parms = {\n \"reuse_address\": allow_reuse_address,\n \"backlog\": backlog,\n \"start_serving\": not defer_start,\n }\n\n async def serve_forever(self):\n \"\"\"Start endless loop.\"\"\"\n if self.server is None:\n self.server = await self.loop.create_server(\n lambda: self.handler(self),\n *self.address,\n **self.factory_parms,\n )\n self.serving.set_result(True)\n try:\n await self.server.serve_forever()\n except asyncio.exceptions.CancelledError:\n raise\n except Exception as exc: # pylint: disable=broad-except\n txt = f\"Server unexpected exception {exc}\"\n _logger.error(txt)\n else:\n raise RuntimeError(\n \"Can't call serve_forever on an already running server object\"\n )\n _logger.info(\"Server graceful shutdown.\")\n\n async def shutdown(self):\n \"\"\"Shutdown server.\"\"\"\n await self.server_close()\n\n async def server_close(self):\n \"\"\"Close server.\"\"\"\n for k_item, v_item in self.active_connections.items():\n txt = f\"aborting active session {k_item}\"\n _logger.warning(txt)\n v_item.handler_task.cancel()\n self.active_connections = {}\n if self.server is not None:\n self.server.close()\n await self.server.wait_closed()\n self.server = None\n\n\nclass ModbusTlsServer(ModbusTcpServer):\n \"\"\"A modbus threaded tls socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n def __init__( # pylint: disable=too-many-arguments\n self,\n context,\n framer=None,\n identity=None,\n address=None,\n sslctx=None,\n certfile=None,\n keyfile=None,\n password=None,\n reqclicert=False,\n handler=None,\n allow_reuse_address=False,\n defer_start=False,\n backlog=20,\n **kwargs,\n ):\n \"\"\"Overloaded initializer for the socket server.\n\n If the identify structure is not passed in, the ModbusControlBlock\n uses its own empty 
structure.\n\n :param context: The ModbusServerContext datastore\n :param framer: The framer strategy to use\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param sslctx: The SSLContext to use for TLS (default None and auto\n create)\n :param certfile: The cert file path for TLS (used if sslctx is None)\n :param keyfile: The key file path for TLS (used if sslctx is None)\n :param password: The password for for decrypting the private key file\n :param reqclicert: Force the sever request client's certificate\n :param handler: A handler for each client session; default is\n ModbusConnectedRequestHandler. The handler class\n receives connection create/teardown events\n :param allow_reuse_address: Whether the server will allow the\n reuse of an address.\n :param backlog: is the maximum number of queued connections\n passed to listen(). Defaults to 20, increase if many\n connections are being made and broken to your Modbus slave\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param response_manipulator: Callback method for\n manipulating the response\n \"\"\"\n super().__init__(\n context,\n framer=framer,\n identity=identity,\n address=address,\n handler=handler,\n allow_reuse_address=allow_reuse_address,\n defer_start=defer_start,\n backlog=backlog,\n **kwargs,\n )\n self.sslctx = sslctx_provider(sslctx, certfile, keyfile, password, reqclicert)\n self.factory_parms[\"ssl\"] = self.sslctx\n\n\nclass ModbusUdpServer:\n \"\"\"A modbus threaded udp socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n def __init__(\n self,\n context,\n framer=None,\n identity=None,\n address=None,\n handler=None,\n defer_start=False, # pylint: disable=unused-argument\n backlog=20, # pylint: disable=unused-argument\n **kwargs,\n ):\n \"\"\"Overloaded initializer for the socket server.\n\n If the identify structure is not passed in, the ModbusControlBlock\n uses its own empty structure.\n\n :param context: The ModbusServerContext datastore\n :param framer: The framer strategy to use\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param handler: A handler for each client session; default is\n ModbusDisonnectedRequestHandler\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param response_manipulator: Callback method for\n manipulating the response\n \"\"\"\n self.loop = asyncio.get_running_loop()\n self.decoder = ServerDecoder()\n self.framer = framer or ModbusSocketFramer\n self.context = context or ModbusServerContext()\n self.control = ModbusControlBlock()\n self.address = address or (\"\", Defaults.TcpPort)\n self.handler = handler or ModbusDisconnectedRequestHandler\n self.ignore_missing_slaves = kwargs.get(\n \"ignore_missing_slaves\", Defaults.IgnoreMissingSlaves\n )\n self.broadcast_enable = kwargs.get(\"broadcast_enable\", Defaults.BroadcastEnable)\n self.response_manipulator = kwargs.get(\"response_manipulator\", None)\n\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n self.protocol = None\n self.endpoint = 
None\n self.on_connection_terminated = None\n # asyncio future that will be done once server has started\n self.serving = self.loop.create_future()\n self.factory_parms = {\n \"local_addr\": self.address,\n \"allow_broadcast\": True,\n }\n\n async def serve_forever(self):\n \"\"\"Start endless loop.\"\"\"\n if self.protocol is None:\n try:\n self.protocol, self.endpoint = await self.loop.create_datagram_endpoint(\n lambda: self.handler(self),\n **self.factory_parms,\n )\n except asyncio.exceptions.CancelledError:\n raise\n except Exception as exc:\n txt = f\"Server unexpected exception {exc}\"\n _logger.error(txt)\n raise RuntimeError(exc) from exc\n self.serving.set_result(True)\n else:\n raise RuntimeError(\n \"Can't call serve_forever on an already running server object\"\n )\n\n async def shutdown(self):\n \"\"\"Shutdown server.\"\"\"\n await self.server_close()\n\n async def server_close(self):\n \"\"\"Close server.\"\"\"\n if self.endpoint:\n self.endpoint.running = False\n if self.endpoint is not None and self.endpoint.handler_task is not None:\n self.endpoint.handler_task.cancel()\n if self.protocol is not None:\n self.protocol.close()\n self.protocol = None\n\n\nclass ModbusSerialServer: # pylint: disable=too-many-instance-attributes\n \"\"\"A modbus threaded serial socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n handler = None\n\n def __init__(\n self, context, framer=ModbusRtuFramer, identity=None, **kwargs\n ): # pragma: no cover\n \"\"\"Initialize the socket server.\n\n If the identity structure is not passed in, the ModbusControlBlock\n uses its own empty structure.\n :param context: The ModbusServerContext datastore\n :param framer: The framer strategy to use, default ModbusRtuFramer\n :param identity: An optional identify structure\n :param port: The serial port to attach to\n :param stopbits: The number of stop bits to use\n :param bytesize: The bytesize of the serial messages\n :param parity: Which kind of parity to use\n :param baudrate: The baud rate to use for the serial device\n :param timeout: The timeout to use for the serial device\n :param handle_local_echo: (optional) Discard local echo from dongle.\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param auto_reconnect: True to enable automatic reconnection,\n False otherwise\n :param reconnect_delay: reconnect delay in seconds\n :param response_manipulator: Callback method for\n manipulating the response\n \"\"\"\n self.loop = kwargs.get(\"loop\") or asyncio.get_event_loop()\n self.bytesize = kwargs.get(\"bytesize\", Defaults.Bytesize)\n self.parity = kwargs.get(\"parity\", Defaults.Parity)\n self.baudrate = kwargs.get(\"baudrate\", Defaults.Baudrate)\n self.timeout = kwargs.get(\"timeout\", Defaults.Timeout)\n self.device = kwargs.get(\"port\", 0)\n self.stopbits = kwargs.get(\"stopbits\", Defaults.Stopbits)\n self.handle_local_echo = kwargs.get(\n \"handle_local_echo\", Defaults.HandleLocalEcho\n )\n self.ignore_missing_slaves = kwargs.get(\n \"ignore_missing_slaves\", Defaults.IgnoreMissingSlaves\n )\n self.broadcast_enable = kwargs.get(\"broadcast_enable\", Defaults.BroadcastEnable)\n self.auto_reconnect = kwargs.get(\"auto_reconnect\", False)\n self.reconnect_delay = kwargs.get(\"reconnect_delay\", 2)\n self.reconnecting_task = None\n 
self.handler = kwargs.get(\"handler\") or ModbusSingleRequestHandler\n self.framer = framer or ModbusRtuFramer\n self.decoder = ServerDecoder()\n self.context = context or ModbusServerContext()\n self.response_manipulator = kwargs.get(\"response_manipulator\", None)\n self.control = ModbusControlBlock()\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n self.protocol = None\n self.transport = None\n self.server = None\n self.control = ModbusControlBlock()\n identity = kwargs.get(\"identity\")\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n async def start(self):\n \"\"\"Start connecting.\"\"\"\n await self._connect()\n\n async def _delayed_connect(self):\n \"\"\"Delay connect.\"\"\"\n await asyncio.sleep(self.reconnect_delay)\n await self._connect()\n\n async def _connect(self):\n \"\"\"Connect.\"\"\"\n if self.reconnecting_task is not None:\n self.reconnecting_task = None\n if self.device.startswith(\"socket:\"):\n return\n try:\n self.transport, self.protocol = await create_serial_connection(\n self.loop,\n lambda: self.handler(self),\n self.device,\n baudrate=self.baudrate,\n bytesize=self.bytesize,\n parity=self.parity,\n stopbits=self.stopbits,\n timeout=self.timeout,\n )\n except serial.serialutil.SerialException as exc:\n txt = f\"Failed to open serial port: {self.device}\"\n _logger.debug(txt)\n if not self.auto_reconnect:\n raise exc\n self._check_reconnect()\n except Exception as exc: # pylint: disable=broad-except\n txt = f\"Exception while create - {exc}\"\n _logger.debug(txt)\n\n def on_connection_lost(self):\n \"\"\"Call on lost connection.\"\"\"\n if self.transport is not None:\n self.transport.close()\n self.transport = None\n self.protocol = None\n if self.server is None:\n self._check_reconnect()\n\n async def shutdown(self):\n \"\"\"Terminate server.\"\"\"\n if self.transport is not None:\n self.transport.abort()\n if self.server is not None:\n self.server.close()\n await asyncio.wait_for(self.server.wait_closed(), 10)\n self.server = None\n self.transport = None\n self.protocol = None\n\n def _check_reconnect(self):\n \"\"\"Check reconnect.\"\"\"\n txt = f\"checking autoreconnect {self.auto_reconnect} {self.reconnecting_task}\"\n _logger.debug(txt)\n if self.auto_reconnect and (self.reconnecting_task is None):\n _logger.debug(\"Scheduling serial connection reconnect\")\n self.reconnecting_task = self.loop.create_task(self._delayed_connect())\n\n async def serve_forever(self):\n \"\"\"Start endless loop.\"\"\"\n if self.server:\n raise RuntimeError(\n \"Can't call serve_forever on an already running server object\"\n )\n if self.device.startswith(\"socket:\"):\n # Socket server means listen so start a socket server\n parts = self.device[9:].split(\":\")\n host_addr = (parts[0], int(parts[1]))\n self.server = await self.loop.create_server(\n lambda: self.handler(self),\n *host_addr,\n reuse_address=True,\n start_serving=True,\n backlog=20,\n )\n try:\n await self.server.serve_forever()\n except asyncio.exceptions.CancelledError:\n raise\n except Exception as exc: # pylint: disable=broad-except\n txt = f\"Server unexpected exception {exc}\"\n _logger.error(txt)\n return\n\n while self.server or self.transport or self.protocol:\n await asyncio.sleep(10)\n\n\n# --------------------------------------------------------------------------- #\n# Creation Factories\n# --------------------------------------------------------------------------- #\n\n\nclass _serverList:\n 
\"\"\"Maintains a list of active servers.\n\n The list allows applications to have multiple servers and\n being able to do shutdown gracefully.\n \"\"\"\n\n _servers = []\n\n def __init__(self, server, custom_functions, register):\n \"\"\"Register new server.\"\"\"\n for func in custom_functions:\n server.decoder.register(func)\n self.server = server\n if register:\n self._servers.append(self)\n self.job_stop = asyncio.Event()\n self.job_is_stopped = asyncio.Event()\n self.task = None\n self.loop = asyncio.get_event_loop()\n\n @classmethod\n def get_server(cls):\n \"\"\"Get server at index.\"\"\"\n return cls._servers[-1] if cls._servers else None\n\n def _remove(self):\n \"\"\"Remove server from active list.\"\"\"\n server = self._servers[-1]\n self._servers.pop()\n del server\n\n async def _run(self):\n \"\"\"Help starting/stopping server.\"\"\"\n # self.task = asyncio.create_task(self.server.serve_forever())\n # await self.job_stop.wait()\n # await self.server.shutdown()\n # await asyncio.sleep(0.1)\n # self.task.cancel()\n # await asyncio.sleep(0.1)\n # try:\n # await asyncio.wait_for(self.task, 10)\n # except asyncio.CancelledError:\n # pass\n # self.job_is_stopped.set()\n\n async def run(self):\n \"\"\"Help starting/stopping server.\"\"\"\n try:\n # await self._run()\n await self.server.serve_forever()\n except asyncio.CancelledError:\n pass\n\n async def async_await_stop(self):\n \"\"\"Wait for server stop.\"\"\"\n await self.server.shutdown()\n # self.job_stop.set()\n # try:\n # await asyncio.wait_for(self.job_is_stopped.wait(), 60)\n # except asyncio.exceptions.CancelledError:\n # pass\n # self._remove()\n\n\nasync def StartAsyncUnixServer( # pylint: disable=invalid-name,dangerous-default-value\n context=None,\n identity=None,\n path=None,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n):\n \"\"\"Start and run a tcp modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param path: An optional path to bind to.\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs: The rest\n :return: an initialized but inactive server object coroutine\n \"\"\"\n server = ModbusUnixServer(\n context, path, kwargs.pop(\"framer\", ModbusSocketFramer), identity, **kwargs\n )\n if not defer_start:\n job = _serverList(server, custom_functions, not defer_start)\n await job.run()\n return server\n\n\nasync def StartAsyncTcpServer( # pylint: disable=invalid-name,dangerous-default-value\n context=None,\n identity=None,\n address=None,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n):\n \"\"\"Start and run a tcp modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs: The rest\n :return: an initialized but inactive server object coroutine\n \"\"\"\n server = ModbusTcpServer(\n context, kwargs.pop(\"framer\", ModbusSocketFramer), identity, address, **kwargs\n )\n if not defer_start:\n job 
= _serverList(server, custom_functions, not defer_start)\n await job.run()\n return server\n\n\nasync def StartAsyncTlsServer( # pylint: disable=invalid-name,dangerous-default-value,too-many-arguments\n context=None,\n identity=None,\n address=None,\n sslctx=None,\n certfile=None,\n keyfile=None,\n password=None,\n reqclicert=False,\n allow_reuse_address=False,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n):\n \"\"\"Start and run a tls modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param sslctx: The SSLContext to use for TLS (default None and auto create)\n :param certfile: The cert file path for TLS (used if sslctx is None)\n :param keyfile: The key file path for TLS (used if sslctx is None)\n :param password: The password for for decrypting the private key file\n :param reqclicert: Force the sever request client's certificate\n :param allow_reuse_address: Whether the server will allow the reuse of an\n address.\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs: The rest\n :return: an initialized but inactive server object coroutine\n \"\"\"\n server = ModbusTlsServer(\n context,\n kwargs.pop(\"framer\", ModbusTlsFramer),\n identity,\n address,\n sslctx,\n certfile,\n keyfile,\n password,\n reqclicert,\n allow_reuse_address=allow_reuse_address,\n **kwargs,\n )\n if not defer_start:\n job = _serverList(server, custom_functions, not defer_start)\n await job.run()\n return server\n\n\nasync def StartAsyncUdpServer( # pylint: disable=invalid-name,dangerous-default-value\n context=None,\n identity=None,\n address=None,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n):\n \"\"\"Start and run a udp modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs:\n \"\"\"\n server = ModbusUdpServer(\n context, kwargs.pop(\"framer\", ModbusSocketFramer), identity, address, **kwargs\n )\n if not defer_start:\n job = _serverList(server, custom_functions, not defer_start)\n await job.run()\n return server\n\n\nasync def StartAsyncSerialServer( # pylint: disable=invalid-name,dangerous-default-value\n context=None,\n identity=None,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n): # pragma: no cover\n \"\"\"Start and run a serial modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs: The rest\n \"\"\"\n server = ModbusSerialServer(\n context, kwargs.pop(\"framer\", ModbusAsciiFramer), identity=identity, **kwargs\n )\n if not defer_start:\n job = 
_serverList(server, custom_functions, not defer_start)\n await job.run()\n return server\n\n\ndef StartSerialServer(**kwargs): # pylint: disable=invalid-name\n \"\"\"Start and run a serial modbus server.\"\"\"\n return asyncio.run(StartAsyncSerialServer(**kwargs))\n\n\ndef StartTcpServer(**kwargs): # pylint: disable=invalid-name\n \"\"\"Start and run a serial modbus server.\"\"\"\n return asyncio.run(StartAsyncTcpServer(**kwargs))\n\n\ndef StartTlsServer(**kwargs): # pylint: disable=invalid-name\n \"\"\"Start and run a serial modbus server.\"\"\"\n return asyncio.run(StartAsyncTlsServer(**kwargs))\n\n\ndef StartUdpServer(**kwargs): # pylint: disable=invalid-name\n \"\"\"Start and run a serial modbus server.\"\"\"\n return asyncio.run(StartAsyncUdpServer(**kwargs))\n\n\nasync def ServerAsyncStop(): # pylint: disable=invalid-name\n \"\"\"Terminate server.\"\"\"\n if my_job := _serverList.get_server():\n await my_job.async_await_stop()\n await asyncio.sleep(0.1)\n else:\n raise RuntimeError(\"ServerAsyncStop called without server task active.\")\n\n\ndef ServerStop(): # pylint: disable=invalid-name\n \"\"\"Terminate server.\"\"\"\n if my_job := _serverList.get_server():\n if my_job.loop.is_running():\n asyncio.run_coroutine_threadsafe(my_job.async_await_stop(), my_job.loop)\n sleep(0.1)\n else:\n raise RuntimeError(\"ServerStop called without server task active.\")\n", "path": "pymodbus/server/async_io.py" } ]
[ { "content": "\"\"\"Implementation of a Threaded Modbus Server.\"\"\"\n# pylint: disable=missing-type-doc\nimport asyncio\nimport logging\nimport ssl\nimport traceback\nfrom binascii import b2a_hex\nfrom time import sleep\n\nfrom pymodbus.client.serial_asyncio import create_serial_connection\nfrom pymodbus.constants import Defaults\nfrom pymodbus.datastore import ModbusServerContext\nfrom pymodbus.device import ModbusControlBlock, ModbusDeviceIdentification\nfrom pymodbus.exceptions import NoSuchSlaveException, NotImplementedException\nfrom pymodbus.factory import ServerDecoder\nfrom pymodbus.pdu import ModbusExceptions as merror\nfrom pymodbus.transaction import (\n ModbusAsciiFramer,\n ModbusRtuFramer,\n ModbusSocketFramer,\n ModbusTlsFramer,\n)\nfrom pymodbus.utilities import hexlify_packets\n\n\ntry:\n import serial\nexcept ImportError:\n pass\n\n\n# --------------------------------------------------------------------------- #\n# Logging\n# --------------------------------------------------------------------------- #\n_logger = logging.getLogger(__name__)\n\n\ndef sslctx_provider(\n sslctx=None, certfile=None, keyfile=None, password=None, reqclicert=False\n):\n \"\"\"Provide the SSLContext for ModbusTlsServer.\n\n If the user defined SSLContext is not passed in, sslctx_provider will\n produce a default one.\n\n :param sslctx: The user defined SSLContext to use for TLS (default None and\n auto create)\n :param certfile: The cert file path for TLS (used if sslctx is None)\n :param keyfile: The key file path for TLS (used if sslctx is None)\n :param password: The password for for decrypting the private key file\n :param reqclicert: Force the sever request client's certificate\n \"\"\"\n if sslctx is None:\n # According to MODBUS/TCP Security Protocol Specification, it is\n # TLSv2 at least\n sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)\n sslctx.verify_mode = ssl.CERT_NONE\n sslctx.check_hostname = False\n sslctx.options |= ssl.OP_NO_TLSv1_1\n sslctx.options |= ssl.OP_NO_TLSv1\n sslctx.options |= ssl.OP_NO_SSLv3\n sslctx.options |= ssl.OP_NO_SSLv2\n sslctx.load_cert_chain(certfile=certfile, keyfile=keyfile, password=password)\n\n if reqclicert:\n sslctx.verify_mode = ssl.CERT_REQUIRED\n\n return sslctx\n\n\n# --------------------------------------------------------------------------- #\n# Protocol Handlers\n# --------------------------------------------------------------------------- #\n\n\nclass ModbusBaseRequestHandler(asyncio.BaseProtocol):\n \"\"\"Implements modbus slave wire protocol.\n\n This uses the asyncio.Protocol to implement the client handler.\n\n When a connection is established, the asyncio.Protocol.connection_made\n callback is called. 
This callback will setup the connection and\n create and schedule an asyncio.Task and assign it to running_task.\n\n running_task will be canceled upon connection_lost event.\n \"\"\"\n\n def __init__(self, owner):\n \"\"\"Initialize.\"\"\"\n self.server = owner\n self.running = False\n self.receive_queue = asyncio.Queue()\n self.handler_task = None # coroutine to be run on asyncio loop\n self._sent = b\"\" # for handle_local_echo\n\n def _log_exception(self):\n \"\"\"Show log exception.\"\"\"\n if isinstance(self, ModbusConnectedRequestHandler):\n txt = f\"Handler for stream [{self.client_address[:2]}] has been canceled\"\n _logger.debug(txt)\n elif isinstance(self, ModbusSingleRequestHandler):\n _logger.debug(\"Handler for serial port has been cancelled\")\n else:\n if hasattr(self, \"protocol\"):\n sock_name = (\n self.protocol._sock.getsockname() # pylint: disable=protected-access\n )\n else:\n sock_name = \"No socket\"\n txt = f\"Handler for UDP socket [{sock_name[1]}] has been canceled\"\n _logger.debug(txt)\n\n def connection_made(self, transport):\n \"\"\"Call for socket establish\n\n For streamed protocols (TCP) this will also correspond to an\n entire conversation; however for datagram protocols (UDP) this\n corresponds to the socket being opened\n \"\"\"\n try:\n if (\n hasattr(transport, \"get_extra_info\")\n and transport.get_extra_info(\"sockname\") is not None\n ):\n sockname = transport.get_extra_info(\"sockname\")[:2]\n txt = f\"Socket [{sockname}] opened\"\n _logger.debug(txt)\n elif hasattr(transport, \"serial\"):\n txt = f\"Serial connection opened on port: {transport.serial.port}\"\n _logger.debug(txt)\n else:\n txt = f\"Unable to get information about transport {transport}\"\n _logger.warning(txt)\n self.transport = transport # pylint: disable=attribute-defined-outside-init\n self.running = True\n self.framer = ( # pylint: disable=attribute-defined-outside-init\n self.server.framer(\n self.server.decoder,\n client=None,\n )\n )\n\n # schedule the connection handler on the event loop\n self.handler_task = asyncio.create_task(self.handle())\n except Exception as exc: # pragma: no cover pylint: disable=broad-except\n txt = (\n f\"Datastore unable to fulfill request: {exc}; {traceback.format_exc()}\"\n )\n _logger.error(txt)\n\n def connection_lost(self, call_exc):\n \"\"\"Call for socket tear down.\n\n For streamed protocols any break in the network connection will\n be reported here; for datagram protocols, only a teardown of the\n socket itself will result in this call.\n \"\"\"\n try:\n if self.handler_task:\n self.handler_task.cancel()\n if call_exc is None:\n self._log_exception()\n elif hasattr(self, \"client_address\"): # TCP connection\n txt = f\"Client Disconnection {self.client_address} due to {call_exc}\"\n _logger.debug(txt)\n\n self.running = False\n except Exception as exc: # pylint: disable=broad-except\n txt = (\n f\"Datastore unable to fulfill request: {exc}; {traceback.format_exc()}\"\n )\n _logger.error(txt)\n\n async def handle(self): # pylint: disable=too-complex\n \"\"\"Return Asyncio coroutine which represents a single conversation.\n\n between the modbus slave and master\n\n Once the client connection is established, the data chunks will be\n fed to this coroutine via the asyncio.Queue object which is fed by\n the ModbusBaseRequestHandler class's callback Future.\n\n This callback future gets data from either\n asyncio.DatagramProtocol.datagram_received or\n from asyncio.BaseProtocol.data_received.\n\n This function will execute without blocking 
in the while-loop and\n yield to the asyncio event loop when the frame is exhausted.\n As a result, multiple clients can be interleaved without any\n interference between them.\n\n For ModbusConnectedRequestHandler, each connection will be given an\n instance of the handle() coroutine and this instance will be put in the\n active_connections dict. Calling server_close will individually cancel\n each running handle() task.\n\n For ModbusDisconnectedRequestHandler, a single handle() coroutine will\n be started and maintained. Calling server_close will cancel that task.\n \"\"\"\n reset_frame = False\n while self.running:\n try:\n units = self.server.context.slaves()\n # this is an asyncio.Queue await, it will never fail\n data = await self._recv_()\n if isinstance(data, tuple):\n # addr is populated when talking over UDP\n data, *addr = data\n else:\n addr = (None,) # empty tuple\n\n if not isinstance(units, (list, tuple)):\n units = [units]\n # if broadcast is enabled make sure to\n # process requests to address 0\n if self.server.broadcast_enable: # pragma: no cover\n if 0 not in units:\n units.append(0)\n\n if _logger.isEnabledFor(logging.DEBUG):\n txt = f\"Handling data: {hexlify_packets(data)}\"\n _logger.debug(txt)\n\n single = self.server.context.single\n self.framer.processIncomingPacket(\n data=data,\n callback=lambda x: self.execute(x, *addr),\n unit=units,\n single=single,\n )\n\n except asyncio.CancelledError:\n # catch and ignore cancellation errors\n if self.running:\n self._log_exception()\n self.running = False\n except Exception as exc: # pylint: disable=broad-except\n # force TCP socket termination as processIncomingPacket\n # should handle application layer errors\n # for UDP sockets, simply reset the frame\n if isinstance(self, ModbusConnectedRequestHandler):\n client_addr = self.client_address[:2]\n txt = f'Unknown exception \"{exc}\" on stream {client_addr} forcing disconnect'\n _logger.error(txt)\n self.transport.close()\n else:\n txt = f\"Unknown error occurred {exc}\"\n _logger.error(exc)\n reset_frame = True # graceful recovery\n finally:\n if reset_frame:\n self.framer.resetFrame()\n reset_frame = False\n\n def execute(self, request, *addr):\n \"\"\"Call with the resulting message.\n\n :param request: The decoded request message\n :param addr: the address\n \"\"\"\n broadcast = False\n try:\n if self.server.broadcast_enable and not request.unit_id:\n broadcast = True\n # if broadcasting then execute on all slave contexts,\n # note response will be ignored\n for unit_id in self.server.context.slaves():\n response = request.execute(self.server.context[unit_id])\n else:\n context = self.server.context[request.unit_id]\n response = request.execute(context)\n except NoSuchSlaveException:\n txt = f\"requested slave does not exist: {request.unit_id}\"\n _logger.error(txt)\n if self.server.ignore_missing_slaves:\n return # the client will simply timeout waiting for a response\n response = request.doException(merror.GatewayNoResponse)\n except Exception as exc: # pylint: disable=broad-except\n txt = (\n f\"Datastore unable to fulfill request: {exc}; {traceback.format_exc()}\"\n )\n _logger.error(txt)\n response = request.doException(merror.SlaveFailure)\n # no response when broadcasting\n if not broadcast:\n response.transaction_id = request.transaction_id\n response.unit_id = request.unit_id\n skip_encoding = False\n if self.server.response_manipulator:\n response, skip_encoding = self.server.response_manipulator(response)\n self.send(response, *addr, 
skip_encoding=skip_encoding)\n\n def send(self, message, *addr, **kwargs):\n \"\"\"Send message.\"\"\"\n\n def __send(msg, *addr):\n if _logger.isEnabledFor(logging.DEBUG):\n txt = f\"send: [{message}]- {b2a_hex(msg)}\"\n _logger.debug(txt)\n if addr == (None,):\n self._send_(msg)\n else:\n self._send_(msg, *addr)\n\n if kwargs.get(\"skip_encoding\", False):\n __send(message, *addr)\n elif message.should_respond:\n # self.server.control.Counter.BusMessage += 1\n pdu = self.framer.buildPacket(message)\n __send(pdu, *addr)\n else:\n _logger.debug(\"Skipping sending response!!\")\n\n # ----------------------------------------------------------------------- #\n # Derived class implementations\n # ----------------------------------------------------------------------- #\n\n def _send_(self, data): # pragma: no cover\n \"\"\"Send a request (string) to the network.\n\n :param data: The unencoded modbus response\n :raises NotImplementedException:\n \"\"\"\n raise NotImplementedException(\"Method not implemented by derived class\")\n\n async def _recv_(self): # pragma: no cover\n \"\"\"Receive data from the network.\n\n :raises NotImplementedException:\n \"\"\"\n raise NotImplementedException(\"Method not implemented by derived class\")\n\n\nclass ModbusConnectedRequestHandler(ModbusBaseRequestHandler, asyncio.Protocol):\n \"\"\"Implements the modbus server protocol\n\n This uses asyncio.Protocol to implement\n the client handler for a connected protocol (TCP).\n \"\"\"\n\n def connection_made(self, transport):\n \"\"\"Call when a connection is made.\"\"\"\n super().connection_made(transport)\n\n self.client_address = ( # pylint: disable=attribute-defined-outside-init\n transport.get_extra_info(\"peername\")\n )\n self.server.active_connections[self.client_address] = self\n txt = f\"TCP client connection established [{self.client_address[:2]}]\"\n _logger.debug(txt)\n\n def connection_lost(self, call_exc):\n \"\"\"Call when the connection is lost or closed.\"\"\"\n super().connection_lost(call_exc)\n client_addr = self.client_address[:2]\n txt = f\"TCP client disconnected [{client_addr}]\"\n _logger.debug(txt)\n if self.client_address in self.server.active_connections:\n self.server.active_connections.pop(self.client_address)\n\n def data_received(self, data):\n \"\"\"Call when some data is received.\n\n data is a non-empty bytes object containing the incoming data.\n \"\"\"\n self.receive_queue.put_nowait(data)\n\n async def _recv_(self):\n try:\n result = await self.receive_queue.get()\n except RuntimeError:\n _logger.error(\"Event loop is closed\")\n result = None\n return result\n\n def _send_(self, data):\n \"\"\"Send tcp.\"\"\"\n self.transport.write(data)\n\n\nclass ModbusDisconnectedRequestHandler(\n ModbusBaseRequestHandler, asyncio.DatagramProtocol\n):\n \"\"\"Implements the modbus server protocol\n\n This uses the socketserver.BaseRequestHandler to implement\n the client handler for a disconnected protocol (UDP). 
The\n only difference is that we have to specify who to send the\n resulting packet data to.\n \"\"\"\n\n def __init__(self, owner):\n \"\"\"Initialize.\"\"\"\n super().__init__(owner)\n _future = asyncio.get_running_loop().create_future()\n self.server.on_connection_terminated = _future\n\n def connection_lost(self, call_exc):\n \"\"\"Handle connection lost.\"\"\"\n super().connection_lost(call_exc)\n self.server.on_connection_terminated.set_result(True)\n\n def datagram_received(self, data, addr):\n \"\"\"Call when a datagram is received.\n\n data is a bytes object containing the incoming data. addr\n is the address of the peer sending the data; the exact\n format depends on the transport.\n \"\"\"\n self.receive_queue.put_nowait((data, addr))\n\n def error_received(self, exc): # pragma: no cover\n \"\"\"Call when a previous send/receive raises an OSError.\n\n exc is the OSError instance.\n\n This method is called in rare conditions,\n when the transport (e.g. UDP) detects that a datagram could\n not be delivered to its recipient. In many conditions\n though, undeliverable datagrams will be silently dropped.\n \"\"\"\n txt = f\"datagram connection error [{exc}]\"\n _logger.error(txt)\n\n async def _recv_(self):\n return await self.receive_queue.get()\n\n def _send_(self, data, addr=None):\n self.transport.sendto(data, addr=addr)\n\n\nclass ModbusSingleRequestHandler(ModbusBaseRequestHandler, asyncio.Protocol):\n \"\"\"Implement the modbus server protocol.\n\n This uses asyncio.Protocol to implement\n the client handler for a serial connection.\n \"\"\"\n\n def connection_made(self, transport):\n \"\"\"Handle connect made.\"\"\"\n super().connection_made(transport)\n _logger.debug(\"Serial connection established\")\n\n def connection_lost(self, call_exc):\n \"\"\"Handle connection lost.\"\"\"\n super().connection_lost(call_exc)\n _logger.debug(\"Serial connection lost\")\n if hasattr(self.server, \"on_connection_lost\"):\n self.server.on_connection_lost()\n\n def data_received(self, data):\n \"\"\"Receive data.\"\"\"\n if (\n hasattr(self.server, \"handle_local_echo\")\n and self.server.handle_local_echo is True\n and self._sent\n ):\n if self._sent in data:\n data, self._sent = data.replace(self._sent, b\"\", 1), b\"\"\n elif self._sent.startswith(data):\n self._sent, data = self._sent.replace(data, b\"\", 1), b\"\"\n else:\n self._sent = b\"\"\n if not data:\n return\n self.receive_queue.put_nowait(data)\n\n async def _recv_(self):\n return await self.receive_queue.get()\n\n def _send_(self, data):\n if self.transport is not None:\n self.transport.write(data)\n if (\n hasattr(self.server, \"handle_local_echo\")\n and self.server.handle_local_echo is True\n ):\n self._sent = data\n\n\n# --------------------------------------------------------------------------- #\n# Server Implementations\n# --------------------------------------------------------------------------- #\n\n\nclass ModbusUnixServer:\n \"\"\"A modbus threaded Unix socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n def __init__(\n self,\n context,\n path,\n framer=None,\n identity=None,\n handler=None,\n **kwargs,\n ):\n \"\"\"Initialize the socket server.\n\n If the identify structure is not passed in, the ModbusControlBlock\n uses its own default structure.\n\n :param context: The ModbusServerContext datastore\n :param path: unix socket path\n :param framer: The framer strategy to use\n :param identity: An 
optional identify structure\n :param handler: A handler for each client session; default is\n ModbusConnectedRequestHandler. The handler class\n receives connection create/teardown events\n :param allow_reuse_address: Whether the server will allow the\n reuse of an address.\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param response_manipulator: Callback method for manipulating the\n response\n \"\"\"\n self.active_connections = {}\n self.loop = kwargs.get(\"loop\") or asyncio.get_event_loop()\n self.decoder = ServerDecoder()\n self.framer = framer or ModbusSocketFramer\n self.context = context or ModbusServerContext()\n self.control = ModbusControlBlock()\n self.path = path\n self.handler = handler or ModbusConnectedRequestHandler\n self.handler.server = self\n self.ignore_missing_slaves = kwargs.get(\n \"ignore_missing_slaves\", Defaults.IgnoreMissingSlaves\n )\n self.broadcast_enable = kwargs.get(\"broadcast_enable\", Defaults.BroadcastEnable)\n self.response_manipulator = kwargs.get(\"response_manipulator\", None)\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n # asyncio future that will be done once server has started\n self.serving = self.loop.create_future()\n # constructors cannot be declared async, so we have to\n # defer the initialization of the server\n self.server = None\n self.factory_parms = {}\n\n async def serve_forever(self):\n \"\"\"Start endless loop.\"\"\"\n if self.server is None:\n try:\n self.server = await self.loop.create_unix_server(\n lambda: self.handler(self),\n self.path,\n )\n self.serving.set_result(True)\n await self.server.serve_forever()\n except asyncio.exceptions.CancelledError:\n raise\n except Exception as exc: # pylint: disable=broad-except\n txt = f\"Server unexpected exception {exc}\"\n _logger.error(txt)\n else:\n raise RuntimeError(\n \"Can't call serve_forever on an already running server object\"\n )\n _logger.info(\"Server graceful shutdown.\")\n\n async def shutdown(self):\n \"\"\"Shutdown server.\"\"\"\n await self.server_close()\n\n async def server_close(self):\n \"\"\"Close server.\"\"\"\n for k_item, v_item in self.active_connections.items():\n txt = f\"aborting active session {k_item}\"\n _logger.warning(txt)\n v_item.handler_task.cancel()\n self.active_connections = {}\n if self.server is not None:\n self.server.close()\n await self.server.wait_closed()\n self.server = None\n\n\nclass ModbusTcpServer:\n \"\"\"A modbus threaded tcp socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n def __init__(\n self,\n context,\n framer=None,\n identity=None,\n address=None,\n handler=None,\n allow_reuse_address=False,\n defer_start=False,\n backlog=20,\n **kwargs,\n ):\n \"\"\"Initialize the socket server.\n\n If the identify structure is not passed in, the ModbusControlBlock\n uses its own empty structure.\n\n :param context: The ModbusServerContext datastore\n :param framer: The framer strategy to use\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param handler: A handler for each client session; default is\n ModbusConnectedRequestHandler. 
The handler class\n receives connection create/teardown events\n :param allow_reuse_address: Whether the server will allow the\n reuse of an address.\n :param backlog: is the maximum number of queued connections\n passed to listen(). Defaults to 20, increase if many\n connections are being made and broken to your Modbus slave\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param response_manipulator: Callback method for manipulating the\n response\n \"\"\"\n self.active_connections = {}\n self.loop = kwargs.get(\"loop\") or asyncio.get_event_loop()\n self.allow_reuse_address = allow_reuse_address\n self.decoder = ServerDecoder()\n self.framer = framer or ModbusSocketFramer\n self.context = context or ModbusServerContext()\n self.control = ModbusControlBlock()\n self.address = address or (\"\", Defaults.TcpPort)\n self.handler = handler or ModbusConnectedRequestHandler\n self.handler.server = self\n self.ignore_missing_slaves = kwargs.get(\n \"ignore_missing_slaves\", Defaults.IgnoreMissingSlaves\n )\n self.broadcast_enable = kwargs.get(\"broadcast_enable\", Defaults.BroadcastEnable)\n self.response_manipulator = kwargs.get(\"response_manipulator\", None)\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n # asyncio future that will be done once server has started\n self.serving = self.loop.create_future()\n # constructors cannot be declared async, so we have to\n # defer the initialization of the server\n self.server = None\n self.factory_parms = {\n \"reuse_address\": allow_reuse_address,\n \"backlog\": backlog,\n \"start_serving\": not defer_start,\n }\n\n async def serve_forever(self):\n \"\"\"Start endless loop.\"\"\"\n if self.server is None:\n self.server = await self.loop.create_server(\n lambda: self.handler(self),\n *self.address,\n **self.factory_parms,\n )\n self.serving.set_result(True)\n try:\n await self.server.serve_forever()\n except asyncio.exceptions.CancelledError:\n raise\n except Exception as exc: # pylint: disable=broad-except\n txt = f\"Server unexpected exception {exc}\"\n _logger.error(txt)\n else:\n raise RuntimeError(\n \"Can't call serve_forever on an already running server object\"\n )\n _logger.info(\"Server graceful shutdown.\")\n\n async def shutdown(self):\n \"\"\"Shutdown server.\"\"\"\n await self.server_close()\n\n async def server_close(self):\n \"\"\"Close server.\"\"\"\n for k_item, v_item in self.active_connections.items():\n txt = f\"aborting active session {k_item}\"\n _logger.warning(txt)\n v_item.handler_task.cancel()\n self.active_connections = {}\n if self.server is not None:\n self.server.close()\n await self.server.wait_closed()\n self.server = None\n\n\nclass ModbusTlsServer(ModbusTcpServer):\n \"\"\"A modbus threaded tls socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n def __init__( # pylint: disable=too-many-arguments\n self,\n context,\n framer=None,\n identity=None,\n address=None,\n sslctx=None,\n certfile=None,\n keyfile=None,\n password=None,\n reqclicert=False,\n handler=None,\n allow_reuse_address=False,\n defer_start=False,\n backlog=20,\n **kwargs,\n ):\n \"\"\"Overloaded initializer for the socket server.\n\n If the identify structure is not passed in, the ModbusControlBlock\n uses its own empty 
structure.\n\n :param context: The ModbusServerContext datastore\n :param framer: The framer strategy to use\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param sslctx: The SSLContext to use for TLS (default None and auto\n create)\n :param certfile: The cert file path for TLS (used if sslctx is None)\n :param keyfile: The key file path for TLS (used if sslctx is None)\n :param password: The password for for decrypting the private key file\n :param reqclicert: Force the sever request client's certificate\n :param handler: A handler for each client session; default is\n ModbusConnectedRequestHandler. The handler class\n receives connection create/teardown events\n :param allow_reuse_address: Whether the server will allow the\n reuse of an address.\n :param backlog: is the maximum number of queued connections\n passed to listen(). Defaults to 20, increase if many\n connections are being made and broken to your Modbus slave\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param response_manipulator: Callback method for\n manipulating the response\n \"\"\"\n super().__init__(\n context,\n framer=framer,\n identity=identity,\n address=address,\n handler=handler,\n allow_reuse_address=allow_reuse_address,\n defer_start=defer_start,\n backlog=backlog,\n **kwargs,\n )\n self.sslctx = sslctx_provider(sslctx, certfile, keyfile, password, reqclicert)\n self.factory_parms[\"ssl\"] = self.sslctx\n\n\nclass ModbusUdpServer:\n \"\"\"A modbus threaded udp socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n def __init__(\n self,\n context,\n framer=None,\n identity=None,\n address=None,\n handler=None,\n defer_start=False, # pylint: disable=unused-argument\n backlog=20, # pylint: disable=unused-argument\n **kwargs,\n ):\n \"\"\"Overloaded initializer for the socket server.\n\n If the identify structure is not passed in, the ModbusControlBlock\n uses its own empty structure.\n\n :param context: The ModbusServerContext datastore\n :param framer: The framer strategy to use\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param handler: A handler for each client session; default is\n ModbusDisonnectedRequestHandler\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param response_manipulator: Callback method for\n manipulating the response\n \"\"\"\n self.loop = asyncio.get_running_loop()\n self.decoder = ServerDecoder()\n self.framer = framer or ModbusSocketFramer\n self.context = context or ModbusServerContext()\n self.control = ModbusControlBlock()\n self.address = address or (\"\", Defaults.TcpPort)\n self.handler = handler or ModbusDisconnectedRequestHandler\n self.ignore_missing_slaves = kwargs.get(\n \"ignore_missing_slaves\", Defaults.IgnoreMissingSlaves\n )\n self.broadcast_enable = kwargs.get(\"broadcast_enable\", Defaults.BroadcastEnable)\n self.response_manipulator = kwargs.get(\"response_manipulator\", None)\n\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n self.protocol = None\n self.endpoint = 
None\n self.on_connection_terminated = None\n # asyncio future that will be done once server has started\n self.serving = self.loop.create_future()\n self.factory_parms = {\n \"local_addr\": self.address,\n \"allow_broadcast\": True,\n }\n\n async def serve_forever(self):\n \"\"\"Start endless loop.\"\"\"\n if self.protocol is None:\n try:\n self.protocol, self.endpoint = await self.loop.create_datagram_endpoint(\n lambda: self.handler(self),\n **self.factory_parms,\n )\n except asyncio.exceptions.CancelledError:\n raise\n except Exception as exc:\n txt = f\"Server unexpected exception {exc}\"\n _logger.error(txt)\n raise RuntimeError(exc) from exc\n self.serving.set_result(True)\n else:\n raise RuntimeError(\n \"Can't call serve_forever on an already running server object\"\n )\n\n async def shutdown(self):\n \"\"\"Shutdown server.\"\"\"\n await self.server_close()\n\n async def server_close(self):\n \"\"\"Close server.\"\"\"\n if self.endpoint:\n self.endpoint.running = False\n if self.endpoint is not None and self.endpoint.handler_task is not None:\n self.endpoint.handler_task.cancel()\n if self.protocol is not None:\n self.protocol.close()\n self.protocol = None\n\n\nclass ModbusSerialServer: # pylint: disable=too-many-instance-attributes\n \"\"\"A modbus threaded serial socket server.\n\n We inherit and overload the socket server so that we\n can control the client threads as well as have a single\n server context instance.\n \"\"\"\n\n handler = None\n\n def __init__(\n self, context, framer=ModbusRtuFramer, identity=None, **kwargs\n ): # pragma: no cover\n \"\"\"Initialize the socket server.\n\n If the identity structure is not passed in, the ModbusControlBlock\n uses its own empty structure.\n :param context: The ModbusServerContext datastore\n :param framer: The framer strategy to use, default ModbusRtuFramer\n :param identity: An optional identify structure\n :param port: The serial port to attach to\n :param stopbits: The number of stop bits to use\n :param bytesize: The bytesize of the serial messages\n :param parity: Which kind of parity to use\n :param baudrate: The baud rate to use for the serial device\n :param timeout: The timeout to use for the serial device\n :param handle_local_echo: (optional) Discard local echo from dongle.\n :param ignore_missing_slaves: True to not send errors on a request\n to a missing slave\n :param broadcast_enable: True to treat unit_id 0 as broadcast address,\n False to treat 0 as any other unit_id\n :param auto_reconnect: True to enable automatic reconnection,\n False otherwise\n :param reconnect_delay: reconnect delay in seconds\n :param response_manipulator: Callback method for\n manipulating the response\n \"\"\"\n self.loop = kwargs.get(\"loop\") or asyncio.get_event_loop()\n self.bytesize = kwargs.get(\"bytesize\", Defaults.Bytesize)\n self.parity = kwargs.get(\"parity\", Defaults.Parity)\n self.baudrate = kwargs.get(\"baudrate\", Defaults.Baudrate)\n self.timeout = kwargs.get(\"timeout\", Defaults.Timeout)\n self.device = kwargs.get(\"port\", 0)\n self.stopbits = kwargs.get(\"stopbits\", Defaults.Stopbits)\n self.handle_local_echo = kwargs.get(\n \"handle_local_echo\", Defaults.HandleLocalEcho\n )\n self.ignore_missing_slaves = kwargs.get(\n \"ignore_missing_slaves\", Defaults.IgnoreMissingSlaves\n )\n self.broadcast_enable = kwargs.get(\"broadcast_enable\", Defaults.BroadcastEnable)\n self.auto_reconnect = kwargs.get(\"auto_reconnect\", False)\n self.reconnect_delay = kwargs.get(\"reconnect_delay\", 2)\n self.reconnecting_task = None\n 
self.handler = kwargs.get(\"handler\") or ModbusSingleRequestHandler\n self.framer = framer or ModbusRtuFramer\n self.decoder = ServerDecoder()\n self.context = context or ModbusServerContext()\n self.response_manipulator = kwargs.get(\"response_manipulator\", None)\n self.control = ModbusControlBlock()\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n self.protocol = None\n self.transport = None\n self.server = None\n self.control = ModbusControlBlock()\n identity = kwargs.get(\"identity\")\n if isinstance(identity, ModbusDeviceIdentification):\n self.control.Identity.update(identity)\n\n async def start(self):\n \"\"\"Start connecting.\"\"\"\n await self._connect()\n\n async def _delayed_connect(self):\n \"\"\"Delay connect.\"\"\"\n await asyncio.sleep(self.reconnect_delay)\n await self._connect()\n\n async def _connect(self):\n \"\"\"Connect.\"\"\"\n if self.reconnecting_task is not None:\n self.reconnecting_task = None\n if self.device.startswith(\"socket:\"):\n return\n try:\n self.transport, self.protocol = await create_serial_connection(\n self.loop,\n lambda: self.handler(self),\n self.device,\n baudrate=self.baudrate,\n bytesize=self.bytesize,\n parity=self.parity,\n stopbits=self.stopbits,\n timeout=self.timeout,\n )\n except serial.serialutil.SerialException as exc:\n txt = f\"Failed to open serial port: {self.device}\"\n _logger.debug(txt)\n if not self.auto_reconnect:\n raise exc\n self._check_reconnect()\n except Exception as exc: # pylint: disable=broad-except\n txt = f\"Exception while create - {exc}\"\n _logger.debug(txt)\n\n def on_connection_lost(self):\n \"\"\"Call on lost connection.\"\"\"\n if self.transport is not None:\n self.transport.close()\n self.transport = None\n self.protocol = None\n if self.server is None:\n self._check_reconnect()\n\n async def shutdown(self):\n \"\"\"Terminate server.\"\"\"\n if self.transport is not None:\n self.transport.abort()\n if self.server is not None:\n self.server.close()\n await asyncio.wait_for(self.server.wait_closed(), 10)\n self.server = None\n self.transport = None\n self.protocol = None\n\n def _check_reconnect(self):\n \"\"\"Check reconnect.\"\"\"\n txt = f\"checking autoreconnect {self.auto_reconnect} {self.reconnecting_task}\"\n _logger.debug(txt)\n if self.auto_reconnect and (self.reconnecting_task is None):\n _logger.debug(\"Scheduling serial connection reconnect\")\n self.reconnecting_task = self.loop.create_task(self._delayed_connect())\n\n async def serve_forever(self):\n \"\"\"Start endless loop.\"\"\"\n if self.server:\n raise RuntimeError(\n \"Can't call serve_forever on an already running server object\"\n )\n if self.device.startswith(\"socket:\"):\n # Socket server means listen so start a socket server\n parts = self.device[9:].split(\":\")\n host_addr = (parts[0], int(parts[1]))\n self.server = await self.loop.create_server(\n lambda: self.handler(self),\n *host_addr,\n reuse_address=True,\n start_serving=True,\n backlog=20,\n )\n try:\n await self.server.serve_forever()\n except asyncio.exceptions.CancelledError:\n raise\n except Exception as exc: # pylint: disable=broad-except\n txt = f\"Server unexpected exception {exc}\"\n _logger.error(txt)\n return\n\n while self.server or self.transport or self.protocol:\n await asyncio.sleep(10)\n\n\n# --------------------------------------------------------------------------- #\n# Creation Factories\n# --------------------------------------------------------------------------- #\n\n\nclass _serverList:\n 
\"\"\"Maintains a list of active servers.\n\n The list allows applications to have multiple servers and\n being able to do shutdown gracefully.\n \"\"\"\n\n _servers = []\n\n def __init__(self, server, custom_functions, register):\n \"\"\"Register new server.\"\"\"\n for func in custom_functions:\n server.decoder.register(func)\n self.server = server\n if register:\n self._servers.append(self)\n self.job_stop = asyncio.Event()\n self.job_is_stopped = asyncio.Event()\n self.task = None\n self.loop = asyncio.get_event_loop()\n\n @classmethod\n def get_server(cls):\n \"\"\"Get server at index.\"\"\"\n return cls._servers[-1] if cls._servers else None\n\n def _remove(self):\n \"\"\"Remove server from active list.\"\"\"\n server = self._servers[-1]\n self._servers.pop()\n del server\n\n async def _run(self):\n \"\"\"Help starting/stopping server.\"\"\"\n # self.task = asyncio.create_task(self.server.serve_forever())\n # await self.job_stop.wait()\n # await self.server.shutdown()\n # await asyncio.sleep(0.1)\n # self.task.cancel()\n # await asyncio.sleep(0.1)\n # try:\n # await asyncio.wait_for(self.task, 10)\n # except asyncio.CancelledError:\n # pass\n # self.job_is_stopped.set()\n\n async def run(self):\n \"\"\"Help starting/stopping server.\"\"\"\n try:\n # await self._run()\n await self.server.serve_forever()\n except asyncio.CancelledError:\n pass\n\n async def async_await_stop(self):\n \"\"\"Wait for server stop.\"\"\"\n await self.server.shutdown()\n # self.job_stop.set()\n # try:\n # await asyncio.wait_for(self.job_is_stopped.wait(), 60)\n # except asyncio.exceptions.CancelledError:\n # pass\n # self._remove()\n\n\nasync def StartAsyncUnixServer( # pylint: disable=invalid-name,dangerous-default-value\n context=None,\n identity=None,\n path=None,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n):\n \"\"\"Start and run a tcp modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param path: An optional path to bind to.\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs: The rest\n :return: an initialized but inactive server object coroutine\n \"\"\"\n server = ModbusUnixServer(\n context, path, kwargs.pop(\"framer\", ModbusSocketFramer), identity, **kwargs\n )\n if not defer_start:\n job = _serverList(server, custom_functions, not defer_start)\n await job.run()\n return server\n\n\nasync def StartAsyncTcpServer( # pylint: disable=invalid-name,dangerous-default-value\n context=None,\n identity=None,\n address=None,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n):\n \"\"\"Start and run a tcp modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs: The rest\n :return: an initialized but inactive server object coroutine\n \"\"\"\n server = ModbusTcpServer(\n context, kwargs.pop(\"framer\", ModbusSocketFramer), identity, address, **kwargs\n )\n if not defer_start:\n job 
= _serverList(server, custom_functions, not defer_start)\n await job.run()\n return server\n\n\nasync def StartAsyncTlsServer( # pylint: disable=invalid-name,dangerous-default-value,too-many-arguments\n context=None,\n identity=None,\n address=None,\n sslctx=None,\n certfile=None,\n keyfile=None,\n password=None,\n reqclicert=False,\n allow_reuse_address=False,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n):\n \"\"\"Start and run a tls modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param sslctx: The SSLContext to use for TLS (default None and auto create)\n :param certfile: The cert file path for TLS (used if sslctx is None)\n :param keyfile: The key file path for TLS (used if sslctx is None)\n :param password: The password for for decrypting the private key file\n :param reqclicert: Force the sever request client's certificate\n :param allow_reuse_address: Whether the server will allow the reuse of an\n address.\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs: The rest\n :return: an initialized but inactive server object coroutine\n \"\"\"\n server = ModbusTlsServer(\n context,\n kwargs.pop(\"framer\", ModbusTlsFramer),\n identity,\n address,\n sslctx,\n certfile,\n keyfile,\n password,\n reqclicert,\n allow_reuse_address=allow_reuse_address,\n **kwargs,\n )\n if not defer_start:\n job = _serverList(server, custom_functions, not defer_start)\n await job.run()\n return server\n\n\nasync def StartAsyncUdpServer( # pylint: disable=invalid-name,dangerous-default-value\n context=None,\n identity=None,\n address=None,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n):\n \"\"\"Start and run a udp modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param address: An optional (interface, port) to bind to.\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs:\n \"\"\"\n server = ModbusUdpServer(\n context, kwargs.pop(\"framer\", ModbusSocketFramer), identity, address, **kwargs\n )\n if not defer_start:\n job = _serverList(server, custom_functions, not defer_start)\n await job.run()\n return server\n\n\nasync def StartAsyncSerialServer( # pylint: disable=invalid-name,dangerous-default-value\n context=None,\n identity=None,\n custom_functions=[],\n defer_start=False,\n **kwargs,\n): # pragma: no cover\n \"\"\"Start and run a serial modbus server.\n\n :param context: The ModbusServerContext datastore\n :param identity: An optional identify structure\n :param custom_functions: An optional list of custom function classes\n supported by server instance.\n :param defer_start: if set, the server object will be returned ready to start.\n Otherwise, the server will be immediately spun\n up without the ability to shut it off\n :param kwargs: The rest\n \"\"\"\n server = ModbusSerialServer(\n context, kwargs.pop(\"framer\", ModbusAsciiFramer), identity=identity, **kwargs\n )\n if not defer_start:\n job = 
_serverList(server, custom_functions, not defer_start)\n await server.start()\n await job.run()\n return server\n\n\ndef StartSerialServer(**kwargs): # pylint: disable=invalid-name\n \"\"\"Start and run a serial modbus server.\"\"\"\n return asyncio.run(StartAsyncSerialServer(**kwargs))\n\n\ndef StartTcpServer(**kwargs): # pylint: disable=invalid-name\n \"\"\"Start and run a serial modbus server.\"\"\"\n return asyncio.run(StartAsyncTcpServer(**kwargs))\n\n\ndef StartTlsServer(**kwargs): # pylint: disable=invalid-name\n \"\"\"Start and run a serial modbus server.\"\"\"\n return asyncio.run(StartAsyncTlsServer(**kwargs))\n\n\ndef StartUdpServer(**kwargs): # pylint: disable=invalid-name\n \"\"\"Start and run a serial modbus server.\"\"\"\n return asyncio.run(StartAsyncUdpServer(**kwargs))\n\n\nasync def ServerAsyncStop(): # pylint: disable=invalid-name\n \"\"\"Terminate server.\"\"\"\n if my_job := _serverList.get_server():\n await my_job.async_await_stop()\n await asyncio.sleep(0.1)\n else:\n raise RuntimeError(\"ServerAsyncStop called without server task active.\")\n\n\ndef ServerStop(): # pylint: disable=invalid-name\n \"\"\"Terminate server.\"\"\"\n if my_job := _serverList.get_server():\n if my_job.loop.is_running():\n asyncio.run_coroutine_threadsafe(my_job.async_await_stop(), my_job.loop)\n sleep(0.1)\n else:\n raise RuntimeError(\"ServerStop called without server task active.\")\n", "path": "pymodbus/server/async_io.py" } ]
diff --git a/pymodbus/server/async_io.py b/pymodbus/server/async_io.py
index 66593afee..6151a528b 100644
--- a/pymodbus/server/async_io.py
+++ b/pymodbus/server/async_io.py
@@ -1263,6 +1263,7 @@ async def StartAsyncSerialServer( # pylint: disable=invalid-name,dangerous-defa
     )
     if not defer_start:
         job = _serverList(server, custom_functions, not defer_start)
+        await server.start()
         await job.run()
     return server
 
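Why this one-line change matters: judging from the `ModbusSerialServer` code in the record, `serve_forever()` only opens a listening socket for `socket:` device URLs; for a real serial port the transport is created inside `_connect()`, which is reached through `server.start()`. Before the patch the factory never called `start()`, so `serve_forever()` found no server, transport or protocol and returned immediately. The sketch below shows how the patched `StartAsyncSerialServer` would typically be used; the datastore layout, device path and baud rate are illustrative assumptions, not part of the record.
```
import asyncio

from pymodbus.datastore import (
    ModbusSequentialDataBlock,
    ModbusServerContext,
    ModbusSlaveContext,
)
from pymodbus.server.async_io import StartAsyncSerialServer
from pymodbus.transaction import ModbusRtuFramer


async def main():
    # Single-slave datastore with 100 holding registers (assumed setup).
    store = ModbusSlaveContext(hr=ModbusSequentialDataBlock(0, [0] * 100))
    context = ModbusServerContext(slaves=store, single=True)
    # With the added server.start(), the serial port is opened before the call
    # blocks in job.run() / serve_forever() until the server is shut down.
    await StartAsyncSerialServer(
        context=context,
        framer=ModbusRtuFramer,
        port="/dev/ttyUSB0",  # assumed device path
        baudrate=9600,
    )


if __name__ == "__main__":
    asyncio.run(main())
```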
mindee__doctr-243
Problem: unit test test_export_sizes not passing on tf 2.3.1

The unit test test_export_sizes does not pass locally on tf 2.3.1:
```
    def test_export_sizes(test_convert_to_tflite, test_convert_to_fp16, test_quantize_model):
        assert sys.getsizeof(test_convert_to_tflite) > sys.getsizeof(test_convert_to_fp16)
>       assert sys.getsizeof(test_convert_to_fp16) > sys.getsizeof(test_quantize_model)
E       AssertionError: assert 3041 > 3041
```
[ { "content": "# Copyright (C) 2021, Mindee.\n\n# This program is licensed under the Apache License version 2.\n# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.\n\n\"\"\"\nPackage installation setup\n\"\"\"\n\nimport os\nfrom pathlib import Path\nimport subprocess\n\nfrom setuptools import find_packages, setup\n\n\nversion = \"0.1.2a0\"\nsha = 'Unknown'\npackage_name = 'doctr'\n\ncwd = Path(__file__).parent.absolute()\n\nif os.getenv('BUILD_VERSION'):\n version = os.getenv('BUILD_VERSION')\nelif sha != 'Unknown':\n try:\n sha = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=cwd).decode('ascii').strip()\n except Exception:\n pass\n version += '+' + sha[:7]\nprint(f\"Building wheel {package_name}-{version}\")\n\nwith open(cwd.joinpath(package_name, 'version.py'), 'w') as f:\n f.write(f\"__version__ = '{version}'\\n\")\n\nwith open('README.md', 'r') as f:\n readme = f.read()\n\nrequirements = [\n \"numpy>=1.16.0\",\n \"scipy>=1.4.0\",\n \"opencv-python>=4.2\",\n \"tensorflow>=2.3.0\",\n \"PyMuPDF>=1.16.0,<1.18.11\",\n \"pyclipper>=1.2.0\",\n \"shapely>=1.6.0\",\n \"matplotlib>=3.1.0\",\n \"mplcursors>=0.3\",\n \"rapidfuzz>=1.0.0\",\n \"weasyprint>=52.2\",\n]\n\nsetup(\n # Metadata\n name=os.getenv('PKG_INDEX') if os.getenv('PKG_INDEX') else package_name,\n version=version,\n author='François-Guillaume Fernandez, Charles Gaillard',\n author_email='[email protected]',\n description='Extract valuable text information from your documents',\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n url='https://github.com/mindee/doctr',\n download_url='https://github.com/mindee/doctr/tags',\n license='Apache',\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n keywords=['ocr', 'deep learning', 'tensorflow', 'text detection', 'text recognition'],\n\n # Package info\n packages=find_packages(exclude=('test',)),\n zip_safe=True,\n python_requires='>=3.6.0',\n include_package_data=True,\n install_requires=requirements,\n package_data={'': ['LICENSE']}\n)\n", "path": "setup.py" } ]
[ { "content": "# Copyright (C) 2021, Mindee.\n\n# This program is licensed under the Apache License version 2.\n# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.\n\n\"\"\"\nPackage installation setup\n\"\"\"\n\nimport os\nfrom pathlib import Path\nimport subprocess\n\nfrom setuptools import find_packages, setup\n\n\nversion = \"0.1.2a0\"\nsha = 'Unknown'\npackage_name = 'doctr'\n\ncwd = Path(__file__).parent.absolute()\n\nif os.getenv('BUILD_VERSION'):\n version = os.getenv('BUILD_VERSION')\nelif sha != 'Unknown':\n try:\n sha = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=cwd).decode('ascii').strip()\n except Exception:\n pass\n version += '+' + sha[:7]\nprint(f\"Building wheel {package_name}-{version}\")\n\nwith open(cwd.joinpath(package_name, 'version.py'), 'w') as f:\n f.write(f\"__version__ = '{version}'\\n\")\n\nwith open('README.md', 'r') as f:\n readme = f.read()\n\nrequirements = [\n \"numpy>=1.16.0\",\n \"scipy>=1.4.0\",\n \"opencv-python>=4.2\",\n \"tensorflow>=2.4.0\",\n \"PyMuPDF>=1.16.0,<1.18.11\",\n \"pyclipper>=1.2.0\",\n \"shapely>=1.6.0\",\n \"matplotlib>=3.1.0\",\n \"mplcursors>=0.3\",\n \"rapidfuzz>=1.0.0\",\n \"weasyprint>=52.2\",\n]\n\nsetup(\n # Metadata\n name=os.getenv('PKG_INDEX') if os.getenv('PKG_INDEX') else package_name,\n version=version,\n author='François-Guillaume Fernandez, Charles Gaillard',\n author_email='[email protected]',\n description='Extract valuable text information from your documents',\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n url='https://github.com/mindee/doctr',\n download_url='https://github.com/mindee/doctr/tags',\n license='Apache',\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n keywords=['ocr', 'deep learning', 'tensorflow', 'text detection', 'text recognition'],\n\n # Package info\n packages=find_packages(exclude=('test',)),\n zip_safe=True,\n python_requires='>=3.6.0',\n include_package_data=True,\n install_requires=requirements,\n package_data={'': ['LICENSE']}\n)\n", "path": "setup.py" } ]
diff --git a/requirements.txt b/requirements.txt index 329db4d173..e67e58e8d2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ numpy>=1.16.0 scipy>=1.4.0 opencv-python>=3.4.5.20 -tensorflow>=2.3.0 +tensorflow>=2.4.0 PyMuPDF>=1.16.0,<1.18.11 pyclipper>=1.2.0 shapely>=1.6.0 diff --git a/setup.py b/setup.py index 7c423b4c58..145c41bad0 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,7 @@ "numpy>=1.16.0", "scipy>=1.4.0", "opencv-python>=4.2", - "tensorflow>=2.3.0", + "tensorflow>=2.4.0", "PyMuPDF>=1.16.0,<1.18.11", "pyclipper>=1.2.0", "shapely>=1.6.0", diff --git a/test/test_models_export.py b/test/test_models_export.py index fe0b9b9a30..d482e4a10e 100644 --- a/test/test_models_export.py +++ b/test/test_models_export.py @@ -40,7 +40,4 @@ def test_quantize_model(mock_model): def test_export_sizes(test_convert_to_tflite, test_convert_to_fp16, test_quantize_model): assert sys.getsizeof(test_convert_to_tflite) > sys.getsizeof(test_convert_to_fp16) - if tf.__version__ < "2.4.0": - assert sys.getsizeof(test_convert_to_fp16) >= sys.getsizeof(test_quantize_model) - else: - assert sys.getsizeof(test_convert_to_fp16) > sys.getsizeof(test_quantize_model) + assert sys.getsizeof(test_convert_to_fp16) > sys.getsizeof(test_quantize_model)
DjangoGirls__djangogirls-63
Order of the questions in the form can get mixed up
Haven't debugged it yet, but just adding this so I won't forget.
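One likely cause, and the approach the accompanying change adopts, is that the generated form fields are collected in a plain `dict`, whose iteration order is not guaranteed on the Python versions in use here. A minimal sketch of the idea follows; the question objects (with `pk`, `title` and `is_required`) are simplified stand-ins for the project's real `Question` model, and only a single field type is shown:

```python
from collections import OrderedDict

from django import forms


def generate_form_from_questions(questions):
    # A plain ``{}`` can yield fields in an arbitrary order on older Python
    # versions; OrderedDict preserves the order the questions were supplied
    # in, so the rendered form matches that order.
    fields = OrderedDict()

    for question in questions:
        name = 'question_{}'.format(question.pk)
        fields[name] = forms.CharField(
            label=question.title,
            required=question.is_required,
        )

    return fields
```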
[ { "content": "from django import forms\n\n\ndef generate_form_from_questions(questions):\n fields = {}\n\n for question in questions:\n options = {\n 'label': question.title,\n 'help_text': question.help_text or None,\n 'required': question.is_required,\n }\n name = 'question_{}'.format(question.pk)\n\n if question.question_type == 'text':\n options['widget'] = forms.Textarea\n\n if question.question_type == 'choices':\n choices = ((x, x) for x in question.choices.split(';'))\n options['choices'] = choices\n\n if question.question_type in ['paragraph', 'text']:\n fields[name] = forms.CharField(**options)\n elif question.question_type == 'choices':\n if question.is_multiple_choice:\n options['widget'] = forms.CheckboxSelectMultiple\n fields[name] = forms.MultipleChoiceField(**options)\n else:\n options['widget'] = forms.RadioSelect\n fields[name] = forms.ChoiceField(**options)\n\n if question.question_type == 'email':\n fields[name] = forms.EmailField(**options)\n\n fields['newsletter_optin'] = forms.ChoiceField(\n widget=forms.RadioSelect,\n label='Do you want to receive news from the Django Girls team?',\n help_text='No spam, pinky swear! Only helpful programming tips and '\n 'latest news from Django Girls world. We sent this very rarely.',\n required=True,\n choices=(('yes', 'Yes please!'), ('no', 'No, thank you'))\n )\n\n return fields\n\n\ndef get_applications_for_page(page, state=None, rsvp_status=None, order=None):\n \"\"\"\n Return a QuerySet of Application objects for a given page.\n Raises Form.DoesNotExist if Form for page does not yet exist.\n \"\"\"\n from applications.models import Form # circular import\n page_form = Form.objects.filter(page=page)\n if not page_form.exists():\n raise Form.DoesNotExist\n page_form = page_form.first()\n\n applications = page_form.application_set.all()\n\n if rsvp_status:\n applications = applications.filter(state='accepted', rsvp_status__in=rsvp_status)\n elif state:\n applications = applications.filter(state__in=state)\n\n if order:\n is_reversed = True if order[0] == '-' else False\n order = order[1:] if order[0] == '-' else order\n if order == 'average_score':\n # here is an exception for the average_score, because we also want to get\n # the standard deviation into account in this sorting\n applications = sorted(applications, key=lambda app: (getattr(app, order), -app.stdev()), reverse=is_reversed)\n else:\n applications = sorted(applications, key=lambda app: getattr(app, order), reverse=is_reversed)\n\n return applications\n\n\ndef random_application(request, page, prev_application):\n \"\"\"\n Get a new random application for a particular event,\n that hasn't been scored by the request user.\n \"\"\"\n from applications.models import Application # circular import\n return Application.objects.filter(\n form__page=page\n ).exclude(pk=prev_application.id\n ).exclude(scores__user=request.user).order_by('?').first()\n\n\nDEFAULT_QUESTIONS = [\n {\n \"title\": \"What's your name?\",\n \"question_type\": \"paragraph\",\n },\n {\n \"title\": \"Your e-mail address:\",\n \"question_type\": \"email\",\n },\n {\n \"title\": \"Your phone number:\",\n \"help_text\": \"Include your country prefix\",\n \"question_type\": \"paragraph\",\n },\n {\n \"title\": \"Where are you from?\",\n \"help_text\": \"City, Country\",\n \"question_type\": \"paragraph\",\n },\n {\n \"title\": \"How old are you?\",\n \"question_type\": \"paragraph\",\n \"is_required\": False,\n },\n {\n \"title\": \"Which operating system do you use?\",\n \"question_type\": \"choices\",\n 
\"choices\": \"Mac OS X; Windows; Linux\",\n \"is_multiple_choice\": True,\n },\n {\n \"title\": \"What is your current level of experience with programming?\",\n \"question_type\": \"choices\",\n \"choices\": \"I'm a total beginner, I don't know anything about it; \"\n \"I've tried some HTML or CSS before; I've tried some JavaScript \"\n \"before; I've done a few lessons of Python; I've built a website \"\n \"before; I work as a programmer\",\n \"is_multiple_choice\": True,\n },\n {\n \"title\": \"If you checked anything other than beginner, could you \"\n \"tell us a bit more about your programming knowledge?\",\n \"question_type\": \"text\",\n \"is_required\": False,\n },\n {\n \"title\": \"What is your current occupation?\",\n \"help_text\": \"What is your current job? Are you a student?\",\n \"question_type\": \"text\",\n },\n {\n \"title\": \"Why do you want to attend the workshop?\",\n \"help_text\": \"Tell us about your motivations and aspirations.\",\n \"question_type\": \"text\",\n },\n {\n \"title\": \"How are you planning to share what you've learnt with \"\n \"others?\",\n \"help_text\": \"Django Girls is a volunteer-run organisation and we \"\n \"look for people who are active and can help us help more women get \"\n \"into the field. We want you to share what you learn at the workshop \"\n \"with others in different ways: by organising a Django Girls event \"\n \"in your city, talking about Django Girls on your local meetups, \"\n \"writing a blog or simply teaching your friends.\",\n \"question_type\": \"text\",\n \"is_required\": False\n },\n {\n \"title\": \"How did you hear about Django Girls?\",\n \"help_text\": \"Django Girls is a volunteer-run organisation and we \"\n \"look for people who are active and can help us help more women get \"\n \"into the field. We want you to share what you learn at the workshop \"\n \"with others in different ways: by organising a Django Girls event \"\n \"in your city, talking about Django Girls on your local meetups, \"\n \"writing a blog or simply teaching your friends.\",\n \"question_type\": \"choices\",\n \"choices\": \"Facebook; Twitter; From a friend; PyLadies\",\n \"is_required\": False,\n \"is_multiple_choice\": True,\n },\n {\n \"title\": \"It is important that all attendees comply with the \"\n \"<a href='/pages/coc/'>Django Girls Code of Conduct</a>\",\n \"question_type\": \"choices\",\n \"choices\": \"I've read and understood the Django Girls Code of Conduct\",\n \"is_required\": True,\n \"is_multiple_choice\": True,\n }\n]", "path": "applications/utils.py" } ]
[ { "content": "from collections import OrderedDict\n\nfrom django import forms\n\n\ndef generate_form_from_questions(questions):\n fields = OrderedDict()\n\n for question in questions:\n options = {\n 'label': question.title,\n 'help_text': question.help_text or None,\n 'required': question.is_required,\n }\n name = 'question_{}'.format(question.pk)\n\n if question.question_type == 'text':\n options['widget'] = forms.Textarea\n\n if question.question_type == 'choices':\n choices = ((x, x) for x in question.choices.split(';'))\n options['choices'] = choices\n\n if question.question_type in ['paragraph', 'text']:\n fields[name] = forms.CharField(**options)\n elif question.question_type == 'choices':\n if question.is_multiple_choice:\n options['widget'] = forms.CheckboxSelectMultiple\n fields[name] = forms.MultipleChoiceField(**options)\n else:\n options['widget'] = forms.RadioSelect\n fields[name] = forms.ChoiceField(**options)\n\n if question.question_type == 'email':\n fields[name] = forms.EmailField(**options)\n\n fields['newsletter_optin'] = forms.ChoiceField(\n widget=forms.RadioSelect,\n label='Do you want to receive news from the Django Girls team?',\n help_text='No spam, pinky swear! Only helpful programming tips and '\n 'latest news from Django Girls world. We sent this very rarely.',\n required=True,\n choices=(('yes', 'Yes please!'), ('no', 'No, thank you'))\n )\n\n return fields\n\n\ndef get_applications_for_page(page, state=None, rsvp_status=None, order=None):\n \"\"\"\n Return a QuerySet of Application objects for a given page.\n Raises Form.DoesNotExist if Form for page does not yet exist.\n \"\"\"\n from applications.models import Form # circular import\n page_form = Form.objects.filter(page=page)\n if not page_form.exists():\n raise Form.DoesNotExist\n page_form = page_form.first()\n\n applications = page_form.application_set.all()\n\n if rsvp_status:\n applications = applications.filter(state='accepted', rsvp_status__in=rsvp_status)\n elif state:\n applications = applications.filter(state__in=state)\n\n if order:\n is_reversed = True if order[0] == '-' else False\n order = order[1:] if order[0] == '-' else order\n if order == 'average_score':\n # here is an exception for the average_score, because we also want to get\n # the standard deviation into account in this sorting\n applications = sorted(applications, key=lambda app: (getattr(app, order), -app.stdev()), reverse=is_reversed)\n else:\n applications = sorted(applications, key=lambda app: getattr(app, order), reverse=is_reversed)\n\n return applications\n\n\ndef random_application(request, page, prev_application):\n \"\"\"\n Get a new random application for a particular event,\n that hasn't been scored by the request user.\n \"\"\"\n from applications.models import Application # circular import\n return Application.objects.filter(\n form__page=page\n ).exclude(pk=prev_application.id\n ).exclude(scores__user=request.user).order_by('?').first()\n\n\nDEFAULT_QUESTIONS = [\n {\n \"title\": \"What's your name?\",\n \"question_type\": \"paragraph\",\n },\n {\n \"title\": \"Your e-mail address:\",\n \"question_type\": \"email\",\n },\n {\n \"title\": \"Your phone number:\",\n \"help_text\": \"Include your country prefix\",\n \"question_type\": \"paragraph\",\n },\n {\n \"title\": \"Where are you from?\",\n \"help_text\": \"City, Country\",\n \"question_type\": \"paragraph\",\n },\n {\n \"title\": \"How old are you?\",\n \"question_type\": \"paragraph\",\n \"is_required\": False,\n },\n {\n \"title\": \"Which operating system 
do you use?\",\n \"question_type\": \"choices\",\n \"choices\": \"Mac OS X; Windows; Linux\",\n \"is_multiple_choice\": True,\n },\n {\n \"title\": \"What is your current level of experience with programming?\",\n \"question_type\": \"choices\",\n \"choices\": \"I'm a total beginner, I don't know anything about it; \"\n \"I've tried some HTML or CSS before; I've tried some JavaScript \"\n \"before; I've done a few lessons of Python; I've built a website \"\n \"before; I work as a programmer\",\n \"is_multiple_choice\": True,\n },\n {\n \"title\": \"If you checked anything other than beginner, could you \"\n \"tell us a bit more about your programming knowledge?\",\n \"question_type\": \"text\",\n \"is_required\": False,\n },\n {\n \"title\": \"What is your current occupation?\",\n \"help_text\": \"What is your current job? Are you a student?\",\n \"question_type\": \"text\",\n },\n {\n \"title\": \"Why do you want to attend the workshop?\",\n \"help_text\": \"Tell us about your motivations and aspirations.\",\n \"question_type\": \"text\",\n },\n {\n \"title\": \"How are you planning to share what you've learnt with \"\n \"others?\",\n \"help_text\": \"Django Girls is a volunteer-run organisation and we \"\n \"look for people who are active and can help us help more women get \"\n \"into the field. We want you to share what you learn at the workshop \"\n \"with others in different ways: by organising a Django Girls event \"\n \"in your city, talking about Django Girls on your local meetups, \"\n \"writing a blog or simply teaching your friends.\",\n \"question_type\": \"text\",\n \"is_required\": False\n },\n {\n \"title\": \"How did you hear about Django Girls?\",\n \"help_text\": \"Django Girls is a volunteer-run organisation and we \"\n \"look for people who are active and can help us help more women get \"\n \"into the field. We want you to share what you learn at the workshop \"\n \"with others in different ways: by organising a Django Girls event \"\n \"in your city, talking about Django Girls on your local meetups, \"\n \"writing a blog or simply teaching your friends.\",\n \"question_type\": \"choices\",\n \"choices\": \"Facebook; Twitter; From a friend; PyLadies\",\n \"is_required\": False,\n \"is_multiple_choice\": True,\n },\n {\n \"title\": \"It is important that all attendees comply with the \"\n \"<a href='/pages/coc/'>Django Girls Code of Conduct</a>\",\n \"question_type\": \"choices\",\n \"choices\": \"I've read and understood the Django Girls Code of Conduct\",\n \"is_required\": True,\n \"is_multiple_choice\": True,\n }\n]", "path": "applications/utils.py" } ]
diff --git a/applications/utils.py b/applications/utils.py index 280a0a333..f6ce9c84e 100644 --- a/applications/utils.py +++ b/applications/utils.py @@ -1,8 +1,10 @@ +from collections import OrderedDict + from django import forms def generate_form_from_questions(questions): - fields = {} + fields = OrderedDict() for question in questions: options = {
urllib3__urllib3-783
HTTPResponse.close may not close underlying connection.
Found while investigating kennethreitz/requests#2963

The `HTTPResponse` class has a `close` method that rather suggests it will try to close the backing TCP connection behind the given HTTP response. Right now, that's not what happens if the connection is kept alive for any reason (that is, if the server did not send `Connection: close`): instead, the TCP connection will be kept alive and handled as normal.

This seems moderately surprising to me. What it means, in practice, is that calling `HTTPResponse.close()` in both urllib3 and httplib/http.client does not guarantee the closure of the backing TCP connection: instead, in both cases it says "I'm done with the TCP connection, but the underlying connection is free to re-use it".

The problems this causes can be seen in the `_error_catcher` context manager on the HTTPResponse, which does not actually call the class's `close` method, presumably because it's too deficient to do the job. This behaviour affects the chunked transfer encoding decoding logic, which calls `self.close()` and therefore may incorrectly keep the connection alive, though it does not itself return the connection to the pool.

I believe it _should_ be safe to have `close` close the underlying connection if it is present. As something of an optimisation, we can then safely assume that `close` can call `release_conn`, which will allow us to keep hold of the `HTTPConnection` object in a situation where otherwise we might lose it.
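A minimal sketch of the proposed behaviour, trimmed to the relevant attributes (`_fp` and `_connection` are the names used in `urllib3/response.py` below; the `release_conn` optimisation mentioned above is left out):

```python
import io


class HTTPResponse(io.IOBase):
    """Trimmed to the pieces that matter for close(); illustrative only."""

    def __init__(self, body, connection=None):
        self._fp = body                # file-like response body from httplib
        self._connection = connection  # HTTPConnection, or None once released

    @property
    def closed(self):
        if self._fp is None:
            return True
        return getattr(self._fp, 'closed', True)

    def close(self):
        if not self.closed:
            self._fp.close()

        # Closing the body alone can leave a kept-alive TCP connection open;
        # closing the connection object guarantees the socket is torn down.
        if self._connection is not None:
            self._connection.close()
```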
[ { "content": "from __future__ import absolute_import\nfrom contextlib import contextmanager\nimport zlib\nimport io\nfrom socket import timeout as SocketTimeout\nfrom socket import error as SocketError\n\nfrom ._collections import HTTPHeaderDict\nfrom .exceptions import (\n ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked\n)\nfrom .packages.six import string_types as basestring, binary_type, PY3\nfrom .packages.six.moves import http_client as httplib\nfrom .connection import HTTPException, BaseSSLError\nfrom .util.response import is_fp_closed, is_response_to_head\n\n\nclass DeflateDecoder(object):\n\n def __init__(self):\n self._first_try = True\n self._data = binary_type()\n self._obj = zlib.decompressobj()\n\n def __getattr__(self, name):\n return getattr(self._obj, name)\n\n def decompress(self, data):\n if not data:\n return data\n\n if not self._first_try:\n return self._obj.decompress(data)\n\n self._data += data\n try:\n return self._obj.decompress(data)\n except zlib.error:\n self._first_try = False\n self._obj = zlib.decompressobj(-zlib.MAX_WBITS)\n try:\n return self.decompress(self._data)\n finally:\n self._data = None\n\n\nclass GzipDecoder(object):\n\n def __init__(self):\n self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)\n\n def __getattr__(self, name):\n return getattr(self._obj, name)\n\n def decompress(self, data):\n if not data:\n return data\n return self._obj.decompress(data)\n\n\ndef _get_decoder(mode):\n if mode == 'gzip':\n return GzipDecoder()\n\n return DeflateDecoder()\n\n\nclass HTTPResponse(io.IOBase):\n \"\"\"\n HTTP Response container.\n\n Backwards-compatible to httplib's HTTPResponse but the response ``body`` is\n loaded and decoded on-demand when the ``data`` property is accessed. This\n class is also compatible with the Python standard library's :mod:`io`\n module, and can hence be treated as a readable object in the context of that\n framework.\n\n Extra parameters for behaviour not present in httplib.HTTPResponse:\n\n :param preload_content:\n If True, the response's body will be preloaded during construction.\n\n :param decode_content:\n If True, attempts to decode specific content-encoding's based on headers\n (like 'gzip' and 'deflate') will be skipped and raw data will be used\n instead.\n\n :param original_response:\n When this HTTPResponse wrapper is generated from an httplib.HTTPResponse\n object, it's convenient to include the original for debug purposes. 
It's\n otherwise unused.\n \"\"\"\n\n CONTENT_DECODERS = ['gzip', 'deflate']\n REDIRECT_STATUSES = [301, 302, 303, 307, 308]\n\n def __init__(self, body='', headers=None, status=0, version=0, reason=None,\n strict=0, preload_content=True, decode_content=True,\n original_response=None, pool=None, connection=None):\n\n if isinstance(headers, HTTPHeaderDict):\n self.headers = headers\n else:\n self.headers = HTTPHeaderDict(headers)\n self.status = status\n self.version = version\n self.reason = reason\n self.strict = strict\n self.decode_content = decode_content\n\n self._decoder = None\n self._body = None\n self._fp = None\n self._original_response = original_response\n self._fp_bytes_read = 0\n\n if body and isinstance(body, (basestring, binary_type)):\n self._body = body\n\n self._pool = pool\n self._connection = connection\n\n if hasattr(body, 'read'):\n self._fp = body\n\n # Are we using the chunked-style of transfer encoding?\n self.chunked = False\n self.chunk_left = None\n tr_enc = self.headers.get('transfer-encoding', '').lower()\n # Don't incur the penalty of creating a list and then discarding it\n encodings = (enc.strip() for enc in tr_enc.split(\",\"))\n if \"chunked\" in encodings:\n self.chunked = True\n\n # If requested, preload the body.\n if preload_content and not self._body:\n self._body = self.read(decode_content=decode_content)\n\n def get_redirect_location(self):\n \"\"\"\n Should we redirect and where to?\n\n :returns: Truthy redirect location string if we got a redirect status\n code and valid location. ``None`` if redirect status and no\n location. ``False`` if not a redirect status code.\n \"\"\"\n if self.status in self.REDIRECT_STATUSES:\n return self.headers.get('location')\n\n return False\n\n def release_conn(self):\n if not self._pool or not self._connection:\n return\n\n self._pool._put_conn(self._connection)\n self._connection = None\n\n @property\n def data(self):\n # For backwords-compat with earlier urllib3 0.4 and earlier.\n if self._body:\n return self._body\n\n if self._fp:\n return self.read(cache_content=True)\n\n def tell(self):\n \"\"\"\n Obtain the number of bytes pulled over the wire so far. May differ from\n the amount of content returned by :meth:``HTTPResponse.read`` if bytes\n are encoded on the wire (e.g, compressed).\n \"\"\"\n return self._fp_bytes_read\n\n def _init_decoder(self):\n \"\"\"\n Set-up the _decoder attribute if necessar.\n \"\"\"\n # Note: content-encoding value should be case-insensitive, per RFC 7230\n # Section 3.2\n content_encoding = self.headers.get('content-encoding', '').lower()\n if self._decoder is None and content_encoding in self.CONTENT_DECODERS:\n self._decoder = _get_decoder(content_encoding)\n\n def _decode(self, data, decode_content, flush_decoder):\n \"\"\"\n Decode the data passed in and potentially flush the decoder.\n \"\"\"\n try:\n if decode_content and self._decoder:\n data = self._decoder.decompress(data)\n except (IOError, zlib.error) as e:\n content_encoding = self.headers.get('content-encoding', '').lower()\n raise DecodeError(\n \"Received response with content-encoding: %s, but \"\n \"failed to decode it.\" % content_encoding, e)\n\n if flush_decoder and decode_content:\n data += self._flush_decoder()\n\n return data\n\n def _flush_decoder(self):\n \"\"\"\n Flushes the decoder. 
Should only be called if the decoder is actually\n being used.\n \"\"\"\n if self._decoder:\n buf = self._decoder.decompress(b'')\n return buf + self._decoder.flush()\n\n return b''\n\n @contextmanager\n def _error_catcher(self):\n \"\"\"\n Catch low-level python exceptions, instead re-raising urllib3\n variants, so that low-level exceptions are not leaked in the\n high-level api.\n\n On exit, release the connection back to the pool.\n \"\"\"\n try:\n try:\n yield\n\n except SocketTimeout:\n # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but\n # there is yet no clean way to get at it from this context.\n raise ReadTimeoutError(self._pool, None, 'Read timed out.')\n\n except BaseSSLError as e:\n # FIXME: Is there a better way to differentiate between SSLErrors?\n if 'read operation timed out' not in str(e): # Defensive:\n # This shouldn't happen but just in case we're missing an edge\n # case, let's avoid swallowing SSL errors.\n raise\n\n raise ReadTimeoutError(self._pool, None, 'Read timed out.')\n\n except (HTTPException, SocketError) as e:\n # This includes IncompleteRead.\n raise ProtocolError('Connection broken: %r' % e, e)\n\n except Exception:\n # The response may not be closed but we're not going to use it anymore\n # so close it now to ensure that the connection is released back to the pool.\n if self._original_response and not self._original_response.isclosed():\n self._original_response.close()\n\n # Closing the response may not actually be sufficient to close\n # everything, so if we have a hold of the connection close that\n # too.\n if self._connection is not None:\n self._connection.close()\n\n raise\n finally:\n if self._original_response and self._original_response.isclosed():\n self.release_conn()\n\n def read(self, amt=None, decode_content=None, cache_content=False):\n \"\"\"\n Similar to :meth:`httplib.HTTPResponse.read`, but with two additional\n parameters: ``decode_content`` and ``cache_content``.\n\n :param amt:\n How much of the content to read. If specified, caching is skipped\n because it doesn't make sense to cache partial content as the full\n response.\n\n :param decode_content:\n If True, will attempt to decode the body based on the\n 'content-encoding' header.\n\n :param cache_content:\n If True, will save the returned data such that the same result is\n returned despite of the state of the underlying file object. This\n is useful if you want the ``.data`` property to continue working\n after having ``.read()`` the file object. (Overridden if ``amt`` is\n set.)\n \"\"\"\n self._init_decoder()\n if decode_content is None:\n decode_content = self.decode_content\n\n if self._fp is None:\n return\n\n flush_decoder = False\n data = None\n\n with self._error_catcher():\n if amt is None:\n # cStringIO doesn't like amt=None\n data = self._fp.read()\n flush_decoder = True\n else:\n cache_content = False\n data = self._fp.read(amt)\n if amt != 0 and not data: # Platform-specific: Buggy versions of Python.\n # Close the connection when no data is returned\n #\n # This is redundant to what httplib/http.client _should_\n # already do. However, versions of python released before\n # December 15, 2012 (http://bugs.python.org/issue16298) do\n # not properly close the connection in all cases. 
There is\n # no harm in redundantly calling close.\n self._fp.close()\n flush_decoder = True\n\n if data:\n self._fp_bytes_read += len(data)\n\n data = self._decode(data, decode_content, flush_decoder)\n\n if cache_content:\n self._body = data\n\n return data\n\n def stream(self, amt=2**16, decode_content=None):\n \"\"\"\n A generator wrapper for the read() method. A call will block until\n ``amt`` bytes have been read from the connection or until the\n connection is closed.\n\n :param amt:\n How much of the content to read. The generator will return up to\n much data per iteration, but may return less. This is particularly\n likely when using compressed data. However, the empty string will\n never be returned.\n\n :param decode_content:\n If True, will attempt to decode the body based on the\n 'content-encoding' header.\n \"\"\"\n if self.chunked:\n for line in self.read_chunked(amt, decode_content=decode_content):\n yield line\n else:\n while not is_fp_closed(self._fp):\n data = self.read(amt=amt, decode_content=decode_content)\n\n if data:\n yield data\n\n @classmethod\n def from_httplib(ResponseCls, r, **response_kw):\n \"\"\"\n Given an :class:`httplib.HTTPResponse` instance ``r``, return a\n corresponding :class:`urllib3.response.HTTPResponse` object.\n\n Remaining parameters are passed to the HTTPResponse constructor, along\n with ``original_response=r``.\n \"\"\"\n headers = r.msg\n\n if not isinstance(headers, HTTPHeaderDict):\n if PY3: # Python 3\n headers = HTTPHeaderDict(headers.items())\n else: # Python 2\n headers = HTTPHeaderDict.from_httplib(headers)\n\n # HTTPResponse objects in Python 3 don't have a .strict attribute\n strict = getattr(r, 'strict', 0)\n resp = ResponseCls(body=r,\n headers=headers,\n status=r.status,\n version=r.version,\n reason=r.reason,\n strict=strict,\n original_response=r,\n **response_kw)\n return resp\n\n # Backwards-compatibility methods for httplib.HTTPResponse\n def getheaders(self):\n return self.headers\n\n def getheader(self, name, default=None):\n return self.headers.get(name, default)\n\n # Overrides from io.IOBase\n def close(self):\n if not self.closed:\n self._fp.close()\n\n @property\n def closed(self):\n if self._fp is None:\n return True\n elif hasattr(self._fp, 'closed'):\n return self._fp.closed\n elif hasattr(self._fp, 'isclosed'): # Python 2\n return self._fp.isclosed()\n else:\n return True\n\n def fileno(self):\n if self._fp is None:\n raise IOError(\"HTTPResponse has no file to get a fileno from\")\n elif hasattr(self._fp, \"fileno\"):\n return self._fp.fileno()\n else:\n raise IOError(\"The file-like object this HTTPResponse is wrapped \"\n \"around has no file descriptor\")\n\n def flush(self):\n if self._fp is not None and hasattr(self._fp, 'flush'):\n return self._fp.flush()\n\n def readable(self):\n # This method is required for `io` module compatibility.\n return True\n\n def readinto(self, b):\n # This method is required for `io` module compatibility.\n temp = self.read(len(b))\n if len(temp) == 0:\n return 0\n else:\n b[:len(temp)] = temp\n return len(temp)\n\n def _update_chunk_length(self):\n # First, we'll figure out length of a chunk and then\n # we'll try to read it from socket.\n if self.chunk_left is not None:\n return\n line = self._fp.fp.readline()\n line = line.split(b';', 1)[0]\n try:\n self.chunk_left = int(line, 16)\n except ValueError:\n # Invalid chunked protocol response, abort.\n self.close()\n raise httplib.IncompleteRead(line)\n\n def _handle_chunk(self, amt):\n returned_chunk = None\n if amt is 
None:\n chunk = self._fp._safe_read(self.chunk_left)\n returned_chunk = chunk\n self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.\n self.chunk_left = None\n elif amt < self.chunk_left:\n value = self._fp._safe_read(amt)\n self.chunk_left = self.chunk_left - amt\n returned_chunk = value\n elif amt == self.chunk_left:\n value = self._fp._safe_read(amt)\n self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.\n self.chunk_left = None\n returned_chunk = value\n else: # amt > self.chunk_left\n returned_chunk = self._fp._safe_read(self.chunk_left)\n self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.\n self.chunk_left = None\n return returned_chunk\n\n def read_chunked(self, amt=None, decode_content=None):\n \"\"\"\n Similar to :meth:`HTTPResponse.read`, but with an additional\n parameter: ``decode_content``.\n\n :param decode_content:\n If True, will attempt to decode the body based on the\n 'content-encoding' header.\n \"\"\"\n self._init_decoder()\n # FIXME: Rewrite this method and make it a class with a better structured logic.\n if not self.chunked:\n raise ResponseNotChunked(\n \"Response is not chunked. \"\n \"Header 'transfer-encoding: chunked' is missing.\")\n\n # Don't bother reading the body of a HEAD request.\n if self._original_response and is_response_to_head(self._original_response):\n self._original_response.close()\n return\n\n with self._error_catcher():\n while True:\n self._update_chunk_length()\n if self.chunk_left == 0:\n break\n chunk = self._handle_chunk(amt)\n decoded = self._decode(chunk, decode_content=decode_content,\n flush_decoder=False)\n if decoded:\n yield decoded\n\n if decode_content:\n # On CPython and PyPy, we should never need to flush the\n # decoder. However, on Jython we *might* need to, so\n # lets defensively do it anyway.\n decoded = self._flush_decoder()\n if decoded: # Platform-specific: Jython.\n yield decoded\n\n # Chunk content ends with \\r\\n: discard it.\n while True:\n line = self._fp.fp.readline()\n if not line:\n # Some sites may not end with '\\r\\n'.\n break\n if line == b'\\r\\n':\n break\n\n # We read everything; close the \"file\".\n if self._original_response:\n self._original_response.close()\n", "path": "urllib3/response.py" } ]
[ { "content": "from __future__ import absolute_import\nfrom contextlib import contextmanager\nimport zlib\nimport io\nfrom socket import timeout as SocketTimeout\nfrom socket import error as SocketError\n\nfrom ._collections import HTTPHeaderDict\nfrom .exceptions import (\n ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked\n)\nfrom .packages.six import string_types as basestring, binary_type, PY3\nfrom .packages.six.moves import http_client as httplib\nfrom .connection import HTTPException, BaseSSLError\nfrom .util.response import is_fp_closed, is_response_to_head\n\n\nclass DeflateDecoder(object):\n\n def __init__(self):\n self._first_try = True\n self._data = binary_type()\n self._obj = zlib.decompressobj()\n\n def __getattr__(self, name):\n return getattr(self._obj, name)\n\n def decompress(self, data):\n if not data:\n return data\n\n if not self._first_try:\n return self._obj.decompress(data)\n\n self._data += data\n try:\n return self._obj.decompress(data)\n except zlib.error:\n self._first_try = False\n self._obj = zlib.decompressobj(-zlib.MAX_WBITS)\n try:\n return self.decompress(self._data)\n finally:\n self._data = None\n\n\nclass GzipDecoder(object):\n\n def __init__(self):\n self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)\n\n def __getattr__(self, name):\n return getattr(self._obj, name)\n\n def decompress(self, data):\n if not data:\n return data\n return self._obj.decompress(data)\n\n\ndef _get_decoder(mode):\n if mode == 'gzip':\n return GzipDecoder()\n\n return DeflateDecoder()\n\n\nclass HTTPResponse(io.IOBase):\n \"\"\"\n HTTP Response container.\n\n Backwards-compatible to httplib's HTTPResponse but the response ``body`` is\n loaded and decoded on-demand when the ``data`` property is accessed. This\n class is also compatible with the Python standard library's :mod:`io`\n module, and can hence be treated as a readable object in the context of that\n framework.\n\n Extra parameters for behaviour not present in httplib.HTTPResponse:\n\n :param preload_content:\n If True, the response's body will be preloaded during construction.\n\n :param decode_content:\n If True, attempts to decode specific content-encoding's based on headers\n (like 'gzip' and 'deflate') will be skipped and raw data will be used\n instead.\n\n :param original_response:\n When this HTTPResponse wrapper is generated from an httplib.HTTPResponse\n object, it's convenient to include the original for debug purposes. 
It's\n otherwise unused.\n \"\"\"\n\n CONTENT_DECODERS = ['gzip', 'deflate']\n REDIRECT_STATUSES = [301, 302, 303, 307, 308]\n\n def __init__(self, body='', headers=None, status=0, version=0, reason=None,\n strict=0, preload_content=True, decode_content=True,\n original_response=None, pool=None, connection=None):\n\n if isinstance(headers, HTTPHeaderDict):\n self.headers = headers\n else:\n self.headers = HTTPHeaderDict(headers)\n self.status = status\n self.version = version\n self.reason = reason\n self.strict = strict\n self.decode_content = decode_content\n\n self._decoder = None\n self._body = None\n self._fp = None\n self._original_response = original_response\n self._fp_bytes_read = 0\n\n if body and isinstance(body, (basestring, binary_type)):\n self._body = body\n\n self._pool = pool\n self._connection = connection\n\n if hasattr(body, 'read'):\n self._fp = body\n\n # Are we using the chunked-style of transfer encoding?\n self.chunked = False\n self.chunk_left = None\n tr_enc = self.headers.get('transfer-encoding', '').lower()\n # Don't incur the penalty of creating a list and then discarding it\n encodings = (enc.strip() for enc in tr_enc.split(\",\"))\n if \"chunked\" in encodings:\n self.chunked = True\n\n # If requested, preload the body.\n if preload_content and not self._body:\n self._body = self.read(decode_content=decode_content)\n\n def get_redirect_location(self):\n \"\"\"\n Should we redirect and where to?\n\n :returns: Truthy redirect location string if we got a redirect status\n code and valid location. ``None`` if redirect status and no\n location. ``False`` if not a redirect status code.\n \"\"\"\n if self.status in self.REDIRECT_STATUSES:\n return self.headers.get('location')\n\n return False\n\n def release_conn(self):\n if not self._pool or not self._connection:\n return\n\n self._pool._put_conn(self._connection)\n self._connection = None\n\n @property\n def data(self):\n # For backwords-compat with earlier urllib3 0.4 and earlier.\n if self._body:\n return self._body\n\n if self._fp:\n return self.read(cache_content=True)\n\n def tell(self):\n \"\"\"\n Obtain the number of bytes pulled over the wire so far. May differ from\n the amount of content returned by :meth:``HTTPResponse.read`` if bytes\n are encoded on the wire (e.g, compressed).\n \"\"\"\n return self._fp_bytes_read\n\n def _init_decoder(self):\n \"\"\"\n Set-up the _decoder attribute if necessar.\n \"\"\"\n # Note: content-encoding value should be case-insensitive, per RFC 7230\n # Section 3.2\n content_encoding = self.headers.get('content-encoding', '').lower()\n if self._decoder is None and content_encoding in self.CONTENT_DECODERS:\n self._decoder = _get_decoder(content_encoding)\n\n def _decode(self, data, decode_content, flush_decoder):\n \"\"\"\n Decode the data passed in and potentially flush the decoder.\n \"\"\"\n try:\n if decode_content and self._decoder:\n data = self._decoder.decompress(data)\n except (IOError, zlib.error) as e:\n content_encoding = self.headers.get('content-encoding', '').lower()\n raise DecodeError(\n \"Received response with content-encoding: %s, but \"\n \"failed to decode it.\" % content_encoding, e)\n\n if flush_decoder and decode_content:\n data += self._flush_decoder()\n\n return data\n\n def _flush_decoder(self):\n \"\"\"\n Flushes the decoder. 
Should only be called if the decoder is actually\n being used.\n \"\"\"\n if self._decoder:\n buf = self._decoder.decompress(b'')\n return buf + self._decoder.flush()\n\n return b''\n\n @contextmanager\n def _error_catcher(self):\n \"\"\"\n Catch low-level python exceptions, instead re-raising urllib3\n variants, so that low-level exceptions are not leaked in the\n high-level api.\n\n On exit, release the connection back to the pool.\n \"\"\"\n try:\n try:\n yield\n\n except SocketTimeout:\n # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but\n # there is yet no clean way to get at it from this context.\n raise ReadTimeoutError(self._pool, None, 'Read timed out.')\n\n except BaseSSLError as e:\n # FIXME: Is there a better way to differentiate between SSLErrors?\n if 'read operation timed out' not in str(e): # Defensive:\n # This shouldn't happen but just in case we're missing an edge\n # case, let's avoid swallowing SSL errors.\n raise\n\n raise ReadTimeoutError(self._pool, None, 'Read timed out.')\n\n except (HTTPException, SocketError) as e:\n # This includes IncompleteRead.\n raise ProtocolError('Connection broken: %r' % e, e)\n\n except Exception:\n # The response may not be closed but we're not going to use it anymore\n # so close it now to ensure that the connection is released back to the pool.\n if self._original_response and not self._original_response.isclosed():\n self._original_response.close()\n\n # Closing the response may not actually be sufficient to close\n # everything, so if we have a hold of the connection close that\n # too.\n if self._connection is not None:\n self._connection.close()\n\n raise\n finally:\n if self._original_response and self._original_response.isclosed():\n self.release_conn()\n\n def read(self, amt=None, decode_content=None, cache_content=False):\n \"\"\"\n Similar to :meth:`httplib.HTTPResponse.read`, but with two additional\n parameters: ``decode_content`` and ``cache_content``.\n\n :param amt:\n How much of the content to read. If specified, caching is skipped\n because it doesn't make sense to cache partial content as the full\n response.\n\n :param decode_content:\n If True, will attempt to decode the body based on the\n 'content-encoding' header.\n\n :param cache_content:\n If True, will save the returned data such that the same result is\n returned despite of the state of the underlying file object. This\n is useful if you want the ``.data`` property to continue working\n after having ``.read()`` the file object. (Overridden if ``amt`` is\n set.)\n \"\"\"\n self._init_decoder()\n if decode_content is None:\n decode_content = self.decode_content\n\n if self._fp is None:\n return\n\n flush_decoder = False\n data = None\n\n with self._error_catcher():\n if amt is None:\n # cStringIO doesn't like amt=None\n data = self._fp.read()\n flush_decoder = True\n else:\n cache_content = False\n data = self._fp.read(amt)\n if amt != 0 and not data: # Platform-specific: Buggy versions of Python.\n # Close the connection when no data is returned\n #\n # This is redundant to what httplib/http.client _should_\n # already do. However, versions of python released before\n # December 15, 2012 (http://bugs.python.org/issue16298) do\n # not properly close the connection in all cases. 
There is\n # no harm in redundantly calling close.\n self._fp.close()\n flush_decoder = True\n\n if data:\n self._fp_bytes_read += len(data)\n\n data = self._decode(data, decode_content, flush_decoder)\n\n if cache_content:\n self._body = data\n\n return data\n\n def stream(self, amt=2**16, decode_content=None):\n \"\"\"\n A generator wrapper for the read() method. A call will block until\n ``amt`` bytes have been read from the connection or until the\n connection is closed.\n\n :param amt:\n How much of the content to read. The generator will return up to\n much data per iteration, but may return less. This is particularly\n likely when using compressed data. However, the empty string will\n never be returned.\n\n :param decode_content:\n If True, will attempt to decode the body based on the\n 'content-encoding' header.\n \"\"\"\n if self.chunked:\n for line in self.read_chunked(amt, decode_content=decode_content):\n yield line\n else:\n while not is_fp_closed(self._fp):\n data = self.read(amt=amt, decode_content=decode_content)\n\n if data:\n yield data\n\n @classmethod\n def from_httplib(ResponseCls, r, **response_kw):\n \"\"\"\n Given an :class:`httplib.HTTPResponse` instance ``r``, return a\n corresponding :class:`urllib3.response.HTTPResponse` object.\n\n Remaining parameters are passed to the HTTPResponse constructor, along\n with ``original_response=r``.\n \"\"\"\n headers = r.msg\n\n if not isinstance(headers, HTTPHeaderDict):\n if PY3: # Python 3\n headers = HTTPHeaderDict(headers.items())\n else: # Python 2\n headers = HTTPHeaderDict.from_httplib(headers)\n\n # HTTPResponse objects in Python 3 don't have a .strict attribute\n strict = getattr(r, 'strict', 0)\n resp = ResponseCls(body=r,\n headers=headers,\n status=r.status,\n version=r.version,\n reason=r.reason,\n strict=strict,\n original_response=r,\n **response_kw)\n return resp\n\n # Backwards-compatibility methods for httplib.HTTPResponse\n def getheaders(self):\n return self.headers\n\n def getheader(self, name, default=None):\n return self.headers.get(name, default)\n\n # Overrides from io.IOBase\n def close(self):\n if not self.closed:\n self._fp.close()\n\n if self._connection is not None:\n self._connection.close()\n\n @property\n def closed(self):\n if self._fp is None:\n return True\n elif hasattr(self._fp, 'closed'):\n return self._fp.closed\n elif hasattr(self._fp, 'isclosed'): # Python 2\n return self._fp.isclosed()\n else:\n return True\n\n def fileno(self):\n if self._fp is None:\n raise IOError(\"HTTPResponse has no file to get a fileno from\")\n elif hasattr(self._fp, \"fileno\"):\n return self._fp.fileno()\n else:\n raise IOError(\"The file-like object this HTTPResponse is wrapped \"\n \"around has no file descriptor\")\n\n def flush(self):\n if self._fp is not None and hasattr(self._fp, 'flush'):\n return self._fp.flush()\n\n def readable(self):\n # This method is required for `io` module compatibility.\n return True\n\n def readinto(self, b):\n # This method is required for `io` module compatibility.\n temp = self.read(len(b))\n if len(temp) == 0:\n return 0\n else:\n b[:len(temp)] = temp\n return len(temp)\n\n def _update_chunk_length(self):\n # First, we'll figure out length of a chunk and then\n # we'll try to read it from socket.\n if self.chunk_left is not None:\n return\n line = self._fp.fp.readline()\n line = line.split(b';', 1)[0]\n try:\n self.chunk_left = int(line, 16)\n except ValueError:\n # Invalid chunked protocol response, abort.\n self.close()\n raise httplib.IncompleteRead(line)\n\n def 
_handle_chunk(self, amt):\n returned_chunk = None\n if amt is None:\n chunk = self._fp._safe_read(self.chunk_left)\n returned_chunk = chunk\n self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.\n self.chunk_left = None\n elif amt < self.chunk_left:\n value = self._fp._safe_read(amt)\n self.chunk_left = self.chunk_left - amt\n returned_chunk = value\n elif amt == self.chunk_left:\n value = self._fp._safe_read(amt)\n self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.\n self.chunk_left = None\n returned_chunk = value\n else: # amt > self.chunk_left\n returned_chunk = self._fp._safe_read(self.chunk_left)\n self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.\n self.chunk_left = None\n return returned_chunk\n\n def read_chunked(self, amt=None, decode_content=None):\n \"\"\"\n Similar to :meth:`HTTPResponse.read`, but with an additional\n parameter: ``decode_content``.\n\n :param decode_content:\n If True, will attempt to decode the body based on the\n 'content-encoding' header.\n \"\"\"\n self._init_decoder()\n # FIXME: Rewrite this method and make it a class with a better structured logic.\n if not self.chunked:\n raise ResponseNotChunked(\n \"Response is not chunked. \"\n \"Header 'transfer-encoding: chunked' is missing.\")\n\n # Don't bother reading the body of a HEAD request.\n if self._original_response and is_response_to_head(self._original_response):\n self._original_response.close()\n return\n\n with self._error_catcher():\n while True:\n self._update_chunk_length()\n if self.chunk_left == 0:\n break\n chunk = self._handle_chunk(amt)\n decoded = self._decode(chunk, decode_content=decode_content,\n flush_decoder=False)\n if decoded:\n yield decoded\n\n if decode_content:\n # On CPython and PyPy, we should never need to flush the\n # decoder. However, on Jython we *might* need to, so\n # lets defensively do it anyway.\n decoded = self._flush_decoder()\n if decoded: # Platform-specific: Jython.\n yield decoded\n\n # Chunk content ends with \\r\\n: discard it.\n while True:\n line = self._fp.fp.readline()\n if not line:\n # Some sites may not end with '\\r\\n'.\n break\n if line == b'\\r\\n':\n break\n\n # We read everything; close the \"file\".\n if self._original_response:\n self._original_response.close()\n", "path": "urllib3/response.py" } ]
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py index 1e6113f447..8895c063b6 100644 --- a/test/with_dummyserver/test_socketlevel.py +++ b/test/with_dummyserver/test_socketlevel.py @@ -433,6 +433,53 @@ def socket_handler(listener): timeout=Timeout(connect=1, read=0.1)) self.assertEqual(len(response.read()), 8) + def test_closing_response_actually_closes_connection(self): + done_closing = Event() + complete = Event() + # The insane use of this variable here is to get around the fact that + # Python 2.6 does not support returning a value from Event.wait(). This + # means we can't tell if an event timed out, so we can't use the timing + # out of the 'complete' event to determine the success or failure of + # the test. Python 2 also doesn't have the nonlocal statement, so we + # can't write directly to this variable, only mutate it. Hence: list. + successful = [] + + def socket_handler(listener): + sock = listener.accept()[0] + + buf = b'' + while not buf.endswith(b'\r\n\r\n'): + buf = sock.recv(65536) + + sock.send(('HTTP/1.1 200 OK\r\n' + 'Content-Type: text/plain\r\n' + 'Content-Length: 0\r\n' + '\r\n').encode('utf-8')) + + # Wait for the socket to close. + done_closing.wait(timeout=1) + + # Look for the empty string to show that the connection got closed. + # Don't get stuck in a timeout. + sock.settimeout(1) + new_data = sock.recv(65536) + self.assertFalse(new_data) + successful.append(True) + sock.close() + complete.set() + + self._start_server(socket_handler) + pool = HTTPConnectionPool(self.host, self.port) + + response = pool.request('GET', '/', retries=0, preload_content=False) + self.assertEqual(response.status, 200) + response.close() + + done_closing.set() # wait until the socket in our pool gets closed + complete.wait(timeout=1) + if not successful: + self.fail("Timed out waiting for connection close") + class TestProxyManager(SocketDummyServerTestCase): diff --git a/urllib3/response.py b/urllib3/response.py index 8f2a1b5c29..f02192124e 100644 --- a/urllib3/response.py +++ b/urllib3/response.py @@ -387,6 +387,9 @@ def close(self): if not self.closed: self._fp.close() + if self._connection is not None: + self._connection.close() + @property def closed(self): if self._fp is None:
holoviz__panel-743
GridSpec objects attribute violates Panel interface contract

The `Panel` class provides an `objects` attribute that is expected to contain a list of child objects:

```python
class Panel(Reactive):
    ...
    objects = param.Parameter(default=[], doc="""
        The list of child objects that make up the layout.""")
```

but `GridSpec` overrides this with a dictionary:

```python
class GridSpec(Panel):
    ...
    objects = param.Dict(default={}, doc="""
        The dictionary of child objects that make up the grid.""")
```

Consequently, any code that is meant to operate on a `Panel.objects` list is likely to break when applied to a `GridSpec`. In particular, `Panel._cleanup`, which is inherited by `GridSpec`, will fail because iterating over the objects will return tuples instead of the actual child objects. For this case, you could probably fix this by overriding `_cleanup` in `GridSpec`, but it would not fix the underlying issue.

Observed using Panel 0.6.0.
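A minimal, self-contained illustration of the breakage (plain Python, not Panel code; the `Child` class and the coordinate keys are invented for the example):

```python
class Child:
    """Stand-in for a Panel component with a list-style cleanup hook."""

    def _cleanup(self):
        print("cleaned up", id(self))


# List-backed ``objects`` (Panel's contract): iteration yields the children.
for obj in [Child(), Child()]:
    obj._cleanup()

# Dict-backed ``objects`` (as in GridSpec): iteration yields the keys, which
# are grid-coordinate tuples, so the same inherited code raises.
grid_objects = {(0, 0, 1, 1): Child(), (1, 0, 2, 1): Child()}
try:
    for obj in grid_objects:
        obj._cleanup()
except AttributeError as err:
    print(err)  # 'tuple' object has no attribute '_cleanup'

# A GridSpec-specific override would have to iterate the values instead.
for obj in grid_objects.values():
    obj._cleanup()
```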
[ { "content": "\"\"\"\nDefines Layout classes which may be used to arrange panes and widgets\nin flexible ways to build complex dashboards.\n\"\"\"\nfrom __future__ import absolute_import, division, unicode_literals\n\nfrom collections import OrderedDict\n\nimport param\nimport numpy as np\n\nfrom bokeh.layouts import grid as _bk_grid\nfrom bokeh.models import (Column as BkColumn, Row as BkRow,\n Spacer as BkSpacer, GridBox as BkGridBox,\n Box as BkBox, Markup as BkMarkup)\nfrom bokeh.models.widgets import Tabs as BkTabs, Panel as BkPanel\n\nfrom .util import param_name, param_reprs\nfrom .viewable import Reactive\n\n\nclass Panel(Reactive):\n \"\"\"\n Abstract baseclass for a layout of Viewables.\n \"\"\"\n\n objects = param.Parameter(default=[], doc=\"\"\"\n The list of child objects that make up the layout.\"\"\")\n\n _bokeh_model = None\n\n __abstract = True\n\n _rename = {'objects': 'children'}\n\n _linked_props = []\n\n def __repr__(self, depth=0, max_depth=10):\n if depth > max_depth:\n return '...'\n spacer = '\\n' + (' ' * (depth+1))\n cls = type(self).__name__\n params = param_reprs(self, ['objects'])\n objs = ['[%d] %s' % (i, obj.__repr__(depth+1)) for i, obj in enumerate(self)]\n if not params and not objs:\n return super(Panel, self).__repr__(depth+1)\n elif not params:\n template = '{cls}{spacer}{objs}'\n elif not objs:\n template = '{cls}({params})'\n else:\n template = '{cls}({params}){spacer}{objs}'\n return template.format(\n cls=cls, params=', '.join(params),\n objs=('%s' % spacer).join(objs), spacer=spacer)\n\n #----------------------------------------------------------------\n # Callback API\n #----------------------------------------------------------------\n\n def _update_model(self, events, msg, root, model, doc, comm=None):\n if self._rename['objects'] in msg:\n old = events['objects'].old\n msg[self._rename['objects']] = self._get_objects(model, old, doc, root, comm)\n\n held = doc._hold\n if comm is None and not held:\n doc.hold()\n model.update(**msg)\n\n from .io import state\n ref = root.ref['id']\n if ref in state._views:\n state._views[ref][0]._preprocess(root)\n\n if comm is None and not held:\n doc.unhold()\n\n #----------------------------------------------------------------\n # Model API\n #----------------------------------------------------------------\n\n def _init_properties(self):\n properties = {k: v for k, v in self.param.get_param_values()\n if v is not None}\n del properties['objects']\n return self._process_param_change(properties)\n\n def _get_objects(self, model, old_objects, doc, root, comm=None):\n \"\"\"\n Returns new child models for the layout while reusing unchanged\n models and cleaning up any dropped objects.\n \"\"\"\n from .pane import panel\n new_models = []\n for i, pane in enumerate(self.objects):\n pane = panel(pane)\n self.objects[i] = pane\n\n for obj in old_objects:\n if obj not in self.objects:\n obj._cleanup(root)\n\n for i, pane in enumerate(self.objects):\n if pane in old_objects:\n child, _ = pane._models[root.ref['id']]\n else:\n child = pane._get_model(doc, root, model, comm)\n new_models.append(child)\n return new_models\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n model = self._bokeh_model()\n if root is None:\n root = model\n objects = self._get_objects(model, [], doc, root, comm)\n props = dict(self._init_properties(), objects=objects)\n model.update(**self._process_param_change(props))\n self._models[root.ref['id']] = (model, parent)\n self._link_props(model, self._linked_props, doc, root, 
comm)\n return model\n\n def _cleanup(self, root):\n super(Panel, self)._cleanup(root)\n for p in self.objects:\n p._cleanup(root)\n\n #----------------------------------------------------------------\n # Public API\n #----------------------------------------------------------------\n\n def select(self, selector=None):\n \"\"\"\n Iterates over the Viewable and any potential children in the\n applying the Selector.\n\n Arguments\n ---------\n selector: type or callable or None\n The selector allows selecting a subset of Viewables by\n declaring a type or callable function to filter by.\n\n Returns\n -------\n viewables: list(Viewable)\n \"\"\"\n objects = super(Panel, self).select(selector)\n for obj in self:\n objects += obj.select(selector)\n return objects\n\n\n\nclass ListPanel(Panel):\n \"\"\"\n An abstract baseclass for Panel objects with list-like children.\n \"\"\"\n\n margin = param.Parameter(default=0, doc=\"\"\"\n Allows to create additional space around the component. May\n be specified as a two-tuple of the form (vertical, horizontal)\n or a four-tuple (top, right, bottom, left).\"\"\")\n\n objects = param.List(default=[], doc=\"\"\"\n The list of child objects that make up the layout.\"\"\")\n\n __abstract = True\n\n def __init__(self, *objects, **params):\n from .pane import panel\n if objects:\n if 'objects' in params:\n raise ValueError(\"A %s's objects should be supplied either \"\n \"as positional arguments or as a keyword, \"\n \"not both.\" % type(self).__name__)\n params['objects'] = [panel(pane) for pane in objects]\n super(Panel, self).__init__(**params)\n\n #----------------------------------------------------------------\n # Public API\n #----------------------------------------------------------------\n\n def __getitem__(self, index):\n return self.objects[index]\n\n def __len__(self):\n return len(self.objects)\n\n def __iter__(self):\n for obj in self.objects:\n yield obj\n\n def __contains__(self, obj):\n return obj in self.objects\n\n def __setitem__(self, index, panes):\n from .pane import panel\n new_objects = list(self)\n if not isinstance(index, slice):\n start, end = index, index+1\n if start > len(self.objects):\n raise IndexError('Index %d out of bounds on %s '\n 'containing %d objects.' %\n (end, type(self).__name__, len(self.objects)))\n panes = [panes]\n else:\n start = index.start or 0\n end = len(self) if index.stop is None else index.stop\n if index.start is None and index.stop is None:\n if not isinstance(panes, list):\n raise IndexError('Expected a list of objects to '\n 'replace the objects in the %s, '\n 'got a %s type.' %\n (type(self).__name__, type(panes).__name__))\n expected = len(panes)\n new_objects = [None]*expected\n end = expected\n elif end > len(self.objects):\n raise IndexError('Index %d out of bounds on %s '\n 'containing %d objects.' %\n (end, type(self).__name__, len(self.objects)))\n else:\n expected = end-start\n if not isinstance(panes, list) or len(panes) != expected:\n raise IndexError('Expected a list of %d objects to set '\n 'on the %s to match the supplied slice.' 
%\n (expected, type(self).__name__))\n for i, pane in zip(range(start, end), panes):\n new_objects[i] = panel(pane)\n\n self.objects = new_objects\n\n def clone(self, *objects, **params):\n \"\"\"\n Makes a copy of the layout sharing the same parameters.\n\n Arguments\n ---------\n objects: Objects to add to the cloned layout.\n params: Keyword arguments override the parameters on the clone.\n\n Returns\n -------\n Cloned layout object\n \"\"\"\n if not objects:\n if 'objects' in params:\n objects = params.pop('objects')\n else:\n objects = self.objects\n elif 'objects' in params:\n raise ValueError(\"A %s's objects should be supplied either \"\n \"as arguments or as a keyword, not both.\"\n % type(self).__name__)\n p = dict(self.param.get_param_values(), **params)\n del p['objects']\n return type(self)(*objects, **params)\n\n def append(self, obj):\n \"\"\"\n Appends an object to the layout.\n\n Arguments\n ---------\n obj (object): Panel component to add to the layout.\n \"\"\"\n from .pane import panel\n new_objects = list(self)\n new_objects.append(panel(obj))\n self.objects = new_objects\n\n def clear(self):\n \"\"\"\n Clears the objects on this layout.\n \"\"\"\n self.objects = []\n\n def extend(self, objects):\n \"\"\"\n Extends the objects on this layout with a list.\n\n Arguments\n ---------\n objects (list): List of panel components to add to the layout.\n \"\"\"\n from .pane import panel\n new_objects = list(self)\n new_objects.extend(list(map(panel, objects)))\n self.objects = new_objects\n\n def insert(self, index, obj):\n \"\"\"\n Inserts an object in the layout at the specified index.\n\n Arguments\n ---------\n index (int): Index at which to insert the object.\n object (object): Panel components to insert in the layout.\n \"\"\"\n from .pane import panel\n new_objects = list(self)\n new_objects.insert(index, panel(obj))\n self.objects = new_objects\n\n def pop(self, index):\n \"\"\"\n Pops an item from the layout by index.\n\n Arguments\n ---------\n index (int): The index of the item to pop from the layout.\n \"\"\"\n new_objects = list(self)\n if index in new_objects:\n index = new_objects.index(index)\n obj = new_objects.pop(index)\n self.objects = new_objects\n return obj\n\n def remove(self, obj):\n \"\"\"\n Removes an object from the layout.\n\n Arguments\n ---------\n obj (object): The object to remove from the layout.\n \"\"\"\n new_objects = list(self)\n new_objects.remove(obj)\n self.objects = new_objects\n\n def reverse(self):\n \"\"\"\n Reverses the objects in the layout.\n \"\"\"\n new_objects = list(self)\n new_objects.reverse()\n self.objects = new_objects\n\n\nclass Row(ListPanel):\n \"\"\"\n Horizontal layout of Viewables.\n \"\"\"\n\n _bokeh_model = BkRow\n\n\nclass Column(ListPanel):\n \"\"\"\n Vertical layout of Viewables.\n \"\"\"\n\n _bokeh_model = BkColumn\n\n\n\nclass GridBox(ListPanel):\n \"\"\"\n List-like Grid which wraps depending on the specified number of\n rows or columns.\n \"\"\"\n\n nrows = param.Integer(default=None, bounds=(0, None), doc=\"\"\"\n Number of rows to reflow the layout into.\"\"\")\n\n ncols = param.Integer(default=None, bounds=(0, None), doc=\"\"\"\n Number of columns to reflow the layout into.\"\"\")\n\n _bokeh_model = BkGridBox\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n model = self._bokeh_model()\n if root is None:\n root = model\n objects = self._get_objects(model, [], doc, root, comm)\n grid = _bk_grid(objects, nrows=self.nrows, ncols=self.ncols,\n sizing_mode=self.sizing_mode)\n model.children 
= grid.children\n props = {k: v for k, v in self._init_properties().items()\n if k not in ('nrows', 'ncols')}\n model.update(**self._process_param_change(props))\n self._models[root.ref['id']] = (model, parent)\n self._link_props(model, self._linked_props, doc, root, comm)\n return model\n\n def _update_model(self, events, msg, root, model, doc, comm=None):\n if self._rename['objects'] in msg or 'ncols' in msg or 'nrows' in msg:\n if 'objects' in events:\n old = events['objects'].old\n else:\n old = self.objects\n objects = self._get_objects(model, old, doc, root, comm)\n grid = _bk_grid(objects, nrows=self.nrows, ncols=self.ncols,\n sizing_mode=self.sizing_mode)\n children = grid.children\n msg[self._rename['objects']] = children\n\n held = doc._hold\n if comm is None and not held:\n doc.hold()\n model.update(**{k: v for k, v in msg.items() if k not in ('nrows', 'ncols')})\n\n from .io import state\n ref = root.ref['id']\n if ref in state._views:\n state._views[ref][0]._preprocess(root)\n\n if comm is None and not held:\n doc.unhold()\n\n\n\nclass WidgetBox(ListPanel):\n \"\"\"\n Vertical layout of widgets.\n \"\"\"\n\n _rename = {'objects': 'children', 'horizontal': None}\n\n horizontal = param.Boolean(default=False, doc=\"\"\"Whether to lay out the\n widgets in a Row layout as opposed to a Column layout.\"\"\")\n\n @property\n def _bokeh_model(self):\n return BkRow if self.horizontal else BkColumn\n\n css_classes = param.List(default=['widget-box'], doc=\"\"\"\n CSS classes to apply to the layout.\"\"\")\n\n margin = param.Parameter(default=5, doc=\"\"\"\n Allows to create additional space around the component. May\n be specified as a two-tuple of the form (vertical, horizontal)\n or a four-tuple (top, right, bottom, left).\"\"\")\n\n\nclass Tabs(ListPanel):\n \"\"\"\n Panel of Viewables to be displayed in separate tabs.\n \"\"\"\n\n active = param.Integer(default=0, doc=\"\"\"\n Number of the currently active tab.\"\"\")\n\n closable = param.Boolean(default=False, doc=\"\"\"\n Whether it should be possible to close tabs.\"\"\")\n\n objects = param.List(default=[], doc=\"\"\"\n The list of child objects that make up the tabs.\"\"\")\n\n tabs_location = param.ObjectSelector(\n default='above', objects=['above', 'below', 'left', 'right'], doc=\"\"\"\n The location of the tabs relative to the tab contents.\"\"\")\n\n height = param.Integer(default=None, bounds=(0, None))\n\n width = param.Integer(default=None, bounds=(0, None))\n\n _bokeh_model = BkTabs\n\n _rename = {'objects': 'tabs'}\n\n _linked_props = ['active']\n\n def __init__(self, *items, **params):\n if 'objects' in params:\n if items:\n raise ValueError('Tabs objects should be supplied either '\n 'as positional arguments or as a keyword, '\n 'not both.')\n items = params['objects']\n objects, self._names = self._to_objects_and_names(items)\n super(Tabs, self).__init__(*objects, **params)\n self.param.watch(self._update_names, 'objects')\n # ALERT: Ensure that name update happens first, should be\n # replaced by watch precedence support in param\n self._param_watchers['objects']['value'].reverse()\n\n def _to_object_and_name(self, item):\n from .pane import panel\n if isinstance(item, tuple):\n name, item = item\n else:\n name = getattr(item, 'name', None)\n pane = panel(item, name=name)\n name = param_name(pane.name) if name is None else name\n return pane, name\n\n def _to_objects_and_names(self, items):\n objects, names = [], []\n for item in items:\n pane, name = self._to_object_and_name(item)\n objects.append(pane)\n 
names.append(name)\n return objects, names\n\n def _init_properties(self):\n return {k: v for k, v in self.param.get_param_values()\n if v is not None and k != 'closable'}\n\n #----------------------------------------------------------------\n # Callback API\n #----------------------------------------------------------------\n\n def _update_names(self, event):\n if len(event.new) == len(self._names):\n return\n names = []\n for obj in event.new:\n if obj in event.old:\n index = event.old.index(obj)\n name = self._names[index]\n else:\n name = obj.name\n names.append(name)\n self._names = names\n\n #----------------------------------------------------------------\n # Model API\n #----------------------------------------------------------------\n\n def _update_model(self, events, msg, root, model, doc, comm=None):\n if 'closable' in msg:\n closable = msg.pop('closable')\n for child in model.tabs:\n child.closable = closable\n super(Tabs, self)._update_model(events, msg, root, model, doc, comm)\n\n def _get_objects(self, model, old_objects, doc, root, comm=None):\n \"\"\"\n Returns new child models for the layout while reusing unchanged\n models and cleaning up any dropped objects.\n \"\"\"\n from .pane import panel\n new_models = []\n if len(self._names) != len(self):\n raise ValueError('Tab names do not match objects, ensure '\n 'that the Tabs.objects are not modified '\n 'directly. Found %d names, expected %d.' %\n (len(self._names), len(self)))\n for i, (name, pane) in enumerate(zip(self._names, self)):\n pane = panel(pane, name=name)\n self.objects[i] = pane\n\n for obj in old_objects:\n if obj not in self.objects:\n obj._cleanup(root)\n\n for i, (name, pane) in enumerate(zip(self._names, self)):\n if pane in old_objects:\n child, _ = pane._models[root.ref['id']]\n else:\n child = pane._get_model(doc, root, model, comm)\n child = BkPanel(title=name, name=pane.name, child=child,\n closable=self.closable)\n new_models.append(child)\n return new_models\n\n #----------------------------------------------------------------\n # Public API\n #----------------------------------------------------------------\n\n def __setitem__(self, index, panes):\n new_objects = list(self)\n if not isinstance(index, slice):\n if index > len(self.objects):\n raise IndexError('Index %d out of bounds on %s '\n 'containing %d objects.' %\n (index, type(self).__name__, len(self.objects)))\n start, end = index, index+1\n panes = [panes]\n else:\n start = index.start or 0\n end = len(self.objects) if index.stop is None else index.stop\n if index.start is None and index.stop is None:\n if not isinstance(panes, list):\n raise IndexError('Expected a list of objects to '\n 'replace the objects in the %s, '\n 'got a %s type.' %\n (type(self).__name__, type(panes).__name__))\n expected = len(panes)\n new_objects = [None]*expected\n self._names = [None]*len(panes)\n end = expected\n else:\n expected = end-start\n if end > len(self.objects):\n raise IndexError('Index %d out of bounds on %s '\n 'containing %d objects.' %\n (end, type(self).__name__, len(self.objects)))\n if not isinstance(panes, list) or len(panes) != expected:\n raise IndexError('Expected a list of %d objects to set '\n 'on the %s to match the supplied slice.' 
%\n (expected, type(self).__name__))\n for i, pane in zip(range(start, end), panes):\n new_objects[i], self._names[i] = self._to_object_and_name(pane)\n self.objects = new_objects\n\n def clone(self, *objects, **params):\n \"\"\"\n Makes a copy of the Tabs sharing the same parameters.\n\n Arguments\n ---------\n objects: Objects to add to the cloned Tabs object.\n params: Keyword arguments override the parameters on the clone.\n\n Returns\n -------\n Cloned Tabs object\n \"\"\"\n if not objects:\n if 'objects' in params:\n objects = params.pop('objects')\n else:\n objects = zip(self._names, self.objects)\n elif 'objects' in params:\n raise ValueError('Tabs objects should be supplied either '\n 'as positional arguments or as a keyword, '\n 'not both.')\n p = dict(self.param.get_param_values(), **params)\n del p['objects']\n return type(self)(*objects, **params)\n\n def append(self, pane):\n \"\"\"\n Appends an object to the tabs.\n\n Arguments\n ---------\n obj (object): Panel component to add as a tab.\n \"\"\"\n new_object, new_name = self._to_object_and_name(pane)\n new_objects = list(self)\n new_objects.append(new_object)\n self._names.append(new_name)\n self.objects = new_objects\n\n def clear(self):\n \"\"\"\n Clears the tabs.\n \"\"\"\n self._names = []\n self.objects = []\n\n def extend(self, panes):\n \"\"\"\n Extends the the tabs with a list.\n\n Arguments\n ---------\n objects (list): List of panel components to add as tabs.\n \"\"\"\n new_objects, new_names = self._to_objects_and_names(panes)\n objects = list(self)\n objects.extend(new_objects)\n self._names.extend(new_names)\n self.objects = objects\n\n def insert(self, index, pane):\n \"\"\"\n Inserts an object in the tabs at the specified index.\n\n Arguments\n ---------\n index (int): Index at which to insert the object.\n object (object): Panel components to insert as tabs.\n \"\"\"\n new_object, new_name = self._to_object_and_name(pane)\n new_objects = list(self.objects)\n new_objects.insert(index, new_object)\n self._names.insert(index, new_name)\n self.objects = new_objects\n\n def pop(self, index):\n \"\"\"\n Pops an item from the tabs by index.\n\n Arguments\n ---------\n index (int): The index of the item to pop from the tabs.\n \"\"\"\n new_objects = list(self)\n if index in new_objects:\n index = new_objects.index(index)\n new_objects.pop(index)\n self._names.pop(index)\n self.objects = new_objects\n\n def remove(self, pane):\n \"\"\"\n Removes an object from the tabs.\n\n Arguments\n ---------\n obj (object): The object to remove from the tabs.\n \"\"\"\n new_objects = list(self)\n if pane in new_objects:\n index = new_objects.index(pane)\n new_objects.remove(pane)\n self._names.pop(index)\n self.objects = new_objects\n\n def reverse(self):\n \"\"\"\n Reverses the tabs.\n \"\"\"\n new_objects = list(self)\n new_objects.reverse()\n self._names.reverse()\n self.objects = new_objects\n\n\nclass GridSpec(Panel):\n\n objects = param.Dict(default={}, doc=\"\"\"\n The dictionary of child objects that make up the grid.\"\"\")\n\n mode = param.ObjectSelector(\n default='warn', objects=['warn', 'error', 'override'], doc=\"\"\"\n Whether to warn, error or simply override on overlapping\n assignment.\"\"\")\n\n width = param.Integer(default=600)\n\n height = param.Integer(default=600)\n\n _bokeh_model = BkGridBox\n\n _rename = {'objects': 'children', 'mode': None}\n\n def __init__(self, **params):\n if 'objects' not in params:\n params['objects'] = OrderedDict()\n super(GridSpec, self).__init__(**params)\n\n def 
_init_properties(self):\n properties = super(GridSpec, self)._init_properties()\n if self.sizing_mode not in ['fixed', None]:\n if 'min_width' not in properties and 'width' in properties:\n properties['min_width'] = properties['width']\n if 'min_height' not in properties and 'height' in properties:\n properties['min_height'] = properties['height']\n return properties\n\n def _get_objects(self, model, old_objects, doc, root, comm=None):\n if self.ncols:\n width = int(float(self.width)/self.ncols)\n else:\n width = 0\n\n if self.nrows:\n height = int(float(self.height)/self.nrows)\n else:\n height = 0\n\n children = []\n for (y0, x0, y1, x1), obj in self.objects.items():\n x0 = 0 if x0 is None else x0\n x1 = (self.ncols) if x1 is None else x1\n y0 = 0 if y0 is None else y0\n y1 = (self.nrows) if y1 is None else y1\n r, c, h, w = (y0, x0, y1-y0, x1-x0)\n\n if self.sizing_mode in ['fixed', None]:\n properties = {'width': w*width, 'height': h*height}\n else:\n properties = {'sizing_mode': self.sizing_mode}\n obj.set_param(**properties)\n model = obj._get_model(doc, root, model, comm)\n\n if isinstance(model, BkMarkup) and self.sizing_mode not in ['fixed', None]:\n if model.style is None:\n model.style = {}\n style = {}\n if 'width' not in model.style:\n style['width'] = '100%'\n if 'height' not in model.style:\n style['height'] = '100%'\n if style:\n model.style.update(style)\n\n if isinstance(model, BkBox) and len(model.children) == 1:\n model.children[0].update(**properties)\n else:\n model.update(**properties)\n children.append((model, r, c, h, w))\n\n new_objects = list(self.objects.values())\n if isinstance(old_objects, dict):\n old_objects = list(old_objects.values())\n for old in old_objects:\n if old not in new_objects:\n old._cleanup(root)\n return children\n\n @property\n def _xoffset(self):\n min_xidx = [x0 for (_, x0, _, _) in self.objects if x0 is not None]\n return min(min_xidx) if min_xidx and len(min_xidx) == len(self.objects) else 0\n\n @property\n def _yoffset(self):\n min_yidx = [y0 for (y0, x0, _, _) in self.objects if y0 is not None]\n return min(min_yidx) if min_yidx and len(min_yidx) == len(self.objects) else 0\n\n @property\n def _object_grid(self):\n grid = np.full((self.nrows, self.ncols), None, dtype=object)\n for i, ((y0, x0, y1, x1), obj) in enumerate(self.objects.items()):\n l = 0 if x0 is None else x0\n r = self.ncols if x1 is None else x1\n t = 0 if y0 is None else y0\n b = self.nrows if y1 is None else y1\n for y in range(t, b):\n for x in range(l, r):\n grid[y, x] = {((y0, x0, y1, x1), obj)}\n return grid\n\n #----------------------------------------------------------------\n # Public API\n #----------------------------------------------------------------\n\n @property\n def nrows(self):\n max_yidx = [y1 for (_, _, y1, _) in self.objects if y1 is not None]\n return max(max_yidx) if max_yidx else 0\n\n @property\n def ncols(self):\n max_xidx = [x1 for (_, _, _, x1) in self.objects if x1 is not None]\n return max(max_xidx) if max_xidx else 0\n\n @property\n def grid(self):\n grid = np.zeros((self.nrows, self.ncols), dtype='uint8')\n for (y0, x0, y1, x1) in self.objects:\n x0 = 0 if x0 is None else x0\n x1 = self.ncols if x1 is None else x1\n y0 = 0 if y0 is None else y0\n y1 = self.nrows if y1 is None else y1\n grid[y0:y1, x0:x1] += 1\n return grid\n\n def clone(self, **params):\n \"\"\"\n Makes a copy of the GridSpec sharing the same parameters.\n\n Arguments\n ---------\n params: Keyword arguments override the parameters on the clone.\n\n Returns\n -------\n 
Cloned GridSpec object\n \"\"\"\n p = dict(self.param.get_param_values(), **params)\n return type(self)(**p)\n\n def __iter__(self):\n for obj in self.objects.values():\n yield obj\n\n def __delitem__(self, index, trigger=True):\n if isinstance(index, tuple):\n yidx, xidx = index\n else:\n yidx, xidx = index, slice(None)\n\n subgrid = self._object_grid[yidx, xidx]\n if isinstance(subgrid, np.ndarray):\n deleted = OrderedDict([list(o)[0] for o in subgrid.flatten()])\n else:\n deleted = [list(subgrid)[0][0]]\n if deleted:\n for key in deleted:\n del self.objects[key]\n if trigger:\n self.param.trigger('objects')\n\n def __getitem__(self, index):\n if isinstance(index, tuple):\n yidx, xidx = index\n else:\n yidx, xidx = index, slice(None)\n\n subgrid = self._object_grid[yidx, xidx]\n if isinstance(subgrid, np.ndarray):\n params = dict(self.get_param_values())\n params['objects'] = OrderedDict([list(o)[0] for o in subgrid.flatten()])\n gspec = GridSpec(**params)\n xoff, yoff = gspec._xoffset, gspec._yoffset\n adjusted = []\n for (y0, x0, y1, x1), obj in gspec.objects.items():\n if y0 is not None: y0 -= yoff\n if y1 is not None: y1 -= yoff\n if x0 is not None: x0 -= xoff\n if x1 is not None: x1 -= xoff\n if ((y0, x0, y1, x1), obj) not in adjusted:\n adjusted.append(((y0, x0, y1, x1), obj))\n gspec.objects = OrderedDict(adjusted)\n width_scale = gspec.ncols/float(self.ncols)\n height_scale = gspec.nrows/float(self.nrows)\n if gspec.width:\n gspec.width = int(gspec.width * width_scale)\n if gspec.height:\n gspec.height = int(gspec.height * height_scale)\n if gspec.max_width:\n gspec.max_width = int(gspec.max_width * width_scale)\n if gspec.max_height:\n gspec.max_height = int(gspec.max_height * height_scale)\n return gspec\n else:\n return list(subgrid)[0][1]\n\n def __setitem__(self, index, obj):\n from .pane.base import Pane\n if not isinstance(index, tuple):\n raise IndexError('Must supply a 2D index for GridSpec assignment.')\n\n yidx, xidx = index\n if isinstance(xidx, slice):\n x0, x1 = (xidx.start, xidx.stop)\n else:\n x0, x1 = (xidx, xidx+1)\n\n if isinstance(yidx, slice):\n y0, y1 = (yidx.start, yidx.stop)\n else:\n y0, y1 = (yidx, yidx+1)\n\n l = 0 if x0 is None else x0\n r = self.nrows if x1 is None else x1\n t = 0 if y0 is None else y0\n b = self.ncols if y1 is None else y1\n\n key = (y0, x0, y1, x1)\n overlap = key in self.objects\n clone = self.clone(mode='override')\n if not overlap:\n clone.objects[key] = Pane(obj)\n grid = clone.grid\n else:\n grid = clone.grid\n grid[t:b, l:r] += 1\n\n overlap_grid = grid>1\n if (overlap_grid).any():\n overlapping = ''\n objects = []\n for (yidx, xidx) in zip(*np.where(overlap_grid)):\n old_obj = self[yidx, xidx]\n if old_obj not in objects:\n objects.append(old_obj)\n overlapping += ' (%d, %d): %s\\n\\n' % (yidx, xidx, old_obj)\n overlap_text = ('Specified region overlaps with the following '\n 'existing object(s) in the grid:\\n\\n'+overlapping+\n 'The following shows a view of the grid '\n '(empty: 0, occupied: 1, overlapping: 2):\\n\\n'+\n str(grid.astype('uint8')))\n if self.mode == 'error':\n raise IndexError(overlap_text)\n elif self.mode == 'warn':\n self.param.warning(overlap_text)\n self.__delitem__(index, False)\n self.objects[key] = Pane(obj)\n self.param.trigger('objects')\n\n\nclass Spacer(Reactive):\n \"\"\"Empty object used to control formatting (using positive or negative space)\"\"\"\n\n _bokeh_model = BkSpacer\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n properties = 
self._process_param_change(self._init_properties())\n model = self._bokeh_model(**properties)\n if root is None:\n root = model\n self._models[root.ref['id']] = (model, parent)\n return model\n\n\nclass VSpacer(Spacer):\n \"\"\"\n Spacer which automatically fills all available vertical space.\n \"\"\"\n\n sizing_mode = param.Parameter(default='stretch_height', readonly=True)\n\n\nclass HSpacer(Spacer):\n \"\"\"\n Spacer which automatically fills all available horizontal space.\n \"\"\"\n\n sizing_mode = param.Parameter(default='stretch_width', readonly=True)\n", "path": "panel/layout.py" } ]
[ { "content": "\"\"\"\nDefines Layout classes which may be used to arrange panes and widgets\nin flexible ways to build complex dashboards.\n\"\"\"\nfrom __future__ import absolute_import, division, unicode_literals\n\nfrom collections import OrderedDict\n\nimport param\nimport numpy as np\n\nfrom bokeh.layouts import grid as _bk_grid\nfrom bokeh.models import (Column as BkColumn, Row as BkRow,\n Spacer as BkSpacer, GridBox as BkGridBox,\n Box as BkBox, Markup as BkMarkup)\nfrom bokeh.models.widgets import Tabs as BkTabs, Panel as BkPanel\n\nfrom .util import param_name, param_reprs\nfrom .viewable import Reactive\n\n\nclass Panel(Reactive):\n \"\"\"\n Abstract baseclass for a layout of Viewables.\n \"\"\"\n\n _bokeh_model = None\n\n __abstract = True\n\n _rename = {'objects': 'children'}\n\n _linked_props = []\n\n def __repr__(self, depth=0, max_depth=10):\n if depth > max_depth:\n return '...'\n spacer = '\\n' + (' ' * (depth+1))\n cls = type(self).__name__\n params = param_reprs(self, ['objects'])\n objs = ['[%d] %s' % (i, obj.__repr__(depth+1)) for i, obj in enumerate(self)]\n if not params and not objs:\n return super(Panel, self).__repr__(depth+1)\n elif not params:\n template = '{cls}{spacer}{objs}'\n elif not objs:\n template = '{cls}({params})'\n else:\n template = '{cls}({params}){spacer}{objs}'\n return template.format(\n cls=cls, params=', '.join(params),\n objs=('%s' % spacer).join(objs), spacer=spacer)\n\n #----------------------------------------------------------------\n # Callback API\n #----------------------------------------------------------------\n\n def _update_model(self, events, msg, root, model, doc, comm=None):\n if self._rename['objects'] in msg:\n old = events['objects'].old\n msg[self._rename['objects']] = self._get_objects(model, old, doc, root, comm)\n\n held = doc._hold\n if comm is None and not held:\n doc.hold()\n model.update(**msg)\n\n from .io import state\n ref = root.ref['id']\n if ref in state._views:\n state._views[ref][0]._preprocess(root)\n\n if comm is None and not held:\n doc.unhold()\n\n #----------------------------------------------------------------\n # Model API\n #----------------------------------------------------------------\n\n def _init_properties(self):\n properties = {k: v for k, v in self.param.get_param_values()\n if v is not None}\n del properties['objects']\n return self._process_param_change(properties)\n\n def _get_objects(self, model, old_objects, doc, root, comm=None):\n \"\"\"\n Returns new child models for the layout while reusing unchanged\n models and cleaning up any dropped objects.\n \"\"\"\n from .pane import panel\n new_models = []\n for i, pane in enumerate(self.objects):\n pane = panel(pane)\n self.objects[i] = pane\n\n for obj in old_objects:\n if obj not in self.objects:\n obj._cleanup(root)\n\n for i, pane in enumerate(self.objects):\n if pane in old_objects:\n child, _ = pane._models[root.ref['id']]\n else:\n child = pane._get_model(doc, root, model, comm)\n new_models.append(child)\n return new_models\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n model = self._bokeh_model()\n if root is None:\n root = model\n objects = self._get_objects(model, [], doc, root, comm)\n props = dict(self._init_properties(), objects=objects)\n model.update(**self._process_param_change(props))\n self._models[root.ref['id']] = (model, parent)\n self._link_props(model, self._linked_props, doc, root, comm)\n return model\n\n def _cleanup(self, root):\n super(Panel, self)._cleanup(root)\n for p in self.objects:\n 
p._cleanup(root)\n\n #----------------------------------------------------------------\n # Public API\n #----------------------------------------------------------------\n\n def select(self, selector=None):\n \"\"\"\n Iterates over the Viewable and any potential children in the\n applying the Selector.\n\n Arguments\n ---------\n selector: type or callable or None\n The selector allows selecting a subset of Viewables by\n declaring a type or callable function to filter by.\n\n Returns\n -------\n viewables: list(Viewable)\n \"\"\"\n objects = super(Panel, self).select(selector)\n for obj in self:\n objects += obj.select(selector)\n return objects\n\n\n\nclass ListPanel(Panel):\n \"\"\"\n An abstract baseclass for Panel objects with list-like children.\n \"\"\"\n\n margin = param.Parameter(default=0, doc=\"\"\"\n Allows to create additional space around the component. May\n be specified as a two-tuple of the form (vertical, horizontal)\n or a four-tuple (top, right, bottom, left).\"\"\")\n\n objects = param.List(default=[], doc=\"\"\"\n The list of child objects that make up the layout.\"\"\")\n\n __abstract = True\n\n def __init__(self, *objects, **params):\n from .pane import panel\n if objects:\n if 'objects' in params:\n raise ValueError(\"A %s's objects should be supplied either \"\n \"as positional arguments or as a keyword, \"\n \"not both.\" % type(self).__name__)\n params['objects'] = [panel(pane) for pane in objects]\n super(Panel, self).__init__(**params)\n\n #----------------------------------------------------------------\n # Public API\n #----------------------------------------------------------------\n\n def __getitem__(self, index):\n return self.objects[index]\n\n def __len__(self):\n return len(self.objects)\n\n def __iter__(self):\n for obj in self.objects:\n yield obj\n\n def __contains__(self, obj):\n return obj in self.objects\n\n def __setitem__(self, index, panes):\n from .pane import panel\n new_objects = list(self)\n if not isinstance(index, slice):\n start, end = index, index+1\n if start > len(self.objects):\n raise IndexError('Index %d out of bounds on %s '\n 'containing %d objects.' %\n (end, type(self).__name__, len(self.objects)))\n panes = [panes]\n else:\n start = index.start or 0\n end = len(self) if index.stop is None else index.stop\n if index.start is None and index.stop is None:\n if not isinstance(panes, list):\n raise IndexError('Expected a list of objects to '\n 'replace the objects in the %s, '\n 'got a %s type.' %\n (type(self).__name__, type(panes).__name__))\n expected = len(panes)\n new_objects = [None]*expected\n end = expected\n elif end > len(self.objects):\n raise IndexError('Index %d out of bounds on %s '\n 'containing %d objects.' %\n (end, type(self).__name__, len(self.objects)))\n else:\n expected = end-start\n if not isinstance(panes, list) or len(panes) != expected:\n raise IndexError('Expected a list of %d objects to set '\n 'on the %s to match the supplied slice.' 
%\n (expected, type(self).__name__))\n for i, pane in zip(range(start, end), panes):\n new_objects[i] = panel(pane)\n\n self.objects = new_objects\n\n def clone(self, *objects, **params):\n \"\"\"\n Makes a copy of the layout sharing the same parameters.\n\n Arguments\n ---------\n objects: Objects to add to the cloned layout.\n params: Keyword arguments override the parameters on the clone.\n\n Returns\n -------\n Cloned layout object\n \"\"\"\n if not objects:\n if 'objects' in params:\n objects = params.pop('objects')\n else:\n objects = self.objects\n elif 'objects' in params:\n raise ValueError(\"A %s's objects should be supplied either \"\n \"as arguments or as a keyword, not both.\"\n % type(self).__name__)\n p = dict(self.param.get_param_values(), **params)\n del p['objects']\n return type(self)(*objects, **params)\n\n def append(self, obj):\n \"\"\"\n Appends an object to the layout.\n\n Arguments\n ---------\n obj (object): Panel component to add to the layout.\n \"\"\"\n from .pane import panel\n new_objects = list(self)\n new_objects.append(panel(obj))\n self.objects = new_objects\n\n def clear(self):\n \"\"\"\n Clears the objects on this layout.\n \"\"\"\n self.objects = []\n\n def extend(self, objects):\n \"\"\"\n Extends the objects on this layout with a list.\n\n Arguments\n ---------\n objects (list): List of panel components to add to the layout.\n \"\"\"\n from .pane import panel\n new_objects = list(self)\n new_objects.extend(list(map(panel, objects)))\n self.objects = new_objects\n\n def insert(self, index, obj):\n \"\"\"\n Inserts an object in the layout at the specified index.\n\n Arguments\n ---------\n index (int): Index at which to insert the object.\n object (object): Panel components to insert in the layout.\n \"\"\"\n from .pane import panel\n new_objects = list(self)\n new_objects.insert(index, panel(obj))\n self.objects = new_objects\n\n def pop(self, index):\n \"\"\"\n Pops an item from the layout by index.\n\n Arguments\n ---------\n index (int): The index of the item to pop from the layout.\n \"\"\"\n new_objects = list(self)\n if index in new_objects:\n index = new_objects.index(index)\n obj = new_objects.pop(index)\n self.objects = new_objects\n return obj\n\n def remove(self, obj):\n \"\"\"\n Removes an object from the layout.\n\n Arguments\n ---------\n obj (object): The object to remove from the layout.\n \"\"\"\n new_objects = list(self)\n new_objects.remove(obj)\n self.objects = new_objects\n\n def reverse(self):\n \"\"\"\n Reverses the objects in the layout.\n \"\"\"\n new_objects = list(self)\n new_objects.reverse()\n self.objects = new_objects\n\n\nclass Row(ListPanel):\n \"\"\"\n Horizontal layout of Viewables.\n \"\"\"\n\n _bokeh_model = BkRow\n\n\nclass Column(ListPanel):\n \"\"\"\n Vertical layout of Viewables.\n \"\"\"\n\n _bokeh_model = BkColumn\n\n\n\nclass GridBox(ListPanel):\n \"\"\"\n List-like Grid which wraps depending on the specified number of\n rows or columns.\n \"\"\"\n\n nrows = param.Integer(default=None, bounds=(0, None), doc=\"\"\"\n Number of rows to reflow the layout into.\"\"\")\n\n ncols = param.Integer(default=None, bounds=(0, None), doc=\"\"\"\n Number of columns to reflow the layout into.\"\"\")\n\n _bokeh_model = BkGridBox\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n model = self._bokeh_model()\n if root is None:\n root = model\n objects = self._get_objects(model, [], doc, root, comm)\n grid = _bk_grid(objects, nrows=self.nrows, ncols=self.ncols,\n sizing_mode=self.sizing_mode)\n model.children 
= grid.children\n props = {k: v for k, v in self._init_properties().items()\n if k not in ('nrows', 'ncols')}\n model.update(**self._process_param_change(props))\n self._models[root.ref['id']] = (model, parent)\n self._link_props(model, self._linked_props, doc, root, comm)\n return model\n\n def _update_model(self, events, msg, root, model, doc, comm=None):\n if self._rename['objects'] in msg or 'ncols' in msg or 'nrows' in msg:\n if 'objects' in events:\n old = events['objects'].old\n else:\n old = self.objects\n objects = self._get_objects(model, old, doc, root, comm)\n grid = _bk_grid(objects, nrows=self.nrows, ncols=self.ncols,\n sizing_mode=self.sizing_mode)\n children = grid.children\n msg[self._rename['objects']] = children\n\n held = doc._hold\n if comm is None and not held:\n doc.hold()\n model.update(**{k: v for k, v in msg.items() if k not in ('nrows', 'ncols')})\n\n from .io import state\n ref = root.ref['id']\n if ref in state._views:\n state._views[ref][0]._preprocess(root)\n\n if comm is None and not held:\n doc.unhold()\n\n\n\nclass WidgetBox(ListPanel):\n \"\"\"\n Vertical layout of widgets.\n \"\"\"\n\n _rename = {'objects': 'children', 'horizontal': None}\n\n horizontal = param.Boolean(default=False, doc=\"\"\"Whether to lay out the\n widgets in a Row layout as opposed to a Column layout.\"\"\")\n\n @property\n def _bokeh_model(self):\n return BkRow if self.horizontal else BkColumn\n\n css_classes = param.List(default=['widget-box'], doc=\"\"\"\n CSS classes to apply to the layout.\"\"\")\n\n margin = param.Parameter(default=5, doc=\"\"\"\n Allows to create additional space around the component. May\n be specified as a two-tuple of the form (vertical, horizontal)\n or a four-tuple (top, right, bottom, left).\"\"\")\n\n\nclass Tabs(ListPanel):\n \"\"\"\n Panel of Viewables to be displayed in separate tabs.\n \"\"\"\n\n active = param.Integer(default=0, doc=\"\"\"\n Number of the currently active tab.\"\"\")\n\n closable = param.Boolean(default=False, doc=\"\"\"\n Whether it should be possible to close tabs.\"\"\")\n\n objects = param.List(default=[], doc=\"\"\"\n The list of child objects that make up the tabs.\"\"\")\n\n tabs_location = param.ObjectSelector(\n default='above', objects=['above', 'below', 'left', 'right'], doc=\"\"\"\n The location of the tabs relative to the tab contents.\"\"\")\n\n height = param.Integer(default=None, bounds=(0, None))\n\n width = param.Integer(default=None, bounds=(0, None))\n\n _bokeh_model = BkTabs\n\n _rename = {'objects': 'tabs'}\n\n _linked_props = ['active']\n\n def __init__(self, *items, **params):\n if 'objects' in params:\n if items:\n raise ValueError('Tabs objects should be supplied either '\n 'as positional arguments or as a keyword, '\n 'not both.')\n items = params['objects']\n objects, self._names = self._to_objects_and_names(items)\n super(Tabs, self).__init__(*objects, **params)\n self.param.watch(self._update_names, 'objects')\n # ALERT: Ensure that name update happens first, should be\n # replaced by watch precedence support in param\n self._param_watchers['objects']['value'].reverse()\n\n def _to_object_and_name(self, item):\n from .pane import panel\n if isinstance(item, tuple):\n name, item = item\n else:\n name = getattr(item, 'name', None)\n pane = panel(item, name=name)\n name = param_name(pane.name) if name is None else name\n return pane, name\n\n def _to_objects_and_names(self, items):\n objects, names = [], []\n for item in items:\n pane, name = self._to_object_and_name(item)\n objects.append(pane)\n 
names.append(name)\n return objects, names\n\n def _init_properties(self):\n return {k: v for k, v in self.param.get_param_values()\n if v is not None and k != 'closable'}\n\n #----------------------------------------------------------------\n # Callback API\n #----------------------------------------------------------------\n\n def _update_names(self, event):\n if len(event.new) == len(self._names):\n return\n names = []\n for obj in event.new:\n if obj in event.old:\n index = event.old.index(obj)\n name = self._names[index]\n else:\n name = obj.name\n names.append(name)\n self._names = names\n\n #----------------------------------------------------------------\n # Model API\n #----------------------------------------------------------------\n\n def _update_model(self, events, msg, root, model, doc, comm=None):\n if 'closable' in msg:\n closable = msg.pop('closable')\n for child in model.tabs:\n child.closable = closable\n super(Tabs, self)._update_model(events, msg, root, model, doc, comm)\n\n def _get_objects(self, model, old_objects, doc, root, comm=None):\n \"\"\"\n Returns new child models for the layout while reusing unchanged\n models and cleaning up any dropped objects.\n \"\"\"\n from .pane import panel\n new_models = []\n if len(self._names) != len(self):\n raise ValueError('Tab names do not match objects, ensure '\n 'that the Tabs.objects are not modified '\n 'directly. Found %d names, expected %d.' %\n (len(self._names), len(self)))\n for i, (name, pane) in enumerate(zip(self._names, self)):\n pane = panel(pane, name=name)\n self.objects[i] = pane\n\n for obj in old_objects:\n if obj not in self.objects:\n obj._cleanup(root)\n\n for i, (name, pane) in enumerate(zip(self._names, self)):\n if pane in old_objects:\n child, _ = pane._models[root.ref['id']]\n else:\n child = pane._get_model(doc, root, model, comm)\n child = BkPanel(title=name, name=pane.name, child=child,\n closable=self.closable)\n new_models.append(child)\n return new_models\n\n #----------------------------------------------------------------\n # Public API\n #----------------------------------------------------------------\n\n def __setitem__(self, index, panes):\n new_objects = list(self)\n if not isinstance(index, slice):\n if index > len(self.objects):\n raise IndexError('Index %d out of bounds on %s '\n 'containing %d objects.' %\n (index, type(self).__name__, len(self.objects)))\n start, end = index, index+1\n panes = [panes]\n else:\n start = index.start or 0\n end = len(self.objects) if index.stop is None else index.stop\n if index.start is None and index.stop is None:\n if not isinstance(panes, list):\n raise IndexError('Expected a list of objects to '\n 'replace the objects in the %s, '\n 'got a %s type.' %\n (type(self).__name__, type(panes).__name__))\n expected = len(panes)\n new_objects = [None]*expected\n self._names = [None]*len(panes)\n end = expected\n else:\n expected = end-start\n if end > len(self.objects):\n raise IndexError('Index %d out of bounds on %s '\n 'containing %d objects.' %\n (end, type(self).__name__, len(self.objects)))\n if not isinstance(panes, list) or len(panes) != expected:\n raise IndexError('Expected a list of %d objects to set '\n 'on the %s to match the supplied slice.' 
%\n (expected, type(self).__name__))\n for i, pane in zip(range(start, end), panes):\n new_objects[i], self._names[i] = self._to_object_and_name(pane)\n self.objects = new_objects\n\n def clone(self, *objects, **params):\n \"\"\"\n Makes a copy of the Tabs sharing the same parameters.\n\n Arguments\n ---------\n objects: Objects to add to the cloned Tabs object.\n params: Keyword arguments override the parameters on the clone.\n\n Returns\n -------\n Cloned Tabs object\n \"\"\"\n if not objects:\n if 'objects' in params:\n objects = params.pop('objects')\n else:\n objects = zip(self._names, self.objects)\n elif 'objects' in params:\n raise ValueError('Tabs objects should be supplied either '\n 'as positional arguments or as a keyword, '\n 'not both.')\n p = dict(self.param.get_param_values(), **params)\n del p['objects']\n return type(self)(*objects, **params)\n\n def append(self, pane):\n \"\"\"\n Appends an object to the tabs.\n\n Arguments\n ---------\n obj (object): Panel component to add as a tab.\n \"\"\"\n new_object, new_name = self._to_object_and_name(pane)\n new_objects = list(self)\n new_objects.append(new_object)\n self._names.append(new_name)\n self.objects = new_objects\n\n def clear(self):\n \"\"\"\n Clears the tabs.\n \"\"\"\n self._names = []\n self.objects = []\n\n def extend(self, panes):\n \"\"\"\n Extends the the tabs with a list.\n\n Arguments\n ---------\n objects (list): List of panel components to add as tabs.\n \"\"\"\n new_objects, new_names = self._to_objects_and_names(panes)\n objects = list(self)\n objects.extend(new_objects)\n self._names.extend(new_names)\n self.objects = objects\n\n def insert(self, index, pane):\n \"\"\"\n Inserts an object in the tabs at the specified index.\n\n Arguments\n ---------\n index (int): Index at which to insert the object.\n object (object): Panel components to insert as tabs.\n \"\"\"\n new_object, new_name = self._to_object_and_name(pane)\n new_objects = list(self.objects)\n new_objects.insert(index, new_object)\n self._names.insert(index, new_name)\n self.objects = new_objects\n\n def pop(self, index):\n \"\"\"\n Pops an item from the tabs by index.\n\n Arguments\n ---------\n index (int): The index of the item to pop from the tabs.\n \"\"\"\n new_objects = list(self)\n if index in new_objects:\n index = new_objects.index(index)\n new_objects.pop(index)\n self._names.pop(index)\n self.objects = new_objects\n\n def remove(self, pane):\n \"\"\"\n Removes an object from the tabs.\n\n Arguments\n ---------\n obj (object): The object to remove from the tabs.\n \"\"\"\n new_objects = list(self)\n if pane in new_objects:\n index = new_objects.index(pane)\n new_objects.remove(pane)\n self._names.pop(index)\n self.objects = new_objects\n\n def reverse(self):\n \"\"\"\n Reverses the tabs.\n \"\"\"\n new_objects = list(self)\n new_objects.reverse()\n self._names.reverse()\n self.objects = new_objects\n\n\nclass GridSpec(Panel):\n\n objects = param.Dict(default={}, doc=\"\"\"\n The dictionary of child objects that make up the grid.\"\"\")\n\n mode = param.ObjectSelector(\n default='warn', objects=['warn', 'error', 'override'], doc=\"\"\"\n Whether to warn, error or simply override on overlapping\n assignment.\"\"\")\n\n width = param.Integer(default=600)\n\n height = param.Integer(default=600)\n\n _bokeh_model = BkGridBox\n\n _rename = {'objects': 'children', 'mode': None}\n\n def __init__(self, **params):\n if 'objects' not in params:\n params['objects'] = OrderedDict()\n super(GridSpec, self).__init__(**params)\n\n def 
_init_properties(self):\n properties = super(GridSpec, self)._init_properties()\n if self.sizing_mode not in ['fixed', None]:\n if 'min_width' not in properties and 'width' in properties:\n properties['min_width'] = properties['width']\n if 'min_height' not in properties and 'height' in properties:\n properties['min_height'] = properties['height']\n return properties\n\n def _get_objects(self, model, old_objects, doc, root, comm=None):\n if self.ncols:\n width = int(float(self.width)/self.ncols)\n else:\n width = 0\n\n if self.nrows:\n height = int(float(self.height)/self.nrows)\n else:\n height = 0\n\n children = []\n for (y0, x0, y1, x1), obj in self.objects.items():\n x0 = 0 if x0 is None else x0\n x1 = (self.ncols) if x1 is None else x1\n y0 = 0 if y0 is None else y0\n y1 = (self.nrows) if y1 is None else y1\n r, c, h, w = (y0, x0, y1-y0, x1-x0)\n\n if self.sizing_mode in ['fixed', None]:\n properties = {'width': w*width, 'height': h*height}\n else:\n properties = {'sizing_mode': self.sizing_mode}\n obj.set_param(**properties)\n model = obj._get_model(doc, root, model, comm)\n\n if isinstance(model, BkMarkup) and self.sizing_mode not in ['fixed', None]:\n if model.style is None:\n model.style = {}\n style = {}\n if 'width' not in model.style:\n style['width'] = '100%'\n if 'height' not in model.style:\n style['height'] = '100%'\n if style:\n model.style.update(style)\n\n if isinstance(model, BkBox) and len(model.children) == 1:\n model.children[0].update(**properties)\n else:\n model.update(**properties)\n children.append((model, r, c, h, w))\n\n new_objects = list(self.objects.values())\n if isinstance(old_objects, dict):\n old_objects = list(old_objects.values())\n for old in old_objects:\n if old not in new_objects:\n old._cleanup(root)\n return children\n\n @property\n def _xoffset(self):\n min_xidx = [x0 for (_, x0, _, _) in self.objects if x0 is not None]\n return min(min_xidx) if min_xidx and len(min_xidx) == len(self.objects) else 0\n\n @property\n def _yoffset(self):\n min_yidx = [y0 for (y0, x0, _, _) in self.objects if y0 is not None]\n return min(min_yidx) if min_yidx and len(min_yidx) == len(self.objects) else 0\n\n @property\n def _object_grid(self):\n grid = np.full((self.nrows, self.ncols), None, dtype=object)\n for i, ((y0, x0, y1, x1), obj) in enumerate(self.objects.items()):\n l = 0 if x0 is None else x0\n r = self.ncols if x1 is None else x1\n t = 0 if y0 is None else y0\n b = self.nrows if y1 is None else y1\n for y in range(t, b):\n for x in range(l, r):\n grid[y, x] = {((y0, x0, y1, x1), obj)}\n return grid\n\n #----------------------------------------------------------------\n # Public API\n #----------------------------------------------------------------\n\n @property\n def nrows(self):\n max_yidx = [y1 for (_, _, y1, _) in self.objects if y1 is not None]\n return max(max_yidx) if max_yidx else 0\n\n @property\n def ncols(self):\n max_xidx = [x1 for (_, _, _, x1) in self.objects if x1 is not None]\n return max(max_xidx) if max_xidx else 0\n\n @property\n def grid(self):\n grid = np.zeros((self.nrows, self.ncols), dtype='uint8')\n for (y0, x0, y1, x1) in self.objects:\n x0 = 0 if x0 is None else x0\n x1 = self.ncols if x1 is None else x1\n y0 = 0 if y0 is None else y0\n y1 = self.nrows if y1 is None else y1\n grid[y0:y1, x0:x1] += 1\n return grid\n\n def clone(self, **params):\n \"\"\"\n Makes a copy of the GridSpec sharing the same parameters.\n\n Arguments\n ---------\n params: Keyword arguments override the parameters on the clone.\n\n Returns\n -------\n 
Cloned GridSpec object\n \"\"\"\n p = dict(self.param.get_param_values(), **params)\n return type(self)(**p)\n\n def __iter__(self):\n for obj in self.objects.values():\n yield obj\n\n def __delitem__(self, index, trigger=True):\n if isinstance(index, tuple):\n yidx, xidx = index\n else:\n yidx, xidx = index, slice(None)\n\n subgrid = self._object_grid[yidx, xidx]\n if isinstance(subgrid, np.ndarray):\n deleted = OrderedDict([list(o)[0] for o in subgrid.flatten()])\n else:\n deleted = [list(subgrid)[0][0]]\n if deleted:\n for key in deleted:\n del self.objects[key]\n if trigger:\n self.param.trigger('objects')\n\n def __getitem__(self, index):\n if isinstance(index, tuple):\n yidx, xidx = index\n else:\n yidx, xidx = index, slice(None)\n\n subgrid = self._object_grid[yidx, xidx]\n if isinstance(subgrid, np.ndarray):\n params = dict(self.get_param_values())\n params['objects'] = OrderedDict([list(o)[0] for o in subgrid.flatten()])\n gspec = GridSpec(**params)\n xoff, yoff = gspec._xoffset, gspec._yoffset\n adjusted = []\n for (y0, x0, y1, x1), obj in gspec.objects.items():\n if y0 is not None: y0 -= yoff\n if y1 is not None: y1 -= yoff\n if x0 is not None: x0 -= xoff\n if x1 is not None: x1 -= xoff\n if ((y0, x0, y1, x1), obj) not in adjusted:\n adjusted.append(((y0, x0, y1, x1), obj))\n gspec.objects = OrderedDict(adjusted)\n width_scale = gspec.ncols/float(self.ncols)\n height_scale = gspec.nrows/float(self.nrows)\n if gspec.width:\n gspec.width = int(gspec.width * width_scale)\n if gspec.height:\n gspec.height = int(gspec.height * height_scale)\n if gspec.max_width:\n gspec.max_width = int(gspec.max_width * width_scale)\n if gspec.max_height:\n gspec.max_height = int(gspec.max_height * height_scale)\n return gspec\n else:\n return list(subgrid)[0][1]\n\n def __setitem__(self, index, obj):\n from .pane.base import Pane\n if not isinstance(index, tuple):\n raise IndexError('Must supply a 2D index for GridSpec assignment.')\n\n yidx, xidx = index\n if isinstance(xidx, slice):\n x0, x1 = (xidx.start, xidx.stop)\n else:\n x0, x1 = (xidx, xidx+1)\n\n if isinstance(yidx, slice):\n y0, y1 = (yidx.start, yidx.stop)\n else:\n y0, y1 = (yidx, yidx+1)\n\n l = 0 if x0 is None else x0\n r = self.nrows if x1 is None else x1\n t = 0 if y0 is None else y0\n b = self.ncols if y1 is None else y1\n\n key = (y0, x0, y1, x1)\n overlap = key in self.objects\n clone = self.clone(mode='override')\n if not overlap:\n clone.objects[key] = Pane(obj)\n grid = clone.grid\n else:\n grid = clone.grid\n grid[t:b, l:r] += 1\n\n overlap_grid = grid>1\n if (overlap_grid).any():\n overlapping = ''\n objects = []\n for (yidx, xidx) in zip(*np.where(overlap_grid)):\n old_obj = self[yidx, xidx]\n if old_obj not in objects:\n objects.append(old_obj)\n overlapping += ' (%d, %d): %s\\n\\n' % (yidx, xidx, old_obj)\n overlap_text = ('Specified region overlaps with the following '\n 'existing object(s) in the grid:\\n\\n'+overlapping+\n 'The following shows a view of the grid '\n '(empty: 0, occupied: 1, overlapping: 2):\\n\\n'+\n str(grid.astype('uint8')))\n if self.mode == 'error':\n raise IndexError(overlap_text)\n elif self.mode == 'warn':\n self.param.warning(overlap_text)\n self.__delitem__(index, False)\n self.objects[key] = Pane(obj)\n self.param.trigger('objects')\n\n\nclass Spacer(Reactive):\n \"\"\"Empty object used to control formatting (using positive or negative space)\"\"\"\n\n _bokeh_model = BkSpacer\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n properties = 
self._process_param_change(self._init_properties())\n model = self._bokeh_model(**properties)\n if root is None:\n root = model\n self._models[root.ref['id']] = (model, parent)\n return model\n\n\nclass VSpacer(Spacer):\n \"\"\"\n Spacer which automatically fills all available vertical space.\n \"\"\"\n\n sizing_mode = param.Parameter(default='stretch_height', readonly=True)\n\n\nclass HSpacer(Spacer):\n \"\"\"\n Spacer which automatically fills all available horizontal space.\n \"\"\"\n\n sizing_mode = param.Parameter(default='stretch_width', readonly=True)\n", "path": "panel/layout.py" } ]
diff --git a/panel/layout.py b/panel/layout.py index 7289c9c8e4..301bf0f1d7 100644 --- a/panel/layout.py +++ b/panel/layout.py @@ -24,9 +24,6 @@ class Panel(Reactive): Abstract baseclass for a layout of Viewables. """ - objects = param.Parameter(default=[], doc=""" - The list of child objects that make up the layout.""") - _bokeh_model = None __abstract = True
huggingface__transformers-4916
🐛 TPU Training broken due to recent changes # 🐛 Bug Looks like due to changes in file_utils.py, the TPU Training has become broken. Reverting transformers to a version before https://github.com/huggingface/transformers/commit/2cfb947f59861d5d910f84eba3be57da200b5599 fixes the problem. ## Information Seems like file_utils.py is trying to reinitialize the TPU system right after being imported. This fails because xla_spawn.py has already initialized the TPU. Model I am using (Bert, XLNet ...): roberta (but doesn't matter) Language I am using the model on (English, Chinese ...): English The problem arises when using: * [x] the official example scripts: (give details below) * [ ] my own modified scripts: (give details below) The tasks I am working on is: * [x] an official GLUE/SQUaD task: (give the name) * [ ] my own task or dataset: (give details below) ## To reproduce Steps to reproduce the behavior: With a setup capable of training on TPU, replicating the official language modeling example ``` /transformers/examples$ python xla_spawn.py --num_cores 8 language-modeling/run_language_modeling.py --output_dir=output --model_type=roberta --model_name_or_path=roberta-base --do_train --train_data_file=$TRAIN_FILE --do_eval --eval_data_file=$TEST_FILE --mlm ``` <!-- If you have code snippets, error messages, stack traces please provide them here as well. Important! Use code tags to correctly format your code. See https://help.github.com/en/github/writing-on-github/creating-and-highlighting-code-blocks#syntax-highlighting Do not use screenshots, as they are hard to read and (more importantly) don't allow others to copy-and-paste your code.--> The failure stacktrace- ``` File "/home/saurabh/chat-ai/vendor/transformers/examples/language-modeling/run_language_modeling.py", line 29, in <module> self = reduction.pickle.load(from_parent) from transformers import ( File "/home/saurabh/chat-ai/vendor/transformers/examples/language-modeling/run_language_modeling.py", line 29, in <module> File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/__init__.py", line 23, in <module> from transformers import ( File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/__init__.py", line 23, in <module> from transformers import ( from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/__init__.py", line 23, in <module> File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/configuration_albert.py", line 18, in <modul e> from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/configuration_albert.py", line 18, in <modul e> from .configuration_utils import PretrainedConfig File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/configuration_utils.py", line 25, in <module > from .configuration_albert import ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, AlbertConfig File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/configuration_albert.py", line 18, in <modul e> from .configuration_utils import PretrainedConfig File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/configuration_utils.py", line 25, in <module > from .file_utils import CONFIG_NAME, cached_path, hf_bucket_url, is_remote_url File 
"/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/file_utils.py", line 76, in <module> from .configuration_utils import PretrainedConfig from .file_utils import CONFIG_NAME, cached_path, hf_bucket_url, is_remote_url File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/configuration_utils.py", line 25, in <module > File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/file_utils.py", line 76, in <module> tpu_device = xm.xla_device() from .file_utils import CONFIG_NAME, cached_path, hf_bucket_url, is_remote_url File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/torch_xla/core/xla_model.py", line 146, in xla_device File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/transformers/file_utils.py", line 76, in <module> tpu_device = xm.xla_device() File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/torch_xla/core/xla_model.py", line 146, in xla_device tpu_device = xm.xla_device() devkind=[devkind] if devkind is not None else None) File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/torch_xla/core/xla_model.py", line 146, in xla_device File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/torch_xla/core/xla_model.py", line 50, in get_xla_support ed_devices devkind=[devkind] if devkind is not None else None) File "/anaconda3/envs/torch-xla-nightly/lib/python3.6/site-packages/torch_xla/core/xla_model.py", line 50, in get_xla_support ed_devices xla_devices = torch_xla._XLAC._xla_get_devices() devkind=[devkind] if devkind is not None else None) RuntimeError: tensorflow/compiler/xla/xla_client/xrt_computation_client.cc:1245 : Check failed: session.Run({tensorflow::Output (result, 0)}, &outputs) == ::tensorflow::Status::OK() (Already exists: From /job:tpu_worker/replica:0/task:0: 2 root error(s) found. (0) Already exists: Resource localhost/tpu_mesh_common_state/N10tensorflow3tpu21TpuMeshStateInterfaceE [[{{node configure_distributed_tpu/_0}}]] (1) Already exists: Resource localhost/tpu_mesh_common_state/N10tensorflow3tpu21TpuMeshStateInterfaceE [[{{node configure_distributed_tpu/_0}}]] 0 successful operations. 0 derived errors ignored. vs. OK) ``` ## Expected behavior Model trains ## Environment info <!-- You can run the command `transformers-cli env` and copy-and-paste its output below. Don't forget to fill out the missing fields in that output! --> - `transformers` version: 2.11.0 (master) - Platform: Linux-4.9.0-12-amd64-x86_64-with-debian-9.12 - Python version: 3.6.10 - PyTorch version (GPU?): 1.6.0a0+af05158 (False) - Tensorflow version (GPU?): not installed (NA) - Using GPU in script?: no - Using distributed or parallel set-up in script?: yes, 8 way parallelism with xla_spawn.py
[ { "content": "\"\"\"\nUtilities for working with the local dataset cache.\nThis file is adapted from the AllenNLP library at https://github.com/allenai/allennlp\nCopyright by the AllenNLP authors.\n\"\"\"\n\nimport fnmatch\nimport json\nimport logging\nimport os\nimport shutil\nimport sys\nimport tarfile\nimport tempfile\nfrom contextlib import contextmanager\nfrom functools import partial, wraps\nfrom hashlib import sha256\nfrom pathlib import Path\nfrom typing import Optional\nfrom urllib.parse import urlparse\nfrom zipfile import ZipFile, is_zipfile\n\nimport requests\nfrom filelock import FileLock\nfrom tqdm.auto import tqdm\n\nfrom . import __version__\n\n\nlogger = logging.getLogger(__name__) # pylint: disable=invalid-name\n\ntry:\n USE_TF = os.environ.get(\"USE_TF\", \"AUTO\").upper()\n USE_TORCH = os.environ.get(\"USE_TORCH\", \"AUTO\").upper()\n if USE_TORCH in (\"1\", \"ON\", \"YES\", \"AUTO\") and USE_TF not in (\"1\", \"ON\", \"YES\"):\n import torch\n\n _torch_available = True # pylint: disable=invalid-name\n logger.info(\"PyTorch version {} available.\".format(torch.__version__))\n else:\n logger.info(\"Disabling PyTorch because USE_TF is set\")\n _torch_available = False\nexcept ImportError:\n _torch_available = False # pylint: disable=invalid-name\n\ntry:\n USE_TF = os.environ.get(\"USE_TF\", \"AUTO\").upper()\n USE_TORCH = os.environ.get(\"USE_TORCH\", \"AUTO\").upper()\n\n if USE_TF in (\"1\", \"ON\", \"YES\", \"AUTO\") and USE_TORCH not in (\"1\", \"ON\", \"YES\"):\n import tensorflow as tf\n\n assert hasattr(tf, \"__version__\") and int(tf.__version__[0]) >= 2\n _tf_available = True # pylint: disable=invalid-name\n logger.info(\"TensorFlow version {} available.\".format(tf.__version__))\n else:\n logger.info(\"Disabling Tensorflow because USE_TORCH is set\")\n _tf_available = False\nexcept (ImportError, AssertionError):\n _tf_available = False # pylint: disable=invalid-name\n\n\ntry:\n from torch.hub import _get_torch_home\n\n torch_cache_home = _get_torch_home()\nexcept ImportError:\n torch_cache_home = os.path.expanduser(\n os.getenv(\"TORCH_HOME\", os.path.join(os.getenv(\"XDG_CACHE_HOME\", \"~/.cache\"), \"torch\"))\n )\n\n\ntry:\n import torch_xla.core.xla_model as xm\n\n tpu_device = xm.xla_device()\n\n if _torch_available:\n _torch_tpu_available = True # pylint: disable=\n else:\n _torch_tpu_available = False\nexcept ImportError:\n _torch_tpu_available = False\n\n\ndefault_cache_path = os.path.join(torch_cache_home, \"transformers\")\n\n\nPYTORCH_PRETRAINED_BERT_CACHE = os.getenv(\"PYTORCH_PRETRAINED_BERT_CACHE\", default_cache_path)\nPYTORCH_TRANSFORMERS_CACHE = os.getenv(\"PYTORCH_TRANSFORMERS_CACHE\", PYTORCH_PRETRAINED_BERT_CACHE)\nTRANSFORMERS_CACHE = os.getenv(\"TRANSFORMERS_CACHE\", PYTORCH_TRANSFORMERS_CACHE)\n\nWEIGHTS_NAME = \"pytorch_model.bin\"\nTF2_WEIGHTS_NAME = \"tf_model.h5\"\nTF_WEIGHTS_NAME = \"model.ckpt\"\nCONFIG_NAME = \"config.json\"\nMODEL_CARD_NAME = \"modelcard.json\"\n\n\nMULTIPLE_CHOICE_DUMMY_INPUTS = [[[0], [1]], [[0], [1]]]\nDUMMY_INPUTS = [[7, 6, 0, 0, 1], [1, 2, 3, 0, 0], [0, 0, 0, 4, 5]]\nDUMMY_MASK = [[1, 1, 1, 1, 1], [1, 1, 1, 0, 0], [0, 0, 0, 1, 1]]\n\nS3_BUCKET_PREFIX = \"https://s3.amazonaws.com/models.huggingface.co/bert\"\nCLOUDFRONT_DISTRIB_PREFIX = \"https://cdn.huggingface.co\"\n\n\ndef is_torch_available():\n return _torch_available\n\n\ndef is_tf_available():\n return _tf_available\n\n\ndef is_torch_tpu_available():\n return _torch_tpu_available\n\n\ndef add_start_docstrings(*docstr):\n def docstring_decorator(fn):\n 
fn.__doc__ = \"\".join(docstr) + (fn.__doc__ if fn.__doc__ is not None else \"\")\n return fn\n\n return docstring_decorator\n\n\ndef add_start_docstrings_to_callable(*docstr):\n def docstring_decorator(fn):\n class_name = \":class:`~transformers.{}`\".format(fn.__qualname__.split(\".\")[0])\n intro = \" The {} forward method, overrides the :func:`__call__` special method.\".format(class_name)\n note = r\"\"\"\n\n .. note::\n Although the recipe for forward pass needs to be defined within\n this function, one should call the :class:`Module` instance afterwards\n instead of this since the former takes care of running the\n pre and post processing steps while the latter silently ignores them.\n \"\"\"\n fn.__doc__ = intro + note + \"\".join(docstr) + (fn.__doc__ if fn.__doc__ is not None else \"\")\n return fn\n\n return docstring_decorator\n\n\ndef add_end_docstrings(*docstr):\n def docstring_decorator(fn):\n fn.__doc__ = fn.__doc__ + \"\".join(docstr)\n return fn\n\n return docstring_decorator\n\n\ndef is_remote_url(url_or_filename):\n parsed = urlparse(url_or_filename)\n return parsed.scheme in (\"http\", \"https\")\n\n\ndef hf_bucket_url(model_id: str, filename: str, use_cdn=True) -> str:\n \"\"\"\n Resolve a model identifier, and a file name, to a HF-hosted url\n on either S3 or Cloudfront (a Content Delivery Network, or CDN).\n\n Cloudfront is replicated over the globe so downloads are way faster\n for the end user (and it also lowers our bandwidth costs). However, it\n is more aggressively cached by default, so may not always reflect the\n latest changes to the underlying file (default TTL is 24 hours).\n\n In terms of client-side caching from this library, even though\n Cloudfront relays the ETags from S3, using one or the other\n (or switching from one to the other) will affect caching: cached files\n are not shared between the two because the cached file's name contains\n a hash of the url.\n \"\"\"\n endpoint = CLOUDFRONT_DISTRIB_PREFIX if use_cdn else S3_BUCKET_PREFIX\n legacy_format = \"/\" not in model_id\n if legacy_format:\n return f\"{endpoint}/{model_id}-{filename}\"\n else:\n return f\"{endpoint}/{model_id}/{filename}\"\n\n\ndef url_to_filename(url, etag=None):\n \"\"\"\n Convert `url` into a hashed filename in a repeatable way.\n If `etag` is specified, append its hash to the url's, delimited\n by a period.\n If the url ends with .h5 (Keras HDF5 weights) adds '.h5' to the name\n so that TF 2.0 can identify it as a HDF5 file\n (see https://github.com/tensorflow/tensorflow/blob/00fad90125b18b80fe054de1055770cfb8fe4ba3/tensorflow/python/keras/engine/network.py#L1380)\n \"\"\"\n url_bytes = url.encode(\"utf-8\")\n url_hash = sha256(url_bytes)\n filename = url_hash.hexdigest()\n\n if etag:\n etag_bytes = etag.encode(\"utf-8\")\n etag_hash = sha256(etag_bytes)\n filename += \".\" + etag_hash.hexdigest()\n\n if url.endswith(\".h5\"):\n filename += \".h5\"\n\n return filename\n\n\ndef filename_to_url(filename, cache_dir=None):\n \"\"\"\n Return the url and etag (which may be ``None``) stored for `filename`.\n Raise ``EnvironmentError`` if `filename` or its stored metadata do not exist.\n \"\"\"\n if cache_dir is None:\n cache_dir = TRANSFORMERS_CACHE\n if isinstance(cache_dir, Path):\n cache_dir = str(cache_dir)\n\n cache_path = os.path.join(cache_dir, filename)\n if not os.path.exists(cache_path):\n raise EnvironmentError(\"file {} not found\".format(cache_path))\n\n meta_path = cache_path + \".json\"\n if not os.path.exists(meta_path):\n raise EnvironmentError(\"file {} not 
found\".format(meta_path))\n\n with open(meta_path, encoding=\"utf-8\") as meta_file:\n metadata = json.load(meta_file)\n url = metadata[\"url\"]\n etag = metadata[\"etag\"]\n\n return url, etag\n\n\ndef cached_path(\n url_or_filename,\n cache_dir=None,\n force_download=False,\n proxies=None,\n resume_download=False,\n user_agent=None,\n extract_compressed_file=False,\n force_extract=False,\n local_files_only=False,\n) -> Optional[str]:\n \"\"\"\n Given something that might be a URL (or might be a local path),\n determine which. If it's a URL, download the file and cache it, and\n return the path to the cached file. If it's already a local path,\n make sure the file exists and then return the path.\n Args:\n cache_dir: specify a cache directory to save the file to (overwrite the default cache dir).\n force_download: if True, re-dowload the file even if it's already cached in the cache dir.\n resume_download: if True, resume the download if incompletly recieved file is found.\n user_agent: Optional string or dict that will be appended to the user-agent on remote requests.\n extract_compressed_file: if True and the path point to a zip or tar file, extract the compressed\n file in a folder along the archive.\n force_extract: if True when extract_compressed_file is True and the archive was already extracted,\n re-extract the archive and overide the folder where it was extracted.\n\n Return:\n None in case of non-recoverable file (non-existent or inaccessible url + no cache on disk).\n Local path (string) otherwise\n \"\"\"\n if cache_dir is None:\n cache_dir = TRANSFORMERS_CACHE\n if isinstance(url_or_filename, Path):\n url_or_filename = str(url_or_filename)\n if isinstance(cache_dir, Path):\n cache_dir = str(cache_dir)\n\n if is_remote_url(url_or_filename):\n # URL, so get it from the cache (downloading if necessary)\n output_path = get_from_cache(\n url_or_filename,\n cache_dir=cache_dir,\n force_download=force_download,\n proxies=proxies,\n resume_download=resume_download,\n user_agent=user_agent,\n local_files_only=local_files_only,\n )\n elif os.path.exists(url_or_filename):\n # File, and it exists.\n output_path = url_or_filename\n elif urlparse(url_or_filename).scheme == \"\":\n # File, but it doesn't exist.\n raise EnvironmentError(\"file {} not found\".format(url_or_filename))\n else:\n # Something unknown\n raise ValueError(\"unable to parse {} as a URL or as a local path\".format(url_or_filename))\n\n if extract_compressed_file:\n if not is_zipfile(output_path) and not tarfile.is_tarfile(output_path):\n return output_path\n\n # Path where we extract compressed archives\n # We avoid '.' 
in dir name and add \"-extracted\" at the end: \"./model.zip\" => \"./model-zip-extracted/\"\n output_dir, output_file = os.path.split(output_path)\n output_extract_dir_name = output_file.replace(\".\", \"-\") + \"-extracted\"\n output_path_extracted = os.path.join(output_dir, output_extract_dir_name)\n\n if os.path.isdir(output_path_extracted) and os.listdir(output_path_extracted) and not force_extract:\n return output_path_extracted\n\n # Prevent parallel extractions\n lock_path = output_path + \".lock\"\n with FileLock(lock_path):\n shutil.rmtree(output_path_extracted, ignore_errors=True)\n os.makedirs(output_path_extracted)\n if is_zipfile(output_path):\n with ZipFile(output_path, \"r\") as zip_file:\n zip_file.extractall(output_path_extracted)\n zip_file.close()\n elif tarfile.is_tarfile(output_path):\n tar_file = tarfile.open(output_path)\n tar_file.extractall(output_path_extracted)\n tar_file.close()\n else:\n raise EnvironmentError(\"Archive format of {} could not be identified\".format(output_path))\n\n return output_path_extracted\n\n return output_path\n\n\ndef http_get(url, temp_file, proxies=None, resume_size=0, user_agent=None):\n ua = \"transformers/{}; python/{}\".format(__version__, sys.version.split()[0])\n if is_torch_available():\n ua += \"; torch/{}\".format(torch.__version__)\n if is_tf_available():\n ua += \"; tensorflow/{}\".format(tf.__version__)\n if isinstance(user_agent, dict):\n ua += \"; \" + \"; \".join(\"{}/{}\".format(k, v) for k, v in user_agent.items())\n elif isinstance(user_agent, str):\n ua += \"; \" + user_agent\n headers = {\"user-agent\": ua}\n if resume_size > 0:\n headers[\"Range\"] = \"bytes=%d-\" % (resume_size,)\n response = requests.get(url, stream=True, proxies=proxies, headers=headers)\n if response.status_code == 416: # Range not satisfiable\n return\n content_length = response.headers.get(\"Content-Length\")\n total = resume_size + int(content_length) if content_length is not None else None\n progress = tqdm(\n unit=\"B\",\n unit_scale=True,\n total=total,\n initial=resume_size,\n desc=\"Downloading\",\n disable=bool(logger.getEffectiveLevel() == logging.NOTSET),\n )\n for chunk in response.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n progress.update(len(chunk))\n temp_file.write(chunk)\n progress.close()\n\n\ndef get_from_cache(\n url,\n cache_dir=None,\n force_download=False,\n proxies=None,\n etag_timeout=10,\n resume_download=False,\n user_agent=None,\n local_files_only=False,\n) -> Optional[str]:\n \"\"\"\n Given a URL, look for the corresponding file in the local cache.\n If it's not there, download it. 
Then return the path to the cached file.\n\n Return:\n None in case of non-recoverable file (non-existent or inaccessible url + no cache on disk).\n Local path (string) otherwise\n \"\"\"\n if cache_dir is None:\n cache_dir = TRANSFORMERS_CACHE\n if isinstance(cache_dir, Path):\n cache_dir = str(cache_dir)\n\n os.makedirs(cache_dir, exist_ok=True)\n\n etag = None\n if not local_files_only:\n try:\n response = requests.head(url, allow_redirects=True, proxies=proxies, timeout=etag_timeout)\n if response.status_code == 200:\n etag = response.headers.get(\"ETag\")\n except (EnvironmentError, requests.exceptions.Timeout):\n # etag is already None\n pass\n\n filename = url_to_filename(url, etag)\n\n # get cache path to put the file\n cache_path = os.path.join(cache_dir, filename)\n\n # etag is None = we don't have a connection, or url doesn't exist, or is otherwise inaccessible.\n # try to get the last downloaded one\n if etag is None:\n if os.path.exists(cache_path):\n return cache_path\n else:\n matching_files = [\n file\n for file in fnmatch.filter(os.listdir(cache_dir), filename + \".*\")\n if not file.endswith(\".json\") and not file.endswith(\".lock\")\n ]\n if len(matching_files) > 0:\n return os.path.join(cache_dir, matching_files[-1])\n else:\n # If files cannot be found and local_files_only=True,\n # the models might've been found if local_files_only=False\n # Notify the user about that\n if local_files_only:\n raise ValueError(\n \"Cannot find the requested files in the cached path and outgoing traffic has been\"\n \" disabled. To enable model look-ups and downloads online, set 'local_files_only'\"\n \" to False.\"\n )\n return None\n\n # From now on, etag is not None.\n if os.path.exists(cache_path) and not force_download:\n return cache_path\n\n # Prevent parallel downloads of the same file with a lock.\n lock_path = cache_path + \".lock\"\n with FileLock(lock_path):\n\n # If the download just completed while the lock was activated.\n if os.path.exists(cache_path) and not force_download:\n # Even if returning early like here, the lock will be released.\n return cache_path\n\n if resume_download:\n incomplete_path = cache_path + \".incomplete\"\n\n @contextmanager\n def _resumable_file_manager():\n with open(incomplete_path, \"a+b\") as f:\n yield f\n\n temp_file_manager = _resumable_file_manager\n if os.path.exists(incomplete_path):\n resume_size = os.stat(incomplete_path).st_size\n else:\n resume_size = 0\n else:\n temp_file_manager = partial(tempfile.NamedTemporaryFile, dir=cache_dir, delete=False)\n resume_size = 0\n\n # Download to temporary file, then copy to cache dir once finished.\n # Otherwise you get corrupt cache entries if the download gets interrupted.\n with temp_file_manager() as temp_file:\n logger.info(\"%s not found in cache or force_download set to True, downloading to %s\", url, temp_file.name)\n\n http_get(url, temp_file, proxies=proxies, resume_size=resume_size, user_agent=user_agent)\n\n logger.info(\"storing %s in cache at %s\", url, cache_path)\n os.replace(temp_file.name, cache_path)\n\n logger.info(\"creating metadata file for %s\", cache_path)\n meta = {\"url\": url, \"etag\": etag}\n meta_path = cache_path + \".json\"\n with open(meta_path, \"w\") as meta_file:\n json.dump(meta, meta_file)\n\n return cache_path\n\n\nclass cached_property(property):\n \"\"\"\n Descriptor that mimics @property but caches output in member variable.\n\n From tensorflow_datasets\n\n Built-in in functools from Python 3.8.\n \"\"\"\n\n def __get__(self, obj, objtype=None):\n # 
See docs.python.org/3/howto/descriptor.html#properties\n if obj is None:\n return self\n if self.fget is None:\n raise AttributeError(\"unreadable attribute\")\n attr = \"__cached_\" + self.fget.__name__\n cached = getattr(obj, attr, None)\n if cached is None:\n cached = self.fget(obj)\n setattr(obj, attr, cached)\n return cached\n\n\ndef torch_required(func):\n # Chose a different decorator name than in tests so it's clear they are not the same.\n @wraps(func)\n def wrapper(*args, **kwargs):\n if is_torch_available():\n return func(*args, **kwargs)\n else:\n raise ImportError(f\"Method `{func.__name__}` requires PyTorch.\")\n\n return wrapper\n\n\ndef tf_required(func):\n # Chose a different decorator name than in tests so it's clear they are not the same.\n @wraps(func)\n def wrapper(*args, **kwargs):\n if is_tf_available():\n return func(*args, **kwargs)\n else:\n raise ImportError(f\"Method `{func.__name__}` requires TF.\")\n\n return wrapper\n", "path": "src/transformers/file_utils.py" } ]
[ { "content": "\"\"\"\nUtilities for working with the local dataset cache.\nThis file is adapted from the AllenNLP library at https://github.com/allenai/allennlp\nCopyright by the AllenNLP authors.\n\"\"\"\n\nimport fnmatch\nimport json\nimport logging\nimport os\nimport shutil\nimport sys\nimport tarfile\nimport tempfile\nfrom contextlib import contextmanager\nfrom functools import partial, wraps\nfrom hashlib import sha256\nfrom pathlib import Path\nfrom typing import Optional\nfrom urllib.parse import urlparse\nfrom zipfile import ZipFile, is_zipfile\n\nimport requests\nfrom filelock import FileLock\nfrom tqdm.auto import tqdm\n\nfrom . import __version__\n\n\nlogger = logging.getLogger(__name__) # pylint: disable=invalid-name\n\ntry:\n USE_TF = os.environ.get(\"USE_TF\", \"AUTO\").upper()\n USE_TORCH = os.environ.get(\"USE_TORCH\", \"AUTO\").upper()\n if USE_TORCH in (\"1\", \"ON\", \"YES\", \"AUTO\") and USE_TF not in (\"1\", \"ON\", \"YES\"):\n import torch\n\n _torch_available = True # pylint: disable=invalid-name\n logger.info(\"PyTorch version {} available.\".format(torch.__version__))\n else:\n logger.info(\"Disabling PyTorch because USE_TF is set\")\n _torch_available = False\nexcept ImportError:\n _torch_available = False # pylint: disable=invalid-name\n\ntry:\n USE_TF = os.environ.get(\"USE_TF\", \"AUTO\").upper()\n USE_TORCH = os.environ.get(\"USE_TORCH\", \"AUTO\").upper()\n\n if USE_TF in (\"1\", \"ON\", \"YES\", \"AUTO\") and USE_TORCH not in (\"1\", \"ON\", \"YES\"):\n import tensorflow as tf\n\n assert hasattr(tf, \"__version__\") and int(tf.__version__[0]) >= 2\n _tf_available = True # pylint: disable=invalid-name\n logger.info(\"TensorFlow version {} available.\".format(tf.__version__))\n else:\n logger.info(\"Disabling Tensorflow because USE_TORCH is set\")\n _tf_available = False\nexcept (ImportError, AssertionError):\n _tf_available = False # pylint: disable=invalid-name\n\n\ntry:\n from torch.hub import _get_torch_home\n\n torch_cache_home = _get_torch_home()\nexcept ImportError:\n torch_cache_home = os.path.expanduser(\n os.getenv(\"TORCH_HOME\", os.path.join(os.getenv(\"XDG_CACHE_HOME\", \"~/.cache\"), \"torch\"))\n )\n\n\ntry:\n import torch_xla.core.xla_model as xm # noqa: F401\n\n if _torch_available:\n _torch_tpu_available = True # pylint: disable=\n else:\n _torch_tpu_available = False\nexcept ImportError:\n _torch_tpu_available = False\n\n\ndefault_cache_path = os.path.join(torch_cache_home, \"transformers\")\n\n\nPYTORCH_PRETRAINED_BERT_CACHE = os.getenv(\"PYTORCH_PRETRAINED_BERT_CACHE\", default_cache_path)\nPYTORCH_TRANSFORMERS_CACHE = os.getenv(\"PYTORCH_TRANSFORMERS_CACHE\", PYTORCH_PRETRAINED_BERT_CACHE)\nTRANSFORMERS_CACHE = os.getenv(\"TRANSFORMERS_CACHE\", PYTORCH_TRANSFORMERS_CACHE)\n\nWEIGHTS_NAME = \"pytorch_model.bin\"\nTF2_WEIGHTS_NAME = \"tf_model.h5\"\nTF_WEIGHTS_NAME = \"model.ckpt\"\nCONFIG_NAME = \"config.json\"\nMODEL_CARD_NAME = \"modelcard.json\"\n\n\nMULTIPLE_CHOICE_DUMMY_INPUTS = [[[0], [1]], [[0], [1]]]\nDUMMY_INPUTS = [[7, 6, 0, 0, 1], [1, 2, 3, 0, 0], [0, 0, 0, 4, 5]]\nDUMMY_MASK = [[1, 1, 1, 1, 1], [1, 1, 1, 0, 0], [0, 0, 0, 1, 1]]\n\nS3_BUCKET_PREFIX = \"https://s3.amazonaws.com/models.huggingface.co/bert\"\nCLOUDFRONT_DISTRIB_PREFIX = \"https://cdn.huggingface.co\"\n\n\ndef is_torch_available():\n return _torch_available\n\n\ndef is_tf_available():\n return _tf_available\n\n\ndef is_torch_tpu_available():\n return _torch_tpu_available\n\n\ndef add_start_docstrings(*docstr):\n def docstring_decorator(fn):\n fn.__doc__ = 
\"\".join(docstr) + (fn.__doc__ if fn.__doc__ is not None else \"\")\n return fn\n\n return docstring_decorator\n\n\ndef add_start_docstrings_to_callable(*docstr):\n def docstring_decorator(fn):\n class_name = \":class:`~transformers.{}`\".format(fn.__qualname__.split(\".\")[0])\n intro = \" The {} forward method, overrides the :func:`__call__` special method.\".format(class_name)\n note = r\"\"\"\n\n .. note::\n Although the recipe for forward pass needs to be defined within\n this function, one should call the :class:`Module` instance afterwards\n instead of this since the former takes care of running the\n pre and post processing steps while the latter silently ignores them.\n \"\"\"\n fn.__doc__ = intro + note + \"\".join(docstr) + (fn.__doc__ if fn.__doc__ is not None else \"\")\n return fn\n\n return docstring_decorator\n\n\ndef add_end_docstrings(*docstr):\n def docstring_decorator(fn):\n fn.__doc__ = fn.__doc__ + \"\".join(docstr)\n return fn\n\n return docstring_decorator\n\n\ndef is_remote_url(url_or_filename):\n parsed = urlparse(url_or_filename)\n return parsed.scheme in (\"http\", \"https\")\n\n\ndef hf_bucket_url(model_id: str, filename: str, use_cdn=True) -> str:\n \"\"\"\n Resolve a model identifier, and a file name, to a HF-hosted url\n on either S3 or Cloudfront (a Content Delivery Network, or CDN).\n\n Cloudfront is replicated over the globe so downloads are way faster\n for the end user (and it also lowers our bandwidth costs). However, it\n is more aggressively cached by default, so may not always reflect the\n latest changes to the underlying file (default TTL is 24 hours).\n\n In terms of client-side caching from this library, even though\n Cloudfront relays the ETags from S3, using one or the other\n (or switching from one to the other) will affect caching: cached files\n are not shared between the two because the cached file's name contains\n a hash of the url.\n \"\"\"\n endpoint = CLOUDFRONT_DISTRIB_PREFIX if use_cdn else S3_BUCKET_PREFIX\n legacy_format = \"/\" not in model_id\n if legacy_format:\n return f\"{endpoint}/{model_id}-{filename}\"\n else:\n return f\"{endpoint}/{model_id}/{filename}\"\n\n\ndef url_to_filename(url, etag=None):\n \"\"\"\n Convert `url` into a hashed filename in a repeatable way.\n If `etag` is specified, append its hash to the url's, delimited\n by a period.\n If the url ends with .h5 (Keras HDF5 weights) adds '.h5' to the name\n so that TF 2.0 can identify it as a HDF5 file\n (see https://github.com/tensorflow/tensorflow/blob/00fad90125b18b80fe054de1055770cfb8fe4ba3/tensorflow/python/keras/engine/network.py#L1380)\n \"\"\"\n url_bytes = url.encode(\"utf-8\")\n url_hash = sha256(url_bytes)\n filename = url_hash.hexdigest()\n\n if etag:\n etag_bytes = etag.encode(\"utf-8\")\n etag_hash = sha256(etag_bytes)\n filename += \".\" + etag_hash.hexdigest()\n\n if url.endswith(\".h5\"):\n filename += \".h5\"\n\n return filename\n\n\ndef filename_to_url(filename, cache_dir=None):\n \"\"\"\n Return the url and etag (which may be ``None``) stored for `filename`.\n Raise ``EnvironmentError`` if `filename` or its stored metadata do not exist.\n \"\"\"\n if cache_dir is None:\n cache_dir = TRANSFORMERS_CACHE\n if isinstance(cache_dir, Path):\n cache_dir = str(cache_dir)\n\n cache_path = os.path.join(cache_dir, filename)\n if not os.path.exists(cache_path):\n raise EnvironmentError(\"file {} not found\".format(cache_path))\n\n meta_path = cache_path + \".json\"\n if not os.path.exists(meta_path):\n raise EnvironmentError(\"file {} not 
found\".format(meta_path))\n\n with open(meta_path, encoding=\"utf-8\") as meta_file:\n metadata = json.load(meta_file)\n url = metadata[\"url\"]\n etag = metadata[\"etag\"]\n\n return url, etag\n\n\ndef cached_path(\n url_or_filename,\n cache_dir=None,\n force_download=False,\n proxies=None,\n resume_download=False,\n user_agent=None,\n extract_compressed_file=False,\n force_extract=False,\n local_files_only=False,\n) -> Optional[str]:\n \"\"\"\n Given something that might be a URL (or might be a local path),\n determine which. If it's a URL, download the file and cache it, and\n return the path to the cached file. If it's already a local path,\n make sure the file exists and then return the path.\n Args:\n cache_dir: specify a cache directory to save the file to (overwrite the default cache dir).\n force_download: if True, re-dowload the file even if it's already cached in the cache dir.\n resume_download: if True, resume the download if incompletly recieved file is found.\n user_agent: Optional string or dict that will be appended to the user-agent on remote requests.\n extract_compressed_file: if True and the path point to a zip or tar file, extract the compressed\n file in a folder along the archive.\n force_extract: if True when extract_compressed_file is True and the archive was already extracted,\n re-extract the archive and overide the folder where it was extracted.\n\n Return:\n None in case of non-recoverable file (non-existent or inaccessible url + no cache on disk).\n Local path (string) otherwise\n \"\"\"\n if cache_dir is None:\n cache_dir = TRANSFORMERS_CACHE\n if isinstance(url_or_filename, Path):\n url_or_filename = str(url_or_filename)\n if isinstance(cache_dir, Path):\n cache_dir = str(cache_dir)\n\n if is_remote_url(url_or_filename):\n # URL, so get it from the cache (downloading if necessary)\n output_path = get_from_cache(\n url_or_filename,\n cache_dir=cache_dir,\n force_download=force_download,\n proxies=proxies,\n resume_download=resume_download,\n user_agent=user_agent,\n local_files_only=local_files_only,\n )\n elif os.path.exists(url_or_filename):\n # File, and it exists.\n output_path = url_or_filename\n elif urlparse(url_or_filename).scheme == \"\":\n # File, but it doesn't exist.\n raise EnvironmentError(\"file {} not found\".format(url_or_filename))\n else:\n # Something unknown\n raise ValueError(\"unable to parse {} as a URL or as a local path\".format(url_or_filename))\n\n if extract_compressed_file:\n if not is_zipfile(output_path) and not tarfile.is_tarfile(output_path):\n return output_path\n\n # Path where we extract compressed archives\n # We avoid '.' 
in dir name and add \"-extracted\" at the end: \"./model.zip\" => \"./model-zip-extracted/\"\n output_dir, output_file = os.path.split(output_path)\n output_extract_dir_name = output_file.replace(\".\", \"-\") + \"-extracted\"\n output_path_extracted = os.path.join(output_dir, output_extract_dir_name)\n\n if os.path.isdir(output_path_extracted) and os.listdir(output_path_extracted) and not force_extract:\n return output_path_extracted\n\n # Prevent parallel extractions\n lock_path = output_path + \".lock\"\n with FileLock(lock_path):\n shutil.rmtree(output_path_extracted, ignore_errors=True)\n os.makedirs(output_path_extracted)\n if is_zipfile(output_path):\n with ZipFile(output_path, \"r\") as zip_file:\n zip_file.extractall(output_path_extracted)\n zip_file.close()\n elif tarfile.is_tarfile(output_path):\n tar_file = tarfile.open(output_path)\n tar_file.extractall(output_path_extracted)\n tar_file.close()\n else:\n raise EnvironmentError(\"Archive format of {} could not be identified\".format(output_path))\n\n return output_path_extracted\n\n return output_path\n\n\ndef http_get(url, temp_file, proxies=None, resume_size=0, user_agent=None):\n ua = \"transformers/{}; python/{}\".format(__version__, sys.version.split()[0])\n if is_torch_available():\n ua += \"; torch/{}\".format(torch.__version__)\n if is_tf_available():\n ua += \"; tensorflow/{}\".format(tf.__version__)\n if isinstance(user_agent, dict):\n ua += \"; \" + \"; \".join(\"{}/{}\".format(k, v) for k, v in user_agent.items())\n elif isinstance(user_agent, str):\n ua += \"; \" + user_agent\n headers = {\"user-agent\": ua}\n if resume_size > 0:\n headers[\"Range\"] = \"bytes=%d-\" % (resume_size,)\n response = requests.get(url, stream=True, proxies=proxies, headers=headers)\n if response.status_code == 416: # Range not satisfiable\n return\n content_length = response.headers.get(\"Content-Length\")\n total = resume_size + int(content_length) if content_length is not None else None\n progress = tqdm(\n unit=\"B\",\n unit_scale=True,\n total=total,\n initial=resume_size,\n desc=\"Downloading\",\n disable=bool(logger.getEffectiveLevel() == logging.NOTSET),\n )\n for chunk in response.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n progress.update(len(chunk))\n temp_file.write(chunk)\n progress.close()\n\n\ndef get_from_cache(\n url,\n cache_dir=None,\n force_download=False,\n proxies=None,\n etag_timeout=10,\n resume_download=False,\n user_agent=None,\n local_files_only=False,\n) -> Optional[str]:\n \"\"\"\n Given a URL, look for the corresponding file in the local cache.\n If it's not there, download it. 
Then return the path to the cached file.\n\n Return:\n None in case of non-recoverable file (non-existent or inaccessible url + no cache on disk).\n Local path (string) otherwise\n \"\"\"\n if cache_dir is None:\n cache_dir = TRANSFORMERS_CACHE\n if isinstance(cache_dir, Path):\n cache_dir = str(cache_dir)\n\n os.makedirs(cache_dir, exist_ok=True)\n\n etag = None\n if not local_files_only:\n try:\n response = requests.head(url, allow_redirects=True, proxies=proxies, timeout=etag_timeout)\n if response.status_code == 200:\n etag = response.headers.get(\"ETag\")\n except (EnvironmentError, requests.exceptions.Timeout):\n # etag is already None\n pass\n\n filename = url_to_filename(url, etag)\n\n # get cache path to put the file\n cache_path = os.path.join(cache_dir, filename)\n\n # etag is None = we don't have a connection, or url doesn't exist, or is otherwise inaccessible.\n # try to get the last downloaded one\n if etag is None:\n if os.path.exists(cache_path):\n return cache_path\n else:\n matching_files = [\n file\n for file in fnmatch.filter(os.listdir(cache_dir), filename + \".*\")\n if not file.endswith(\".json\") and not file.endswith(\".lock\")\n ]\n if len(matching_files) > 0:\n return os.path.join(cache_dir, matching_files[-1])\n else:\n # If files cannot be found and local_files_only=True,\n # the models might've been found if local_files_only=False\n # Notify the user about that\n if local_files_only:\n raise ValueError(\n \"Cannot find the requested files in the cached path and outgoing traffic has been\"\n \" disabled. To enable model look-ups and downloads online, set 'local_files_only'\"\n \" to False.\"\n )\n return None\n\n # From now on, etag is not None.\n if os.path.exists(cache_path) and not force_download:\n return cache_path\n\n # Prevent parallel downloads of the same file with a lock.\n lock_path = cache_path + \".lock\"\n with FileLock(lock_path):\n\n # If the download just completed while the lock was activated.\n if os.path.exists(cache_path) and not force_download:\n # Even if returning early like here, the lock will be released.\n return cache_path\n\n if resume_download:\n incomplete_path = cache_path + \".incomplete\"\n\n @contextmanager\n def _resumable_file_manager():\n with open(incomplete_path, \"a+b\") as f:\n yield f\n\n temp_file_manager = _resumable_file_manager\n if os.path.exists(incomplete_path):\n resume_size = os.stat(incomplete_path).st_size\n else:\n resume_size = 0\n else:\n temp_file_manager = partial(tempfile.NamedTemporaryFile, dir=cache_dir, delete=False)\n resume_size = 0\n\n # Download to temporary file, then copy to cache dir once finished.\n # Otherwise you get corrupt cache entries if the download gets interrupted.\n with temp_file_manager() as temp_file:\n logger.info(\"%s not found in cache or force_download set to True, downloading to %s\", url, temp_file.name)\n\n http_get(url, temp_file, proxies=proxies, resume_size=resume_size, user_agent=user_agent)\n\n logger.info(\"storing %s in cache at %s\", url, cache_path)\n os.replace(temp_file.name, cache_path)\n\n logger.info(\"creating metadata file for %s\", cache_path)\n meta = {\"url\": url, \"etag\": etag}\n meta_path = cache_path + \".json\"\n with open(meta_path, \"w\") as meta_file:\n json.dump(meta, meta_file)\n\n return cache_path\n\n\nclass cached_property(property):\n \"\"\"\n Descriptor that mimics @property but caches output in member variable.\n\n From tensorflow_datasets\n\n Built-in in functools from Python 3.8.\n \"\"\"\n\n def __get__(self, obj, objtype=None):\n # 
See docs.python.org/3/howto/descriptor.html#properties\n if obj is None:\n return self\n if self.fget is None:\n raise AttributeError(\"unreadable attribute\")\n attr = \"__cached_\" + self.fget.__name__\n cached = getattr(obj, attr, None)\n if cached is None:\n cached = self.fget(obj)\n setattr(obj, attr, cached)\n return cached\n\n\ndef torch_required(func):\n # Chose a different decorator name than in tests so it's clear they are not the same.\n @wraps(func)\n def wrapper(*args, **kwargs):\n if is_torch_available():\n return func(*args, **kwargs)\n else:\n raise ImportError(f\"Method `{func.__name__}` requires PyTorch.\")\n\n return wrapper\n\n\ndef tf_required(func):\n # Chose a different decorator name than in tests so it's clear they are not the same.\n @wraps(func)\n def wrapper(*args, **kwargs):\n if is_tf_available():\n return func(*args, **kwargs)\n else:\n raise ImportError(f\"Method `{func.__name__}` requires TF.\")\n\n return wrapper\n", "path": "src/transformers/file_utils.py" } ]
diff --git a/src/transformers/file_utils.py b/src/transformers/file_utils.py index a6925aa0827f..433c77ae5add 100644 --- a/src/transformers/file_utils.py +++ b/src/transformers/file_utils.py @@ -71,9 +71,7 @@ try: - import torch_xla.core.xla_model as xm - - tpu_device = xm.xla_device() + import torch_xla.core.xla_model as xm # noqa: F401 if _torch_available: _torch_tpu_available = True # pylint: disable=
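The diff above drops the import-time `xm.xla_device()` call and keeps only the import probe. As a rough sketch of that pattern (not transformers' actual API; `get_tpu_device` is a hypothetical helper added here for illustration), TPU availability can be detected by the import alone, while device creation — which can fail or block when no XLA device is attached — is deferred until a caller actually asks for it:

```python
def is_torch_tpu_available():
    # Availability is decided by whether torch_xla imports at all; no XLA
    # device is created here, so importing the library stays cheap and safe.
    try:
        import torch_xla.core.xla_model as xm  # noqa: F401
    except ImportError:
        return False
    return True


def get_tpu_device():
    # Hypothetical helper: xm.xla_device() is only evaluated once a caller
    # explicitly requests a TPU device, not as a side effect of `import`.
    import torch_xla.core.xla_model as xm
    return xm.xla_device()
```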
mindee__doctr-404
WeasyPrint import error Python 3.7

## 🐛 Bug

When importing weasyprint with python 3.7 I have an error:
`AttributeError: 'OutStream' object has no attribute 'buffer'`*

## To Reproduce

Steps to reproduce the behavior:

`from doctr.models import ocr_predictor` leads to:

```
AttributeError                            Traceback (most recent call last)
<ipython-input-4-19f78ebc9b57> in <module>()
----> 1 from doctr.models import ocr_predictor
      2
      3 # Load predictor
      4 model = ocr_predictor(pretrained=True)

7 frames
/usr/local/lib/python3.7/dist-packages/doctr/__init__.py in <module>()
      1 from .file_utils import is_tf_available, is_torch_available
      2 from .version import __version__  # noqa: F401
----> 3 from . import documents
      4 from . import transforms
      5 from . import utils

/usr/local/lib/python3.7/dist-packages/doctr/documents/__init__.py in <module>()
      1 from .elements import *
----> 2 from .reader import *

/usr/local/lib/python3.7/dist-packages/doctr/documents/reader.py in <module>()
      8 from pathlib import Path
      9 import fitz
---> 10 from weasyprint import HTML
     11 from typing import List, Tuple, Optional, Any, Union, Sequence, Dict
     12

/usr/local/lib/python3.7/dist-packages/weasyprint/__init__.py in <module>()
    321 # Work around circular imports.
    322 from .css import preprocess_stylesheet  # noqa isort:skip
--> 323 from .html import (  # noqa isort:skip
    324     HTML5_UA_COUNTER_STYLE, HTML5_UA_STYLESHEET, HTML5_PH_STYLESHEET,
    325     find_base_url)

/usr/local/lib/python3.7/dist-packages/weasyprint/html.py in <module>()
     21 from .css.counters import CounterStyle
     22 from .formatting_structure import boxes
---> 23 from .images import SVGImage
     24 from .logger import LOGGER
     25 from .urls import get_url_attribute

/usr/local/lib/python3.7/dist-packages/weasyprint/images.py in <module>()
     11 from itertools import cycle
     12
---> 13 import pydyf
     14 from PIL import Image
     15

/usr/local/lib/python3.7/dist-packages/pydyf/__init__.py in <module>()
    402
    403
--> 404 class PDF:
    405     """PDF document."""
    406     def __init__(self):

/usr/local/lib/python3.7/dist-packages/pydyf/__init__.py in PDF()
    506         self.write_line(b'%%EOF', output)
    507
--> 508     def write(self, output=sys.stdout.buffer):
    509         """Write PDF to output.
    510

AttributeError: 'OutStream' object has no attribute 'buffer'
```

## Expected behavior

Nothing, special

## Environment

```
DocTR version: 0.3.0
TensorFlow version: 2.5.0
PyTorch version: 1.9.0+cu102 (torchvision 0.10.0+cu102)
OpenCV version: 4.5.3
OS: Ubuntu 18.04.5 LTS
Python version: 3.7
Is CUDA available (TensorFlow): No
Is CUDA available (PyTorch): No
CUDA runtime version: 11.0.221
GPU models and configuration: Could not collect
Nvidia driver version: Could not collect
```
[ { "content": "# Copyright (C) 2021, Mindee.\n\n# This program is licensed under the Apache License version 2.\n# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.\n\n\"\"\"\nPackage installation setup\n\"\"\"\n\nimport os\nimport re\nfrom pathlib import Path\nimport subprocess\n\nfrom setuptools import find_packages, setup\n\n\nversion = \"0.3.1a0\"\nsha = 'Unknown'\npackage_name = 'doctr'\n\ncwd = Path(__file__).parent.absolute()\n\nif os.getenv('BUILD_VERSION'):\n version = os.getenv('BUILD_VERSION')\nelif sha != 'Unknown':\n try:\n sha = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=cwd).decode('ascii').strip()\n except Exception:\n pass\n version += '+' + sha[:7]\nprint(f\"Building wheel {package_name}-{version}\")\n\nwith open(cwd.joinpath(package_name, 'version.py'), 'w') as f:\n f.write(f\"__version__ = '{version}'\\n\")\n\nwith open('README.md', 'r') as f:\n readme = f.read()\n\n# Borrowed from https://github.com/huggingface/transformers/blob/master/setup.py\n_deps = [\n \"importlib_metadata\",\n \"numpy>=1.16.0\",\n \"scipy>=1.4.0\",\n \"opencv-python>=4.2\",\n \"tensorflow>=2.4.0\",\n \"PyMuPDF>=1.16.0,<1.18.11\",\n \"pyclipper>=1.2.0\",\n \"shapely>=1.6.0\",\n \"matplotlib>=3.1.0\",\n \"mplcursors>=0.3\",\n \"weasyprint>=52.2\",\n \"unidecode>=1.0.0\",\n \"tensorflow-cpu>=2.4.0\",\n \"torch>=1.8.0\",\n \"torchvision>=0.9.0\",\n \"Pillow>=8.0.0,<8.3.0\", # cf. https://github.com/python-pillow/Pillow/issues/5571\n \"tqdm>=4.30.0\",\n \"tensorflow-addons>=0.13.0\"\n]\n\ndeps = {b: a for a, b in (re.findall(r\"^(([^!=<>]+)(?:[!=<>].*)?$)\", x)[0] for x in _deps)}\n\n\ndef deps_list(*pkgs):\n return [deps[pkg] for pkg in pkgs]\n\n\ninstall_requires = [\n deps[\"importlib_metadata\"] + \";python_version<'3.8'\", # importlib_metadata for Python versions that don't have it\n deps[\"numpy\"],\n deps[\"scipy\"],\n deps[\"opencv-python\"],\n deps[\"PyMuPDF\"],\n deps[\"pyclipper\"],\n deps[\"shapely\"],\n deps[\"matplotlib\"],\n deps[\"mplcursors\"],\n deps[\"weasyprint\"],\n deps[\"unidecode\"],\n deps[\"Pillow\"],\n deps[\"tqdm\"],\n]\n\nextras = {}\nextras[\"tf\"] = deps_list(\"tensorflow\", \"tensorflow-addons\")\nextras[\"tf-cpu\"] = deps_list(\"tensorflow-cpu\", \"tensorflow-addons\")\nextras[\"torch\"] = deps_list(\"torch\", \"torchvision\")\nextras[\"all\"] = (\n extras[\"tf\"]\n + extras[\"torch\"]\n)\n\nsetup(\n # Metadata\n name=os.getenv('PKG_INDEX') if os.getenv('PKG_INDEX') else package_name,\n version=version,\n author='François-Guillaume Fernandez, Charles Gaillard',\n author_email='[email protected]',\n description='Extract valuable text information from your documents',\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n url='https://github.com/mindee/doctr',\n download_url='https://github.com/mindee/doctr/tags',\n license='Apache',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n \"Intended Audience :: Education\",\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n ],\n keywords=['OCR', 'deep learning', 'computer vision', 'tensorflow', 'pytorch', 'text detection', 'text recognition'],\n\n # Package info\n 
packages=find_packages(exclude=('test',)),\n zip_safe=True,\n python_requires='>=3.6.0',\n include_package_data=True,\n install_requires=install_requires,\n extras_require=extras,\n package_data={'': ['LICENSE']}\n)\n", "path": "setup.py" } ]
[ { "content": "# Copyright (C) 2021, Mindee.\n\n# This program is licensed under the Apache License version 2.\n# See LICENSE or go to <https://www.apache.org/licenses/LICENSE-2.0.txt> for full license details.\n\n\"\"\"\nPackage installation setup\n\"\"\"\n\nimport os\nimport re\nfrom pathlib import Path\nimport subprocess\n\nfrom setuptools import find_packages, setup\n\n\nversion = \"0.3.1a0\"\nsha = 'Unknown'\npackage_name = 'doctr'\n\ncwd = Path(__file__).parent.absolute()\n\nif os.getenv('BUILD_VERSION'):\n version = os.getenv('BUILD_VERSION')\nelif sha != 'Unknown':\n try:\n sha = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=cwd).decode('ascii').strip()\n except Exception:\n pass\n version += '+' + sha[:7]\nprint(f\"Building wheel {package_name}-{version}\")\n\nwith open(cwd.joinpath(package_name, 'version.py'), 'w') as f:\n f.write(f\"__version__ = '{version}'\\n\")\n\nwith open('README.md', 'r') as f:\n readme = f.read()\n\n# Borrowed from https://github.com/huggingface/transformers/blob/master/setup.py\n_deps = [\n \"importlib_metadata\",\n \"numpy>=1.16.0\",\n \"scipy>=1.4.0\",\n \"opencv-python>=4.2\",\n \"tensorflow>=2.4.0\",\n \"PyMuPDF>=1.16.0,<1.18.11\",\n \"pyclipper>=1.2.0\",\n \"shapely>=1.6.0\",\n \"matplotlib>=3.1.0\",\n \"mplcursors>=0.3\",\n \"weasyprint>=52.2,<53.0\",\n \"unidecode>=1.0.0\",\n \"tensorflow-cpu>=2.4.0\",\n \"torch>=1.8.0\",\n \"torchvision>=0.9.0\",\n \"Pillow>=8.0.0,<8.3.0\", # cf. https://github.com/python-pillow/Pillow/issues/5571\n \"tqdm>=4.30.0\",\n \"tensorflow-addons>=0.13.0\"\n]\n\ndeps = {b: a for a, b in (re.findall(r\"^(([^!=<>]+)(?:[!=<>].*)?$)\", x)[0] for x in _deps)}\n\n\ndef deps_list(*pkgs):\n return [deps[pkg] for pkg in pkgs]\n\n\ninstall_requires = [\n deps[\"importlib_metadata\"] + \";python_version<'3.8'\", # importlib_metadata for Python versions that don't have it\n deps[\"numpy\"],\n deps[\"scipy\"],\n deps[\"opencv-python\"],\n deps[\"PyMuPDF\"],\n deps[\"pyclipper\"],\n deps[\"shapely\"],\n deps[\"matplotlib\"],\n deps[\"mplcursors\"],\n deps[\"weasyprint\"],\n deps[\"unidecode\"],\n deps[\"Pillow\"],\n deps[\"tqdm\"],\n]\n\nextras = {}\nextras[\"tf\"] = deps_list(\"tensorflow\", \"tensorflow-addons\")\nextras[\"tf-cpu\"] = deps_list(\"tensorflow-cpu\", \"tensorflow-addons\")\nextras[\"torch\"] = deps_list(\"torch\", \"torchvision\")\nextras[\"all\"] = (\n extras[\"tf\"]\n + extras[\"torch\"]\n)\n\nsetup(\n # Metadata\n name=os.getenv('PKG_INDEX') if os.getenv('PKG_INDEX') else package_name,\n version=version,\n author='François-Guillaume Fernandez, Charles Gaillard',\n author_email='[email protected]',\n description='Extract valuable text information from your documents',\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n url='https://github.com/mindee/doctr',\n download_url='https://github.com/mindee/doctr/tags',\n license='Apache',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n \"Intended Audience :: Education\",\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n ],\n keywords=['OCR', 'deep learning', 'computer vision', 'tensorflow', 'pytorch', 'text detection', 'text recognition'],\n\n # Package info\n 
packages=find_packages(exclude=('test',)),\n zip_safe=True,\n python_requires='>=3.6.0',\n include_package_data=True,\n install_requires=install_requires,\n extras_require=extras,\n package_data={'': ['LICENSE']}\n)\n", "path": "setup.py" } ]
diff --git a/requirements.txt b/requirements.txt index c0df7a7617..c244b2ba87 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,7 @@ pyclipper>=1.2.0 shapely>=1.6.0 matplotlib>=3.1.0 mplcursors>=0.3 -weasyprint>=52.2 +weasyprint>=52.2,<53.0 unidecode>=1.0.0 tensorflow>=2.4.0 Pillow>=8.0.0 diff --git a/setup.py b/setup.py index 7ccec5a6d4..64abf6f766 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ "shapely>=1.6.0", "matplotlib>=3.1.0", "mplcursors>=0.3", - "weasyprint>=52.2", + "weasyprint>=52.2,<53.0", "unidecode>=1.0.0", "tensorflow-cpu>=2.4.0", "torch>=1.8.0",
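The traceback in the issue above stops inside pydyf at `def write(self, output=sys.stdout.buffer)`. A minimal, self-contained sketch of that failure mode follows — `FakeOutStream` is an invented stand-in for IPython's `OutStream`, not doctr or pydyf code: a default argument that dereferences `sys.stdout.buffer` is evaluated while the class body executes, i.e. at import time, so it breaks under any runtime that replaces `sys.stdout` with an object lacking `.buffer`. Pinning `weasyprint>=52.2,<53.0` as in the diff presumably sidesteps this because the pydyf dependency only arrives with the newer weasyprint releases.

```python
import sys


class FakeOutStream:
    """Invented stand-in for IPython's OutStream: writable, but no `.buffer` attribute."""

    def write(self, text):
        return len(text)


real_stdout = sys.stdout
sys.stdout = FakeOutStream()
try:
    # The default value below is evaluated as the class body runs, i.e. at
    # import time of the module defining it -- the same point at which the
    # traceback above ends.
    class PDF:
        def write(self, output=sys.stdout.buffer):
            output.write(b"%PDF-")
except AttributeError as exc:
    print("reproduced:", exc, file=sys.__stdout__)
finally:
    sys.stdout = real_stdout
```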
qtile__qtile-2674
utils.has_transparency has print statement left in from testing

# Issue description

utils.has_transparency is printing bar colors to stdout.

https://github.com/qtile/qtile/blob/a3dcd5db984f3ab08ef3f89eff86e014dd367ee1/libqtile/utils.py#L127

I would submit a pr myself but my fork is currently a little snafu.

# Qtile version

0.18

# Configuration

in my config I have

```
bar.BAR(
    #other stuff
    background=["#000000","#000000","#000000","#003300"],)
```

This is showing up in stdout as:
`['#000000', '#000000', '#000000', '#003300']`
[ { "content": "# Copyright (c) 2008, Aldo Cortesi. All rights reserved.\n# Copyright (c) 2020, Matt Colligan. All rights reserved.\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nimport asyncio\nimport glob\nimport importlib\nimport os\nimport traceback\nfrom collections import defaultdict\nfrom collections.abc import Sequence\nfrom random import randint\nfrom shutil import which\nfrom typing import List, Tuple, Union\n\ntry:\n from dbus_next import Message, Variant\n from dbus_next.aio import MessageBus\n from dbus_next.constants import BusType, MessageType\n has_dbus = True\nexcept ImportError:\n has_dbus = False\n\nfrom libqtile.log_utils import logger\n\n\nclass QtileError(Exception):\n pass\n\n\ndef lget(o, v):\n try:\n return o[v]\n except (IndexError, TypeError):\n return None\n\n\ndef shuffle_up(lst):\n if len(lst) > 1:\n c = lst[-1]\n lst.remove(c)\n lst.insert(0, c)\n\n\ndef shuffle_down(lst):\n if len(lst) > 1:\n c = lst[0]\n lst.remove(c)\n lst.append(c)\n\n\nColorType = Union[str, Tuple[int, int, int], Tuple[int, int, int, float]]\n\n\ndef rgb(x):\n \"\"\"\n Returns a valid RGBA tuple.\n\n Here are some valid specifcations:\n #ff0000\n with alpha: #ff000080\n ff0000\n with alpha: ff0000.5\n (255, 0, 0)\n with alpha: (255, 0, 0, 0.5)\n \"\"\"\n if isinstance(x, (tuple, list)):\n if len(x) == 4:\n alpha = x[3]\n else:\n alpha = 1\n return (x[0] / 255.0, x[1] / 255.0, x[2] / 255.0, alpha)\n elif isinstance(x, str):\n if x.startswith(\"#\"):\n x = x[1:]\n if \".\" in x:\n x, alpha = x.split(\".\")\n alpha = float(\"0.\" + alpha)\n else:\n alpha = 1\n if len(x) not in (6, 8):\n raise ValueError(\"RGB specifier must be 6 or 8 characters long.\")\n vals = [int(i, 16) for i in (x[0:2], x[2:4], x[4:6])]\n if len(x) == 8:\n alpha = int(x[6:8], 16) / 255.0\n vals.append(alpha)\n return rgb(vals)\n raise ValueError(\"Invalid RGB specifier.\")\n\n\ndef hex(x):\n r, g, b, _ = rgb(x)\n return '#%02x%02x%02x' % (int(r * 255), int(g * 255), int(b * 255))\n\n\ndef has_transparency(colour: Union[ColorType, List[ColorType]]):\n \"\"\"\n Returns True if the colour is not fully opaque.\n\n Where a list of colours is passed, returns True if any\n colour is not fully opaque.\n \"\"\"\n def has_alpha(col):\n return rgb(col)[3] < 1\n\n if isinstance(colour, (str, tuple)):\n return has_alpha(colour)\n\n elif isinstance(colour, list):\n print([c for c in colour])\n return any([has_transparency(c) for c in colour])\n\n return False\n\n\ndef remove_transparency(colour: Union[ColorType, 
List[ColorType]]):\n \"\"\"\n Returns a tuple of (r, g, b) with no alpha.\n \"\"\"\n def remove_alpha(col):\n stripped = tuple(x * 255.0 for x in rgb(col)[:3])\n return stripped\n\n if isinstance(colour, (str, tuple)):\n return remove_alpha(colour)\n\n elif isinstance(colour, list):\n return [remove_transparency(c) for c in colour]\n\n return (0, 0, 0)\n\n\ndef scrub_to_utf8(text):\n if not text:\n return \"\"\n elif isinstance(text, str):\n return text\n else:\n return text.decode(\"utf-8\", \"ignore\")\n\n\ndef get_cache_dir():\n \"\"\"\n Returns the cache directory and create if it doesn't exists\n \"\"\"\n\n cache_directory = os.path.expandvars('$XDG_CACHE_HOME')\n if cache_directory == '$XDG_CACHE_HOME':\n # if variable wasn't set\n cache_directory = os.path.expanduser(\"~/.cache\")\n cache_directory = os.path.join(cache_directory, 'qtile')\n if not os.path.exists(cache_directory):\n os.makedirs(cache_directory)\n return cache_directory\n\n\ndef describe_attributes(obj, attrs, func=lambda x: x):\n \"\"\"\n Helper for __repr__ functions to list attributes with truthy values only\n (or values that return a truthy value by func)\n \"\"\"\n\n pairs = []\n\n for attr in attrs:\n value = getattr(obj, attr, None)\n if func(value):\n pairs.append('%s=%s' % (attr, value))\n\n return ', '.join(pairs)\n\n\ndef import_class(module_path, class_name, fallback=None):\n \"\"\"Import a class safely\n\n Try to import the class module, and if it fails because of an ImporError\n it logs on WARNING, and logs the traceback on DEBUG level\n \"\"\"\n try:\n module = importlib.import_module(module_path, __package__)\n return getattr(module, class_name)\n except ImportError as error:\n logger.warning(\"Unmet dependencies for '%s.%s': %s\", module_path,\n class_name, error)\n if fallback:\n logger.debug(\"%s\", traceback.format_exc())\n return fallback(module_path, class_name)\n raise\n\n\ndef lazify_imports(registry, package, fallback=None):\n \"\"\"Leverage PEP 562 to make imports lazy in an __init__.py\n\n The registry must be a dictionary with the items to import as keys and the\n modules they belong to as a value.\n \"\"\"\n __all__ = tuple(registry.keys())\n\n def __dir__():\n return __all__\n\n def __getattr__(name):\n if name not in registry:\n raise AttributeError\n module_path = \"{}.{}\".format(package, registry[name])\n return import_class(module_path, name, fallback=fallback)\n\n return __all__, __dir__, __getattr__\n\n\ndef send_notification(title, message, urgent=False, timeout=10000, id=None):\n \"\"\"\n Send a notification.\n\n The id argument, if passed, requests the notification server to replace a visible\n notification with the same ID. An ID is returned for each call; this would then be\n passed when calling this function again to replace that notification. See:\n https://developer.gnome.org/notification-spec/\n \"\"\"\n if not has_dbus:\n logger.warning(\n \"dbus-next is not installed. Unable to send notifications.\"\n )\n return -1\n\n id = randint(10, 1000) if id is None else id\n urgency = 2 if urgent else 1\n\n try:\n loop = asyncio.get_running_loop()\n except RuntimeError:\n logger.warning(\"Eventloop has not started. 
Cannot send notification.\")\n else:\n loop.create_task(_notify(title, message, urgency, timeout, id))\n\n return id\n\n\nasync def _notify(title, message, urgency, timeout, id):\n notification = [\"qtile\", # Application name\n id, # id\n \"\", # icon\n title, # summary\n message, # body\n [], # actions\n {\"urgency\": Variant(\"y\", urgency)}, # hints\n timeout] # timeout\n\n bus, msg = await _send_dbus_message(True,\n MessageType.METHOD_CALL,\n \"org.freedesktop.Notifications\",\n \"org.freedesktop.Notifications\",\n \"/org/freedesktop/Notifications\",\n \"Notify\",\n \"susssasa{sv}i\",\n notification)\n\n if msg.message_type == MessageType.ERROR:\n logger.warning(\"Unable to send notification. \"\n \"Is a notification server running?\")\n\n # a new bus connection is made each time a notification is sent so\n # we disconnect when the notification is done\n bus.disconnect()\n\n\ndef guess_terminal(preference=None):\n \"\"\"Try to guess terminal.\"\"\"\n test_terminals = []\n if isinstance(preference, str):\n test_terminals += [preference]\n elif isinstance(preference, Sequence):\n test_terminals += list(preference)\n test_terminals += [\n 'roxterm',\n 'sakura',\n 'hyper',\n 'alacritty',\n 'terminator',\n 'termite',\n 'gnome-terminal',\n 'konsole',\n 'xfce4-terminal',\n 'lxterminal',\n 'mate-terminal',\n 'kitty',\n 'yakuake',\n 'tilda',\n 'guake',\n 'eterm',\n 'st',\n 'urxvt',\n 'xterm',\n 'x-terminal-emulator',\n ]\n\n for terminal in test_terminals:\n logger.debug('Guessing terminal: {}'.format(terminal))\n if not which(terminal, os.X_OK):\n continue\n\n logger.info('Terminal found: {}'.format(terminal))\n return terminal\n\n logger.error('Default terminal has not been found.')\n\n\ndef scan_files(dirpath, *names):\n \"\"\"\n Search a folder recursively for files matching those passed as arguments, with\n globbing. Returns a dict with keys equal to entries in names, and values a list of\n matching paths. E.g.:\n\n >>> scan_files('/wallpapers', '*.png', '*.jpg')\n defaultdict(<class 'list'>, {'*.png': ['/wallpapers/w1.png'], '*.jpg':\n ['/wallpapers/w2.jpg', '/wallpapers/w3.jpg']})\n\n \"\"\"\n files = defaultdict(list)\n\n for name in names:\n found = glob.glob(os.path.join(dirpath, '**', name), recursive=True)\n files[name].extend(found)\n\n return files\n\n\nasync def _send_dbus_message(session_bus, message_type, destination, interface,\n path, member, signature, body):\n \"\"\"\n Private method to send messages to dbus via dbus_next.\n\n Returns a tuple of the bus object and message response.\n \"\"\"\n if session_bus:\n bus_type = BusType.SESSION\n else:\n bus_type = BusType.SYSTEM\n\n if isinstance(body, str):\n body = [body]\n\n bus = await MessageBus(bus_type=bus_type).connect()\n\n msg = await bus.call(\n Message(message_type=message_type,\n destination=destination,\n interface=interface,\n path=path,\n member=member,\n signature=signature,\n body=body))\n\n return bus, msg\n\n\nasync def add_signal_receiver(callback, session_bus=False, signal_name=None,\n dbus_interface=None, bus_name=None, path=None):\n \"\"\"\n Helper function which aims to recreate python-dbus's add_signal_receiver\n method in dbus_next with asyncio calls.\n\n Returns True if subscription is successful.\n \"\"\"\n if not has_dbus:\n logger.warning(\n \"dbus-next is not installed. 
\"\n \"Unable to subscribe to signals\"\n )\n return False\n\n match_args = {\n \"type\": \"signal\",\n \"sender\": bus_name,\n \"member\": signal_name,\n \"path\": path,\n \"interface\": dbus_interface\n }\n\n rule = \",\".join(\"{}='{}'\".format(k, v)\n for k, v in match_args.items() if v)\n\n bus, msg = await _send_dbus_message(session_bus,\n MessageType.METHOD_CALL,\n \"org.freedesktop.DBus\",\n \"org.freedesktop.DBus\",\n \"/org/freedesktop/DBus\",\n \"AddMatch\",\n \"s\",\n rule)\n\n # Check if message sent successfully\n if msg.message_type == MessageType.METHOD_RETURN:\n bus.add_message_handler(callback)\n return True\n\n else:\n return False\n", "path": "libqtile/utils.py" } ]
[ { "content": "# Copyright (c) 2008, Aldo Cortesi. All rights reserved.\n# Copyright (c) 2020, Matt Colligan. All rights reserved.\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nimport asyncio\nimport glob\nimport importlib\nimport os\nimport traceback\nfrom collections import defaultdict\nfrom collections.abc import Sequence\nfrom random import randint\nfrom shutil import which\nfrom typing import List, Tuple, Union\n\ntry:\n from dbus_next import Message, Variant\n from dbus_next.aio import MessageBus\n from dbus_next.constants import BusType, MessageType\n has_dbus = True\nexcept ImportError:\n has_dbus = False\n\nfrom libqtile.log_utils import logger\n\n\nclass QtileError(Exception):\n pass\n\n\ndef lget(o, v):\n try:\n return o[v]\n except (IndexError, TypeError):\n return None\n\n\ndef shuffle_up(lst):\n if len(lst) > 1:\n c = lst[-1]\n lst.remove(c)\n lst.insert(0, c)\n\n\ndef shuffle_down(lst):\n if len(lst) > 1:\n c = lst[0]\n lst.remove(c)\n lst.append(c)\n\n\nColorType = Union[str, Tuple[int, int, int], Tuple[int, int, int, float]]\n\n\ndef rgb(x):\n \"\"\"\n Returns a valid RGBA tuple.\n\n Here are some valid specifcations:\n #ff0000\n with alpha: #ff000080\n ff0000\n with alpha: ff0000.5\n (255, 0, 0)\n with alpha: (255, 0, 0, 0.5)\n \"\"\"\n if isinstance(x, (tuple, list)):\n if len(x) == 4:\n alpha = x[3]\n else:\n alpha = 1\n return (x[0] / 255.0, x[1] / 255.0, x[2] / 255.0, alpha)\n elif isinstance(x, str):\n if x.startswith(\"#\"):\n x = x[1:]\n if \".\" in x:\n x, alpha = x.split(\".\")\n alpha = float(\"0.\" + alpha)\n else:\n alpha = 1\n if len(x) not in (6, 8):\n raise ValueError(\"RGB specifier must be 6 or 8 characters long.\")\n vals = [int(i, 16) for i in (x[0:2], x[2:4], x[4:6])]\n if len(x) == 8:\n alpha = int(x[6:8], 16) / 255.0\n vals.append(alpha)\n return rgb(vals)\n raise ValueError(\"Invalid RGB specifier.\")\n\n\ndef hex(x):\n r, g, b, _ = rgb(x)\n return '#%02x%02x%02x' % (int(r * 255), int(g * 255), int(b * 255))\n\n\ndef has_transparency(colour: Union[ColorType, List[ColorType]]):\n \"\"\"\n Returns True if the colour is not fully opaque.\n\n Where a list of colours is passed, returns True if any\n colour is not fully opaque.\n \"\"\"\n def has_alpha(col):\n return rgb(col)[3] < 1\n\n if isinstance(colour, (str, tuple)):\n return has_alpha(colour)\n\n elif isinstance(colour, list):\n return any([has_transparency(c) for c in colour])\n\n return False\n\n\ndef remove_transparency(colour: Union[ColorType, List[ColorType]]):\n \"\"\"\n 
Returns a tuple of (r, g, b) with no alpha.\n \"\"\"\n def remove_alpha(col):\n stripped = tuple(x * 255.0 for x in rgb(col)[:3])\n return stripped\n\n if isinstance(colour, (str, tuple)):\n return remove_alpha(colour)\n\n elif isinstance(colour, list):\n return [remove_transparency(c) for c in colour]\n\n return (0, 0, 0)\n\n\ndef scrub_to_utf8(text):\n if not text:\n return \"\"\n elif isinstance(text, str):\n return text\n else:\n return text.decode(\"utf-8\", \"ignore\")\n\n\ndef get_cache_dir():\n \"\"\"\n Returns the cache directory and create if it doesn't exists\n \"\"\"\n\n cache_directory = os.path.expandvars('$XDG_CACHE_HOME')\n if cache_directory == '$XDG_CACHE_HOME':\n # if variable wasn't set\n cache_directory = os.path.expanduser(\"~/.cache\")\n cache_directory = os.path.join(cache_directory, 'qtile')\n if not os.path.exists(cache_directory):\n os.makedirs(cache_directory)\n return cache_directory\n\n\ndef describe_attributes(obj, attrs, func=lambda x: x):\n \"\"\"\n Helper for __repr__ functions to list attributes with truthy values only\n (or values that return a truthy value by func)\n \"\"\"\n\n pairs = []\n\n for attr in attrs:\n value = getattr(obj, attr, None)\n if func(value):\n pairs.append('%s=%s' % (attr, value))\n\n return ', '.join(pairs)\n\n\ndef import_class(module_path, class_name, fallback=None):\n \"\"\"Import a class safely\n\n Try to import the class module, and if it fails because of an ImporError\n it logs on WARNING, and logs the traceback on DEBUG level\n \"\"\"\n try:\n module = importlib.import_module(module_path, __package__)\n return getattr(module, class_name)\n except ImportError as error:\n logger.warning(\"Unmet dependencies for '%s.%s': %s\", module_path,\n class_name, error)\n if fallback:\n logger.debug(\"%s\", traceback.format_exc())\n return fallback(module_path, class_name)\n raise\n\n\ndef lazify_imports(registry, package, fallback=None):\n \"\"\"Leverage PEP 562 to make imports lazy in an __init__.py\n\n The registry must be a dictionary with the items to import as keys and the\n modules they belong to as a value.\n \"\"\"\n __all__ = tuple(registry.keys())\n\n def __dir__():\n return __all__\n\n def __getattr__(name):\n if name not in registry:\n raise AttributeError\n module_path = \"{}.{}\".format(package, registry[name])\n return import_class(module_path, name, fallback=fallback)\n\n return __all__, __dir__, __getattr__\n\n\ndef send_notification(title, message, urgent=False, timeout=10000, id=None):\n \"\"\"\n Send a notification.\n\n The id argument, if passed, requests the notification server to replace a visible\n notification with the same ID. An ID is returned for each call; this would then be\n passed when calling this function again to replace that notification. See:\n https://developer.gnome.org/notification-spec/\n \"\"\"\n if not has_dbus:\n logger.warning(\n \"dbus-next is not installed. Unable to send notifications.\"\n )\n return -1\n\n id = randint(10, 1000) if id is None else id\n urgency = 2 if urgent else 1\n\n try:\n loop = asyncio.get_running_loop()\n except RuntimeError:\n logger.warning(\"Eventloop has not started. 
Cannot send notification.\")\n else:\n loop.create_task(_notify(title, message, urgency, timeout, id))\n\n return id\n\n\nasync def _notify(title, message, urgency, timeout, id):\n notification = [\"qtile\", # Application name\n id, # id\n \"\", # icon\n title, # summary\n message, # body\n [], # actions\n {\"urgency\": Variant(\"y\", urgency)}, # hints\n timeout] # timeout\n\n bus, msg = await _send_dbus_message(True,\n MessageType.METHOD_CALL,\n \"org.freedesktop.Notifications\",\n \"org.freedesktop.Notifications\",\n \"/org/freedesktop/Notifications\",\n \"Notify\",\n \"susssasa{sv}i\",\n notification)\n\n if msg.message_type == MessageType.ERROR:\n logger.warning(\"Unable to send notification. \"\n \"Is a notification server running?\")\n\n # a new bus connection is made each time a notification is sent so\n # we disconnect when the notification is done\n bus.disconnect()\n\n\ndef guess_terminal(preference=None):\n \"\"\"Try to guess terminal.\"\"\"\n test_terminals = []\n if isinstance(preference, str):\n test_terminals += [preference]\n elif isinstance(preference, Sequence):\n test_terminals += list(preference)\n test_terminals += [\n 'roxterm',\n 'sakura',\n 'hyper',\n 'alacritty',\n 'terminator',\n 'termite',\n 'gnome-terminal',\n 'konsole',\n 'xfce4-terminal',\n 'lxterminal',\n 'mate-terminal',\n 'kitty',\n 'yakuake',\n 'tilda',\n 'guake',\n 'eterm',\n 'st',\n 'urxvt',\n 'xterm',\n 'x-terminal-emulator',\n ]\n\n for terminal in test_terminals:\n logger.debug('Guessing terminal: {}'.format(terminal))\n if not which(terminal, os.X_OK):\n continue\n\n logger.info('Terminal found: {}'.format(terminal))\n return terminal\n\n logger.error('Default terminal has not been found.')\n\n\ndef scan_files(dirpath, *names):\n \"\"\"\n Search a folder recursively for files matching those passed as arguments, with\n globbing. Returns a dict with keys equal to entries in names, and values a list of\n matching paths. E.g.:\n\n >>> scan_files('/wallpapers', '*.png', '*.jpg')\n defaultdict(<class 'list'>, {'*.png': ['/wallpapers/w1.png'], '*.jpg':\n ['/wallpapers/w2.jpg', '/wallpapers/w3.jpg']})\n\n \"\"\"\n files = defaultdict(list)\n\n for name in names:\n found = glob.glob(os.path.join(dirpath, '**', name), recursive=True)\n files[name].extend(found)\n\n return files\n\n\nasync def _send_dbus_message(session_bus, message_type, destination, interface,\n path, member, signature, body):\n \"\"\"\n Private method to send messages to dbus via dbus_next.\n\n Returns a tuple of the bus object and message response.\n \"\"\"\n if session_bus:\n bus_type = BusType.SESSION\n else:\n bus_type = BusType.SYSTEM\n\n if isinstance(body, str):\n body = [body]\n\n bus = await MessageBus(bus_type=bus_type).connect()\n\n msg = await bus.call(\n Message(message_type=message_type,\n destination=destination,\n interface=interface,\n path=path,\n member=member,\n signature=signature,\n body=body))\n\n return bus, msg\n\n\nasync def add_signal_receiver(callback, session_bus=False, signal_name=None,\n dbus_interface=None, bus_name=None, path=None):\n \"\"\"\n Helper function which aims to recreate python-dbus's add_signal_receiver\n method in dbus_next with asyncio calls.\n\n Returns True if subscription is successful.\n \"\"\"\n if not has_dbus:\n logger.warning(\n \"dbus-next is not installed. 
\"\n \"Unable to subscribe to signals\"\n )\n return False\n\n match_args = {\n \"type\": \"signal\",\n \"sender\": bus_name,\n \"member\": signal_name,\n \"path\": path,\n \"interface\": dbus_interface\n }\n\n rule = \",\".join(\"{}='{}'\".format(k, v)\n for k, v in match_args.items() if v)\n\n bus, msg = await _send_dbus_message(session_bus,\n MessageType.METHOD_CALL,\n \"org.freedesktop.DBus\",\n \"org.freedesktop.DBus\",\n \"/org/freedesktop/DBus\",\n \"AddMatch\",\n \"s\",\n rule)\n\n # Check if message sent successfully\n if msg.message_type == MessageType.METHOD_RETURN:\n bus.add_message_handler(callback)\n return True\n\n else:\n return False\n", "path": "libqtile/utils.py" } ]
diff --git a/libqtile/utils.py b/libqtile/utils.py index 5947ebf800..9a378488e4 100644 --- a/libqtile/utils.py +++ b/libqtile/utils.py @@ -124,7 +124,6 @@ def has_alpha(col): return has_alpha(colour) elif isinstance(colour, list): - print([c for c in colour]) return any([has_transparency(c) for c in colour]) return False
liqd__a4-opin-906
styling of categories in dashboard (Safari) When using Safari, the styling of categories in the dashboard is broken. ![safari styling issue](https://user-images.githubusercontent.com/15954895/28914159-fe84edde-783a-11e7-8ae4-09f0a6b978cd.png)
[ { "content": "from adhocracy4.categories import forms as category_forms\n\nfrom . import models\n\n\nclass IdeaForm(category_forms.CategorizableForm):\n class Meta:\n model = models.Idea\n fields = ['name', 'description', 'image', 'category']\n", "path": "euth/ideas/forms.py" } ]
[ { "content": "from adhocracy4.categories import forms as category_forms\n\nfrom . import models\n\n\nclass IdeaForm(category_forms.CategorizableForm):\n class Meta:\n model = models.Idea\n fields = ['name', 'description', 'image', 'category']\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.fields['category'].empty_label = '---'\n", "path": "euth/ideas/forms.py" } ]
diff --git a/euth/ideas/forms.py b/euth/ideas/forms.py index 0c07e55ec..056c2b412 100644 --- a/euth/ideas/forms.py +++ b/euth/ideas/forms.py @@ -7,3 +7,7 @@ class IdeaForm(category_forms.CategorizableForm): class Meta: model = models.Idea fields = ['name', 'description', 'image', 'category'] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields['category'].empty_label = '---' diff --git a/euth/ideas/templates/euth_ideas/idea_form.html b/euth/ideas/templates/euth_ideas/idea_form.html index 54b115ef7..00155960b 100644 --- a/euth/ideas/templates/euth_ideas/idea_form.html +++ b/euth/ideas/templates/euth_ideas/idea_form.html @@ -65,6 +65,8 @@ <h3 class="sans-serif"> </div> {{ form.image.errors }} </div> + + {% if form.show_categories %} <div class="form-group"> <label class="control-label" for="{{ form.category.id_for_label }}">{% trans 'Category'%}</label> {% if form.category.errors %} @@ -74,6 +76,7 @@ <h3 class="sans-serif"> {% endif %} {{ form.category.errors }} </div> + {% endif %} {% block additional_fields %}{% endblock %} {% block post_form %} diff --git a/euth_wagtail/assets/scss/components/_category-form.scss b/euth_wagtail/assets/scss/components/_category-form.scss index 2a641e968..0bfaf3e2f 100644 --- a/euth_wagtail/assets/scss/components/_category-form.scss +++ b/euth_wagtail/assets/scss/components/_category-form.scss @@ -17,9 +17,7 @@ } .category__delete { - visibility: collapse; - width: 100%; - left: 0; + @extend .sr-only; &:checked ~ input[type="text"] { text-decoration: line-through;
huggingface__transformers-4448
LayerNorm not excluded from weight decay in TF # 🐛 Bug ## Information Model I am using (Bert, XLNet ...): bert-base-cased Language I am using the model on (English, Chinese ...): English The problem arises when using: * [X] the official example scripts: (give details below) * [ ] my own modified scripts: (give details below) The tasks I am working on is: * [X] an official GLUE/SQUaD task: (give the name) * [ ] my own task or dataset: (give details below) ## To reproduce Steps to reproduce the behavior: 1. Add a print statement to `_do_use_weight_decay` in [AdamWeightDecay](https://github.com/huggingface/transformers/blob/master/src/transformers/optimization_tf.py) to see which parameters are actually excluded: ```python def _do_use_weight_decay(self, param_name): """Whether to use L2 weight decay for `param_name`.""" if self.weight_decay_rate == 0: return False if self._include_in_weight_decay: for r in self._include_in_weight_decay: if re.search(r, param_name) is not None: return True if self._exclude_from_weight_decay: for r in self._exclude_from_weight_decay: if re.search(r, param_name) is not None: print(f"Found: {param_name}") return False return True ``` 2. run `python examples/text-classification/run_tf_glue.py --model_name_or_path bert-base-cased --task_name mrpc --output_dir temp --logging_dir temp --do_train --overwrite_output_dir --optimizer_name adamw`. 3. Observe that no weights related to layer norms are printed. <!-- If you have code snippets, error messages, stack traces please provide them here as well. Important! Use code tags to correctly format your code. See https://help.github.com/en/github/writing-on-github/creating-and-highlighting-code-blocks#syntax-highlighting Do not use screenshots, as they are hard to read and (more importantly) don't allow others to copy-and-paste your code.--> ## Expected behavior <!-- A clear and concise description of what you would expect to happen. --> The weights of the layer norms (and the biases) should be printed. See for example: https://github.com/google-research/bert/blob/f39e881b169b9d53bea03d2d341b31707a6c052b/optimization.py. Based on the fact that no layer norm weights are printed with "layer_norm" simply switching "layer_norm" to "LayerNorm" seems like the easiest change. ## Environment info <!-- You can run the command `transformers-cli env` and copy-and-paste its output below. Don't forget to fill out the missing fields in that output! --> - `transformers` version: 2.9.0 - Platform: Darwin-19.4.0-x86_64-i386-64bit - Python version: 3.7.7 - PyTorch version (GPU?): 1.5.0 (False) - Tensorflow version (GPU?): 2.2.0 (False) - Using GPU in script?: No - Using distributed or parallel set-up in script?: No
[ { "content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Functions and classes related to optimization (weight updates).\"\"\"\n\n\nimport re\n\nimport tensorflow as tf\n\n\nclass WarmUp(tf.keras.optimizers.schedules.LearningRateSchedule):\n \"\"\"Applies a warmup schedule on a given learning rate decay schedule.\"\"\"\n\n def __init__(\n self, initial_learning_rate, decay_schedule_fn, warmup_steps, power=1.0, name=None,\n ):\n super().__init__()\n self.initial_learning_rate = initial_learning_rate\n self.warmup_steps = warmup_steps\n self.power = power\n self.decay_schedule_fn = decay_schedule_fn\n self.name = name\n\n def __call__(self, step):\n with tf.name_scope(self.name or \"WarmUp\") as name:\n # Implements polynomial warmup. i.e., if global_step < warmup_steps, the\n # learning rate will be `global_step/num_warmup_steps * init_lr`.\n global_step_float = tf.cast(step, tf.float32)\n warmup_steps_float = tf.cast(self.warmup_steps, tf.float32)\n warmup_percent_done = global_step_float / warmup_steps_float\n warmup_learning_rate = self.initial_learning_rate * tf.math.pow(warmup_percent_done, self.power)\n return tf.cond(\n global_step_float < warmup_steps_float,\n lambda: warmup_learning_rate,\n lambda: self.decay_schedule_fn(step),\n name=name,\n )\n\n def get_config(self):\n return {\n \"initial_learning_rate\": self.initial_learning_rate,\n \"decay_schedule_fn\": self.decay_schedule_fn,\n \"warmup_steps\": self.warmup_steps,\n \"power\": self.power,\n \"name\": self.name,\n }\n\n\ndef create_optimizer(init_lr, num_train_steps, num_warmup_steps, end_lr=0.0, optimizer_type=\"adamw\"):\n \"\"\"Creates an optimizer with learning rate schedule.\"\"\"\n # Implements linear decay of the learning rate.\n lr_schedule = tf.keras.optimizers.schedules.PolynomialDecay(\n initial_learning_rate=init_lr, decay_steps=num_train_steps, end_learning_rate=end_lr,\n )\n if num_warmup_steps:\n lr_schedule = WarmUp(\n initial_learning_rate=init_lr, decay_schedule_fn=lr_schedule, warmup_steps=num_warmup_steps,\n )\n\n optimizer = AdamWeightDecay(\n learning_rate=lr_schedule,\n weight_decay_rate=0.01,\n beta_1=0.9,\n beta_2=0.999,\n epsilon=1e-6,\n exclude_from_weight_decay=[\"layer_norm\", \"bias\"],\n )\n\n return optimizer\n\n\nclass AdamWeightDecay(tf.keras.optimizers.Adam):\n \"\"\"Adam enables L2 weight decay and clip_by_global_norm on gradients.\n Just adding the square of the weights to the loss function is *not* the\n correct way of using L2 regularization/weight decay with Adam, since that will\n interact with the m and v parameters in strange ways.\n Instead we want ot decay the weights in a manner that doesn't interact with\n the m/v parameters. 
This is equivalent to adding the square of the weights to\n the loss with plain (non-momentum) SGD.\n \"\"\"\n\n def __init__(\n self,\n learning_rate=0.001,\n beta_1=0.9,\n beta_2=0.999,\n epsilon=1e-7,\n amsgrad=False,\n weight_decay_rate=0.0,\n include_in_weight_decay=None,\n exclude_from_weight_decay=None,\n name=\"AdamWeightDecay\",\n **kwargs\n ):\n super().__init__(learning_rate, beta_1, beta_2, epsilon, amsgrad, name, **kwargs)\n self.weight_decay_rate = weight_decay_rate\n self._include_in_weight_decay = include_in_weight_decay\n self._exclude_from_weight_decay = exclude_from_weight_decay\n\n @classmethod\n def from_config(cls, config):\n \"\"\"Creates an optimizer from its config with WarmUp custom object.\"\"\"\n custom_objects = {\"WarmUp\": WarmUp}\n return super(AdamWeightDecay, cls).from_config(config, custom_objects=custom_objects)\n\n def _prepare_local(self, var_device, var_dtype, apply_state):\n super(AdamWeightDecay, self)._prepare_local(var_device, var_dtype, apply_state)\n apply_state[(var_device, var_dtype)][\"weight_decay_rate\"] = tf.constant(\n self.weight_decay_rate, name=\"adam_weight_decay_rate\"\n )\n\n def _decay_weights_op(self, var, learning_rate, apply_state):\n do_decay = self._do_use_weight_decay(var.name)\n if do_decay:\n return var.assign_sub(\n learning_rate * var * apply_state[(var.device, var.dtype.base_dtype)][\"weight_decay_rate\"],\n use_locking=self._use_locking,\n )\n return tf.no_op()\n\n def apply_gradients(self, grads_and_vars, name=None):\n grads, tvars = list(zip(*grads_and_vars))\n return super(AdamWeightDecay, self).apply_gradients(zip(grads, tvars), name=name,)\n\n def _get_lr(self, var_device, var_dtype, apply_state):\n \"\"\"Retrieves the learning rate with the given state.\"\"\"\n if apply_state is None:\n return self._decayed_lr_t[var_dtype], {}\n\n apply_state = apply_state or {}\n coefficients = apply_state.get((var_device, var_dtype))\n if coefficients is None:\n coefficients = self._fallback_apply_state(var_device, var_dtype)\n apply_state[(var_device, var_dtype)] = coefficients\n\n return coefficients[\"lr_t\"], dict(apply_state=apply_state)\n\n def _resource_apply_dense(self, grad, var, apply_state=None):\n lr_t, kwargs = self._get_lr(var.device, var.dtype.base_dtype, apply_state)\n decay = self._decay_weights_op(var, lr_t, apply_state)\n with tf.control_dependencies([decay]):\n return super(AdamWeightDecay, self)._resource_apply_dense(grad, var, **kwargs)\n\n def _resource_apply_sparse(self, grad, var, indices, apply_state=None):\n lr_t, kwargs = self._get_lr(var.device, var.dtype.base_dtype, apply_state)\n decay = self._decay_weights_op(var, lr_t, apply_state)\n with tf.control_dependencies([decay]):\n return super(AdamWeightDecay, self)._resource_apply_sparse(grad, var, indices, **kwargs)\n\n def get_config(self):\n config = super().get_config()\n config.update({\"weight_decay_rate\": self.weight_decay_rate})\n return config\n\n def _do_use_weight_decay(self, param_name):\n \"\"\"Whether to use L2 weight decay for `param_name`.\"\"\"\n if self.weight_decay_rate == 0:\n return False\n\n if self._include_in_weight_decay:\n for r in self._include_in_weight_decay:\n if re.search(r, param_name) is not None:\n return True\n\n if self._exclude_from_weight_decay:\n for r in self._exclude_from_weight_decay:\n if re.search(r, param_name) is not None:\n return False\n return True\n\n\n# Extracted from https://github.com/OpenNMT/OpenNMT-tf/blob/master/opennmt/optimizers/utils.py\nclass GradientAccumulator(object):\n \"\"\"Gradient 
accumulation utility.\n When used with a distribution strategy, the accumulator should be called in a\n replica context. Gradients will be accumulated locally on each replica and\n without synchronization. Users should then call ``.gradients``, scale the\n gradients if required, and pass the result to ``apply_gradients``.\n \"\"\"\n\n # We use the ON_READ synchronization policy so that no synchronization is\n # performed on assignment. To get the value, we call .value() which returns the\n # value on the current replica without synchronization.\n\n def __init__(self):\n \"\"\"Initializes the accumulator.\"\"\"\n self._gradients = []\n self._accum_steps = None\n\n @property\n def step(self):\n \"\"\"Number of accumulated steps.\"\"\"\n if self._accum_steps is None:\n self._accum_steps = tf.Variable(\n tf.constant(0, dtype=tf.int64),\n trainable=False,\n synchronization=tf.VariableSynchronization.ON_READ,\n aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA,\n )\n\n return self._accum_steps.value()\n\n @property\n def gradients(self):\n \"\"\"The accumulated gradients on the current replica.\"\"\"\n if not self._gradients:\n raise ValueError(\"The accumulator should be called first to initialize the gradients\")\n return list(gradient.value() if gradient is not None else gradient for gradient in self._gradients)\n\n def __call__(self, gradients):\n \"\"\"Accumulates :obj:`gradients` on the current replica.\"\"\"\n if not self._gradients:\n _ = self.step # Create the step variable.\n self._gradients.extend(\n [\n tf.Variable(\n tf.zeros_like(gradient),\n trainable=False,\n synchronization=tf.VariableSynchronization.ON_READ,\n aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA,\n )\n if gradient is not None\n else gradient\n for gradient in gradients\n ]\n )\n if len(gradients) != len(self._gradients):\n raise ValueError(\"Expected %s gradients, but got %d\" % (len(self._gradients), len(gradients)))\n\n for accum_gradient, gradient in zip(self._gradients, gradients):\n if accum_gradient is not None and gradient is not None:\n accum_gradient.assign_add(gradient)\n\n self._accum_steps.assign_add(1)\n\n def reset(self):\n \"\"\"Resets the accumulated gradients on the current replica.\"\"\"\n if not self._gradients:\n return\n self._accum_steps.assign(0)\n for gradient in self._gradients:\n if gradient is not None:\n gradient.assign(tf.zeros_like(gradient))\n", "path": "src/transformers/optimization_tf.py" } ]
[ { "content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Functions and classes related to optimization (weight updates).\"\"\"\n\n\nimport re\n\nimport tensorflow as tf\n\n\nclass WarmUp(tf.keras.optimizers.schedules.LearningRateSchedule):\n \"\"\"Applies a warmup schedule on a given learning rate decay schedule.\"\"\"\n\n def __init__(\n self, initial_learning_rate, decay_schedule_fn, warmup_steps, power=1.0, name=None,\n ):\n super().__init__()\n self.initial_learning_rate = initial_learning_rate\n self.warmup_steps = warmup_steps\n self.power = power\n self.decay_schedule_fn = decay_schedule_fn\n self.name = name\n\n def __call__(self, step):\n with tf.name_scope(self.name or \"WarmUp\") as name:\n # Implements polynomial warmup. i.e., if global_step < warmup_steps, the\n # learning rate will be `global_step/num_warmup_steps * init_lr`.\n global_step_float = tf.cast(step, tf.float32)\n warmup_steps_float = tf.cast(self.warmup_steps, tf.float32)\n warmup_percent_done = global_step_float / warmup_steps_float\n warmup_learning_rate = self.initial_learning_rate * tf.math.pow(warmup_percent_done, self.power)\n return tf.cond(\n global_step_float < warmup_steps_float,\n lambda: warmup_learning_rate,\n lambda: self.decay_schedule_fn(step),\n name=name,\n )\n\n def get_config(self):\n return {\n \"initial_learning_rate\": self.initial_learning_rate,\n \"decay_schedule_fn\": self.decay_schedule_fn,\n \"warmup_steps\": self.warmup_steps,\n \"power\": self.power,\n \"name\": self.name,\n }\n\n\ndef create_optimizer(init_lr, num_train_steps, num_warmup_steps, end_lr=0.0, optimizer_type=\"adamw\"):\n \"\"\"Creates an optimizer with learning rate schedule.\"\"\"\n # Implements linear decay of the learning rate.\n lr_schedule = tf.keras.optimizers.schedules.PolynomialDecay(\n initial_learning_rate=init_lr, decay_steps=num_train_steps, end_learning_rate=end_lr,\n )\n if num_warmup_steps:\n lr_schedule = WarmUp(\n initial_learning_rate=init_lr, decay_schedule_fn=lr_schedule, warmup_steps=num_warmup_steps,\n )\n\n optimizer = AdamWeightDecay(\n learning_rate=lr_schedule,\n weight_decay_rate=0.01,\n beta_1=0.9,\n beta_2=0.999,\n epsilon=1e-6,\n exclude_from_weight_decay=[\"LayerNorm\", \"layer_norm\", \"bias\"],\n )\n\n return optimizer\n\n\nclass AdamWeightDecay(tf.keras.optimizers.Adam):\n \"\"\"Adam enables L2 weight decay and clip_by_global_norm on gradients.\n Just adding the square of the weights to the loss function is *not* the\n correct way of using L2 regularization/weight decay with Adam, since that will\n interact with the m and v parameters in strange ways.\n Instead we want ot decay the weights in a manner that doesn't interact with\n the m/v parameters. 
This is equivalent to adding the square of the weights to\n the loss with plain (non-momentum) SGD.\n \"\"\"\n\n def __init__(\n self,\n learning_rate=0.001,\n beta_1=0.9,\n beta_2=0.999,\n epsilon=1e-7,\n amsgrad=False,\n weight_decay_rate=0.0,\n include_in_weight_decay=None,\n exclude_from_weight_decay=None,\n name=\"AdamWeightDecay\",\n **kwargs\n ):\n super().__init__(learning_rate, beta_1, beta_2, epsilon, amsgrad, name, **kwargs)\n self.weight_decay_rate = weight_decay_rate\n self._include_in_weight_decay = include_in_weight_decay\n self._exclude_from_weight_decay = exclude_from_weight_decay\n\n @classmethod\n def from_config(cls, config):\n \"\"\"Creates an optimizer from its config with WarmUp custom object.\"\"\"\n custom_objects = {\"WarmUp\": WarmUp}\n return super(AdamWeightDecay, cls).from_config(config, custom_objects=custom_objects)\n\n def _prepare_local(self, var_device, var_dtype, apply_state):\n super(AdamWeightDecay, self)._prepare_local(var_device, var_dtype, apply_state)\n apply_state[(var_device, var_dtype)][\"weight_decay_rate\"] = tf.constant(\n self.weight_decay_rate, name=\"adam_weight_decay_rate\"\n )\n\n def _decay_weights_op(self, var, learning_rate, apply_state):\n do_decay = self._do_use_weight_decay(var.name)\n if do_decay:\n return var.assign_sub(\n learning_rate * var * apply_state[(var.device, var.dtype.base_dtype)][\"weight_decay_rate\"],\n use_locking=self._use_locking,\n )\n return tf.no_op()\n\n def apply_gradients(self, grads_and_vars, name=None):\n grads, tvars = list(zip(*grads_and_vars))\n return super(AdamWeightDecay, self).apply_gradients(zip(grads, tvars), name=name,)\n\n def _get_lr(self, var_device, var_dtype, apply_state):\n \"\"\"Retrieves the learning rate with the given state.\"\"\"\n if apply_state is None:\n return self._decayed_lr_t[var_dtype], {}\n\n apply_state = apply_state or {}\n coefficients = apply_state.get((var_device, var_dtype))\n if coefficients is None:\n coefficients = self._fallback_apply_state(var_device, var_dtype)\n apply_state[(var_device, var_dtype)] = coefficients\n\n return coefficients[\"lr_t\"], dict(apply_state=apply_state)\n\n def _resource_apply_dense(self, grad, var, apply_state=None):\n lr_t, kwargs = self._get_lr(var.device, var.dtype.base_dtype, apply_state)\n decay = self._decay_weights_op(var, lr_t, apply_state)\n with tf.control_dependencies([decay]):\n return super(AdamWeightDecay, self)._resource_apply_dense(grad, var, **kwargs)\n\n def _resource_apply_sparse(self, grad, var, indices, apply_state=None):\n lr_t, kwargs = self._get_lr(var.device, var.dtype.base_dtype, apply_state)\n decay = self._decay_weights_op(var, lr_t, apply_state)\n with tf.control_dependencies([decay]):\n return super(AdamWeightDecay, self)._resource_apply_sparse(grad, var, indices, **kwargs)\n\n def get_config(self):\n config = super().get_config()\n config.update({\"weight_decay_rate\": self.weight_decay_rate})\n return config\n\n def _do_use_weight_decay(self, param_name):\n \"\"\"Whether to use L2 weight decay for `param_name`.\"\"\"\n if self.weight_decay_rate == 0:\n return False\n\n if self._include_in_weight_decay:\n for r in self._include_in_weight_decay:\n if re.search(r, param_name) is not None:\n return True\n\n if self._exclude_from_weight_decay:\n for r in self._exclude_from_weight_decay:\n if re.search(r, param_name) is not None:\n return False\n return True\n\n\n# Extracted from https://github.com/OpenNMT/OpenNMT-tf/blob/master/opennmt/optimizers/utils.py\nclass GradientAccumulator(object):\n \"\"\"Gradient 
accumulation utility.\n When used with a distribution strategy, the accumulator should be called in a\n replica context. Gradients will be accumulated locally on each replica and\n without synchronization. Users should then call ``.gradients``, scale the\n gradients if required, and pass the result to ``apply_gradients``.\n \"\"\"\n\n # We use the ON_READ synchronization policy so that no synchronization is\n # performed on assignment. To get the value, we call .value() which returns the\n # value on the current replica without synchronization.\n\n def __init__(self):\n \"\"\"Initializes the accumulator.\"\"\"\n self._gradients = []\n self._accum_steps = None\n\n @property\n def step(self):\n \"\"\"Number of accumulated steps.\"\"\"\n if self._accum_steps is None:\n self._accum_steps = tf.Variable(\n tf.constant(0, dtype=tf.int64),\n trainable=False,\n synchronization=tf.VariableSynchronization.ON_READ,\n aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA,\n )\n\n return self._accum_steps.value()\n\n @property\n def gradients(self):\n \"\"\"The accumulated gradients on the current replica.\"\"\"\n if not self._gradients:\n raise ValueError(\"The accumulator should be called first to initialize the gradients\")\n return list(gradient.value() for gradient in self._gradients)\n\n def __call__(self, gradients):\n \"\"\"Accumulates :obj:`gradients` on the current replica.\"\"\"\n if not self._gradients:\n _ = self.step # Create the step variable.\n self._gradients.extend(\n [\n tf.Variable(\n tf.zeros_like(gradient),\n trainable=False,\n synchronization=tf.VariableSynchronization.ON_READ,\n aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA,\n )\n for gradient in gradients\n ]\n )\n if len(gradients) != len(self._gradients):\n raise ValueError(\"Expected %s gradients, but got %d\" % (len(self._gradients), len(gradients)))\n\n for accum_gradient, gradient in zip(self._gradients, gradients):\n accum_gradient.assign_add(gradient)\n\n self._accum_steps.assign_add(1)\n\n def reset(self):\n \"\"\"Resets the accumulated gradients on the current replica.\"\"\"\n if not self._gradients:\n return\n self._accum_steps.assign(0)\n for gradient in self._gradients:\n gradient.assign(tf.zeros_like(gradient))\n", "path": "src/transformers/optimization_tf.py" } ]
diff --git a/src/transformers/optimization_tf.py b/src/transformers/optimization_tf.py index 6f4e78908919..b72e54905054 100644 --- a/src/transformers/optimization_tf.py +++ b/src/transformers/optimization_tf.py @@ -75,7 +75,7 @@ def create_optimizer(init_lr, num_train_steps, num_warmup_steps, end_lr=0.0, opt beta_1=0.9, beta_2=0.999, epsilon=1e-6, - exclude_from_weight_decay=["layer_norm", "bias"], + exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"], ) return optimizer
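To make the matching behaviour described in this issue easy to check without TensorFlow, here is a small standalone sketch that reimplements the `re.search`-based exclude check from `_do_use_weight_decay` and runs it over a few made-up parameter names in the style Keras gives BERT variables (the names are illustrative assumptions, not read from a real checkpoint). It shows why the lowercase `"layer_norm"` pattern never fires while `"LayerNorm"` does.

```python
import re


def use_weight_decay(param_name, exclude_patterns):
    """Mirror of the exclude-list check in AdamWeightDecay._do_use_weight_decay."""
    return not any(re.search(pattern, param_name) for pattern in exclude_patterns)


# Hypothetical variable names in the style TF/Keras BERT layers produce.
param_names = [
    "bert/encoder/layer_._0/attention/output/LayerNorm/gamma:0",
    "bert/encoder/layer_._0/attention/output/dense/bias:0",
    "bert/encoder/layer_._0/attention/self/query/kernel:0",
]

before_fix = ["layer_norm", "bias"]
after_fix = ["LayerNorm", "layer_norm", "bias"]

for name in param_names:
    print(name, use_weight_decay(name, before_fix), use_weight_decay(name, after_fix))
# The LayerNorm gamma is still decayed (True) under the old list because
# re.search is case-sensitive, and correctly skipped (False) once "LayerNorm"
# is added; the bias and kernel variables behave the same under both lists.
```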
DDMAL__CantusDB-776
Chant Search Manuscript view - change URL path to match OldCantus I understand we're trying to keep URLs the same between OldCantus and NewCantus, but there's a difference in the Chant Search Manuscript view. OldCantus uses `/searchms/` (e.g. https://cantus.uwaterloo.ca/searchms/123610?t=est), whereas NewCantus uses `/chant-search-ms/` (e.g. http://206.12.93.196/chant-search-ms/123610?t=est). This doesn't strike me as a particularly vital difference - I doubt many people will have bookmarked and/or cited a Search Manuscript page. But this would be a fairly simple fix, so we may as well make NewCantus work the same as OldCantus in this case. Bigger picture question: how important is it that all URL paths match between OldCantus and NewCantus? @annamorphism, do you have a sense of this?
[ { "content": "from django.urls import include, path, reverse\nfrom django.contrib.auth.views import (\n PasswordResetView,\n PasswordResetDoneView,\n PasswordResetConfirmView,\n PasswordResetCompleteView,\n)\nfrom main_app.views import views\nimport debug_toolbar\nfrom main_app.views.century import (\n CenturyDetailView,\n)\nfrom main_app.views.chant import (\n ChantByCantusIDView,\n ChantCreateView,\n ChantDeleteView,\n ChantDetailView,\n ChantEditSyllabificationView,\n ChantIndexView,\n ChantListView,\n ChantProofreadView,\n ChantSearchView,\n ChantSearchMSView,\n CISearchView,\n MelodySearchView,\n SourceEditChantsView,\n)\nfrom main_app.views.feast import (\n FeastDetailView,\n FeastListView,\n)\nfrom main_app.views.genre import (\n GenreDetailView,\n GenreListView,\n)\nfrom main_app.views.notation import (\n NotationDetailView,\n)\nfrom main_app.views.office import (\n OfficeListView,\n OfficeDetailView,\n)\nfrom main_app.views.provenance import (\n ProvenanceDetailView,\n)\nfrom main_app.views.sequence import (\n SequenceDetailView,\n SequenceEditView,\n SequenceListView,\n)\nfrom main_app.views.source import (\n SourceCreateView,\n SourceDetailView,\n SourceEditView,\n SourceListView,\n)\nfrom main_app.views.user import (\n CustomLoginView,\n CustomLogoutView,\n IndexerListView,\n UserDetailView,\n UserListView,\n UserSourceListView,\n)\n\nurlpatterns = [\n path(\"__debug__/\", include(debug_toolbar.urls)),\n path(\n \"contact/\",\n views.contact,\n name=\"contact\",\n ),\n # login/logout/user\n path(\n \"login/\",\n CustomLoginView.as_view(redirect_authenticated_user=True),\n name=\"login\",\n ),\n path(\n \"logout/\",\n CustomLogoutView.as_view(),\n name=\"logout\",\n ),\n path(\n \"my-sources/\",\n UserSourceListView.as_view(),\n name=\"my-sources\",\n ),\n path(\n \"user/<int:pk>\",\n UserDetailView.as_view(),\n name=\"user-detail\",\n ),\n path(\n \"users/\",\n UserListView.as_view(),\n name=\"user-list\",\n ),\n path(\n \"change-password/\",\n views.change_password,\n name=\"change-password\",\n ),\n # password reset views\n path(\n # here, user can initiate a request to send a password reset email\n \"reset-password/\",\n PasswordResetView.as_view(\n template_name=\"registration/reset_password.html\",\n email_template_name=\"registration/reset_password_email.html\",\n success_url=\"/reset-password-sent/\",\n ),\n name=\"reset_password\",\n ),\n path(\n # we display this page once the password reset email has been sent\n \"reset-password-sent/\",\n PasswordResetDoneView.as_view(\n template_name=\"registration/reset_password_sent.html\",\n ),\n name=\"reset_password_done\",\n ),\n path(\n # here, the user can specify their new password\n \"reset/<uidb64>/<token>\",\n PasswordResetConfirmView.as_view(\n template_name=\"registration/reset_password_confirm.html\",\n success_url=\"/reset-password-complete/\",\n ),\n name=\"reset_password_confirm\",\n ),\n path(\n # we display this page once a user has completed a password reset\n # depending on whether their attempt was successful, this page either shows\n # a success message or a non-success message.\n \"reset-password-complete/\",\n PasswordResetCompleteView.as_view(\n template_name=\"registration/reset_password_complete.html\"\n ),\n name=\"reset_password_complete\",\n ),\n # century\n path(\"century/<int:pk>\", CenturyDetailView.as_view(), name=\"century-detail\"),\n # chant\n path(\n \"chants/\",\n ChantListView.as_view(),\n name=\"chant-list\",\n ), # /chants/?source={source id}\n path(\n \"chant/<int:pk>\",\n 
ChantDetailView.as_view(),\n name=\"chant-detail\",\n ),\n path(\n \"chant-search/\",\n ChantSearchView.as_view(),\n name=\"chant-search\",\n ),\n path(\n \"chant-create/<int:source_pk>\",\n ChantCreateView.as_view(),\n name=\"chant-create\",\n ),\n path(\n \"id/<str:cantus_id>\",\n ChantByCantusIDView.as_view(),\n name=\"chant-by-cantus-id\",\n ),\n path(\n \"chant-delete/<int:pk>\",\n ChantDeleteView.as_view(),\n name=\"chant-delete\",\n ),\n path(\n \"edit-chants/<int:source_id>\",\n SourceEditChantsView.as_view(),\n name=\"source-edit-chants\",\n ),\n path(\n \"proofread-chant/<int:source_id>\",\n ChantProofreadView.as_view(),\n name=\"chant-proofread\",\n ),\n path(\n \"edit-syllabification/<int:chant_id>\",\n ChantEditSyllabificationView.as_view(),\n name=\"source-edit-syllabification\",\n ),\n path(\n \"index/\",\n ChantIndexView.as_view(),\n name=\"chant-index\",\n ), # /index/?source={source id}\n # feast\n path(\n \"feasts/\",\n FeastListView.as_view(),\n name=\"feast-list\",\n ),\n path(\n \"feast/<int:pk>\",\n FeastDetailView.as_view(),\n name=\"feast-detail\",\n ),\n # genre\n path(\n \"genres/\",\n GenreListView.as_view(),\n name=\"genre-list\",\n ),\n path(\n \"genre/<int:pk>\",\n GenreDetailView.as_view(),\n name=\"genre-detail\",\n ),\n # indexer\n path(\n \"indexers/\",\n IndexerListView.as_view(),\n name=\"indexer-list\",\n ),\n # notation\n path(\n \"notation/<int:pk>\",\n NotationDetailView.as_view(),\n name=\"notation-detail\",\n ),\n # office\n path(\n \"offices/\",\n OfficeListView.as_view(),\n name=\"office-list\",\n ),\n path(\n \"office/<int:pk>\",\n OfficeDetailView.as_view(),\n name=\"office-detail\",\n ),\n # provenance\n path(\n \"provenance/<int:pk>\",\n ProvenanceDetailView.as_view(),\n name=\"provenance-detail\",\n ),\n # sequence\n path(\n \"sequences/\",\n SequenceListView.as_view(),\n name=\"sequence-list\",\n ),\n path(\n \"sequence/<int:pk>\",\n SequenceDetailView.as_view(),\n name=\"sequence-detail\",\n ),\n path(\n \"edit-sequence/<int:sequence_id>\",\n SequenceEditView.as_view(),\n name=\"sequence-edit\",\n ),\n # source\n path(\n \"sources/\",\n SourceListView.as_view(),\n name=\"source-list\",\n ),\n path(\n \"source/<int:pk>\",\n SourceDetailView.as_view(),\n name=\"source-detail\",\n ),\n path(\n \"source-create/\",\n SourceCreateView.as_view(),\n name=\"source-create\",\n ),\n path(\n \"edit-source/<int:source_id>\",\n SourceEditView.as_view(),\n name=\"source-edit\",\n ),\n # melody\n path(\n \"melody/\",\n MelodySearchView.as_view(),\n name=\"melody-search\",\n ),\n path(\n \"ajax/melody/<str:cantus_id>\",\n views.ajax_melody_list,\n name=\"ajax-melody\",\n ),\n path(\n \"ajax/melody-search/\",\n views.ajax_melody_search,\n name=\"ajax-melody-search\",\n ),\n # json api\n path(\n \"json-sources/\",\n views.json_sources_export,\n name=\"json-sources-export\",\n ),\n path(\n \"json-node/<str:id>\",\n views.json_node_export,\n name=\"json-node-export\",\n ),\n path(\n \"json-nextchants/<str:cantus_id>\",\n views.json_nextchants,\n name=\"json-nextchants\",\n ),\n path(\n \"json-melody/<str:cantus_id>\",\n views.json_melody_export,\n name=\"json-melody-export\",\n ),\n # misc search\n path(\n \"chant-search-ms/<int:source_pk>\",\n ChantSearchMSView.as_view(),\n name=\"chant-search-ms\",\n ),\n path(\n \"ci-search/<str:search_term>\",\n CISearchView.as_view(),\n name=\"ci-search\",\n ),\n path(\n \"ajax/search-bar/<str:search_term>\",\n views.ajax_search_bar,\n name=\"ajax-search-bar\",\n ),\n # misc\n path(\n \"content-statistics\",\n 
views.items_count,\n name=\"items-count\",\n ),\n path(\n \"source/<str:source_id>/csv/\",\n views.csv_export,\n name=\"csv-export\",\n ),\n path(\n \"sites/default/files/csv/<str:source_id>.csv\",\n views.csv_export_redirect_from_old_path,\n name=\"csv-export-old-path\",\n ),\n path(\n \"ajax/concordance/<str:cantus_id>\",\n views.ajax_concordance_list,\n name=\"ajax-concordance\",\n ),\n # content overview (for project managers)\n path(\n \"content-overview/\",\n views.content_overview,\n name=\"content-overview\",\n ),\n # /node/ url redirects\n path(\n \"node/<int:pk>\",\n views.redirect_node_url,\n name=\"redirect-node-url\",\n ),\n # /indexer/ url redirects\n path(\n \"indexer/<int:pk>\",\n views.redirect_indexer,\n name=\"redirect-indexer\",\n ),\n]\n\nhandler404 = \"main_app.views.views.handle404\"\n", "path": "django/cantusdb_project/main_app/urls.py" } ]
[ { "content": "from django.urls import include, path, reverse\nfrom django.contrib.auth.views import (\n PasswordResetView,\n PasswordResetDoneView,\n PasswordResetConfirmView,\n PasswordResetCompleteView,\n)\nfrom main_app.views import views\nimport debug_toolbar\nfrom main_app.views.century import (\n CenturyDetailView,\n)\nfrom main_app.views.chant import (\n ChantByCantusIDView,\n ChantCreateView,\n ChantDeleteView,\n ChantDetailView,\n ChantEditSyllabificationView,\n ChantIndexView,\n ChantListView,\n ChantProofreadView,\n ChantSearchView,\n ChantSearchMSView,\n CISearchView,\n MelodySearchView,\n SourceEditChantsView,\n)\nfrom main_app.views.feast import (\n FeastDetailView,\n FeastListView,\n)\nfrom main_app.views.genre import (\n GenreDetailView,\n GenreListView,\n)\nfrom main_app.views.notation import (\n NotationDetailView,\n)\nfrom main_app.views.office import (\n OfficeListView,\n OfficeDetailView,\n)\nfrom main_app.views.provenance import (\n ProvenanceDetailView,\n)\nfrom main_app.views.sequence import (\n SequenceDetailView,\n SequenceEditView,\n SequenceListView,\n)\nfrom main_app.views.source import (\n SourceCreateView,\n SourceDetailView,\n SourceEditView,\n SourceListView,\n)\nfrom main_app.views.user import (\n CustomLoginView,\n CustomLogoutView,\n IndexerListView,\n UserDetailView,\n UserListView,\n UserSourceListView,\n)\n\nurlpatterns = [\n path(\"__debug__/\", include(debug_toolbar.urls)),\n path(\n \"contact/\",\n views.contact,\n name=\"contact\",\n ),\n # login/logout/user\n path(\n \"login/\",\n CustomLoginView.as_view(redirect_authenticated_user=True),\n name=\"login\",\n ),\n path(\n \"logout/\",\n CustomLogoutView.as_view(),\n name=\"logout\",\n ),\n path(\n \"my-sources/\",\n UserSourceListView.as_view(),\n name=\"my-sources\",\n ),\n path(\n \"user/<int:pk>\",\n UserDetailView.as_view(),\n name=\"user-detail\",\n ),\n path(\n \"users/\",\n UserListView.as_view(),\n name=\"user-list\",\n ),\n path(\n \"change-password/\",\n views.change_password,\n name=\"change-password\",\n ),\n # password reset views\n path(\n # here, user can initiate a request to send a password reset email\n \"reset-password/\",\n PasswordResetView.as_view(\n template_name=\"registration/reset_password.html\",\n email_template_name=\"registration/reset_password_email.html\",\n success_url=\"/reset-password-sent/\",\n ),\n name=\"reset_password\",\n ),\n path(\n # we display this page once the password reset email has been sent\n \"reset-password-sent/\",\n PasswordResetDoneView.as_view(\n template_name=\"registration/reset_password_sent.html\",\n ),\n name=\"reset_password_done\",\n ),\n path(\n # here, the user can specify their new password\n \"reset/<uidb64>/<token>\",\n PasswordResetConfirmView.as_view(\n template_name=\"registration/reset_password_confirm.html\",\n success_url=\"/reset-password-complete/\",\n ),\n name=\"reset_password_confirm\",\n ),\n path(\n # we display this page once a user has completed a password reset\n # depending on whether their attempt was successful, this page either shows\n # a success message or a non-success message.\n \"reset-password-complete/\",\n PasswordResetCompleteView.as_view(\n template_name=\"registration/reset_password_complete.html\"\n ),\n name=\"reset_password_complete\",\n ),\n # century\n path(\"century/<int:pk>\", CenturyDetailView.as_view(), name=\"century-detail\"),\n # chant\n path(\n \"chants/\",\n ChantListView.as_view(),\n name=\"chant-list\",\n ), # /chants/?source={source id}\n path(\n \"chant/<int:pk>\",\n 
ChantDetailView.as_view(),\n name=\"chant-detail\",\n ),\n path(\n \"chant-search/\",\n ChantSearchView.as_view(),\n name=\"chant-search\",\n ),\n path(\n \"chant-create/<int:source_pk>\",\n ChantCreateView.as_view(),\n name=\"chant-create\",\n ),\n path(\n \"id/<str:cantus_id>\",\n ChantByCantusIDView.as_view(),\n name=\"chant-by-cantus-id\",\n ),\n path(\n \"chant-delete/<int:pk>\",\n ChantDeleteView.as_view(),\n name=\"chant-delete\",\n ),\n path(\n \"edit-chants/<int:source_id>\",\n SourceEditChantsView.as_view(),\n name=\"source-edit-chants\",\n ),\n path(\n \"proofread-chant/<int:source_id>\",\n ChantProofreadView.as_view(),\n name=\"chant-proofread\",\n ),\n path(\n \"edit-syllabification/<int:chant_id>\",\n ChantEditSyllabificationView.as_view(),\n name=\"source-edit-syllabification\",\n ),\n path(\n \"index/\",\n ChantIndexView.as_view(),\n name=\"chant-index\",\n ), # /index/?source={source id}\n # feast\n path(\n \"feasts/\",\n FeastListView.as_view(),\n name=\"feast-list\",\n ),\n path(\n \"feast/<int:pk>\",\n FeastDetailView.as_view(),\n name=\"feast-detail\",\n ),\n # genre\n path(\n \"genres/\",\n GenreListView.as_view(),\n name=\"genre-list\",\n ),\n path(\n \"genre/<int:pk>\",\n GenreDetailView.as_view(),\n name=\"genre-detail\",\n ),\n # indexer\n path(\n \"indexers/\",\n IndexerListView.as_view(),\n name=\"indexer-list\",\n ),\n # notation\n path(\n \"notation/<int:pk>\",\n NotationDetailView.as_view(),\n name=\"notation-detail\",\n ),\n # office\n path(\n \"offices/\",\n OfficeListView.as_view(),\n name=\"office-list\",\n ),\n path(\n \"office/<int:pk>\",\n OfficeDetailView.as_view(),\n name=\"office-detail\",\n ),\n # provenance\n path(\n \"provenance/<int:pk>\",\n ProvenanceDetailView.as_view(),\n name=\"provenance-detail\",\n ),\n # sequence\n path(\n \"sequences/\",\n SequenceListView.as_view(),\n name=\"sequence-list\",\n ),\n path(\n \"sequence/<int:pk>\",\n SequenceDetailView.as_view(),\n name=\"sequence-detail\",\n ),\n path(\n \"edit-sequence/<int:sequence_id>\",\n SequenceEditView.as_view(),\n name=\"sequence-edit\",\n ),\n # source\n path(\n \"sources/\",\n SourceListView.as_view(),\n name=\"source-list\",\n ),\n path(\n \"source/<int:pk>\",\n SourceDetailView.as_view(),\n name=\"source-detail\",\n ),\n path(\n \"source-create/\",\n SourceCreateView.as_view(),\n name=\"source-create\",\n ),\n path(\n \"edit-source/<int:source_id>\",\n SourceEditView.as_view(),\n name=\"source-edit\",\n ),\n # melody\n path(\n \"melody/\",\n MelodySearchView.as_view(),\n name=\"melody-search\",\n ),\n path(\n \"ajax/melody/<str:cantus_id>\",\n views.ajax_melody_list,\n name=\"ajax-melody\",\n ),\n path(\n \"ajax/melody-search/\",\n views.ajax_melody_search,\n name=\"ajax-melody-search\",\n ),\n # json api\n path(\n \"json-sources/\",\n views.json_sources_export,\n name=\"json-sources-export\",\n ),\n path(\n \"json-node/<str:id>\",\n views.json_node_export,\n name=\"json-node-export\",\n ),\n path(\n \"json-nextchants/<str:cantus_id>\",\n views.json_nextchants,\n name=\"json-nextchants\",\n ),\n path(\n \"json-melody/<str:cantus_id>\",\n views.json_melody_export,\n name=\"json-melody-export\",\n ),\n # misc search\n path(\n \"searchms/<int:source_pk>\",\n ChantSearchMSView.as_view(),\n name=\"chant-search-ms\",\n ),\n path(\n \"ci-search/<str:search_term>\",\n CISearchView.as_view(),\n name=\"ci-search\",\n ),\n path(\n \"ajax/search-bar/<str:search_term>\",\n views.ajax_search_bar,\n name=\"ajax-search-bar\",\n ),\n # misc\n path(\n \"content-statistics\",\n 
views.items_count,\n name=\"items-count\",\n ),\n path(\n \"source/<str:source_id>/csv/\",\n views.csv_export,\n name=\"csv-export\",\n ),\n path(\n \"sites/default/files/csv/<str:source_id>.csv\",\n views.csv_export_redirect_from_old_path,\n name=\"csv-export-old-path\",\n ),\n path(\n \"ajax/concordance/<str:cantus_id>\",\n views.ajax_concordance_list,\n name=\"ajax-concordance\",\n ),\n # content overview (for project managers)\n path(\n \"content-overview/\",\n views.content_overview,\n name=\"content-overview\",\n ),\n # /node/ url redirects\n path(\n \"node/<int:pk>\",\n views.redirect_node_url,\n name=\"redirect-node-url\",\n ),\n # /indexer/ url redirects\n path(\n \"indexer/<int:pk>\",\n views.redirect_indexer,\n name=\"redirect-indexer\",\n ),\n]\n\nhandler404 = \"main_app.views.views.handle404\"\n", "path": "django/cantusdb_project/main_app/urls.py" } ]
diff --git a/django/cantusdb_project/main_app/urls.py b/django/cantusdb_project/main_app/urls.py index 0e40b0ec9..0f355fc53 100644 --- a/django/cantusdb_project/main_app/urls.py +++ b/django/cantusdb_project/main_app/urls.py @@ -319,7 +319,7 @@ ), # misc search path( - "chant-search-ms/<int:source_pk>", + "searchms/<int:source_pk>", ChantSearchMSView.as_view(), name="chant-search-ms", ),
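As a quick illustration (not CantusDB project code), the single-file Django sketch below shows why only the path string needs to change: the route keeps its `chant-search-ms` name, so `reverse()` and `{% url %}` lookups stay valid while the public URL becomes `/searchms/...` to match OldCantus. The stub view is a hypothetical stand-in for `ChantSearchMSView`.

```python
from django.conf import settings

# Configure a throwaway settings module so this file can run on its own.
settings.configure(ROOT_URLCONF=__name__, DEBUG=True)

import django

django.setup()

from django.http import HttpResponse
from django.urls import path, resolve, reverse


def chant_search_ms_stub(request, source_pk):
    # Hypothetical stand-in for ChantSearchMSView; only the routing matters here.
    return HttpResponse(str(source_pk))


urlpatterns = [
    path("searchms/<int:source_pk>", chant_search_ms_stub, name="chant-search-ms"),
]

if __name__ == "__main__":
    print(reverse("chant-search-ms", args=[123610]))  # /searchms/123610
    print(resolve("/searchms/123610").url_name)       # chant-search-ms
```

If old `/chant-search-ms/` links ever need to keep working, a `RedirectView` entry pointing at the same route name would be one possible follow-up, though that is not part of the merged diff above.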
StackStorm__st2-5091
St2Stream service broken when using SSL with mongodb ## SUMMARY This issue is an extension to #4832; however, this time it is the st2stream service. I have looked at the code and can see that the same monkey patch code hasn't been applied to the st2stream app. ### STACKSTORM VERSION Paste the output of ``st2 --version``: 3.3.0 ##### OS, environment, install method Docker compose with the split services and the mongo db references commented out so that an external db can be used: https://github.com/StackStorm/st2-docker/blob/master/docker-compose.yml All other services connect correctly to the mongodb.net test instance, with the exception of st2stream. ## Steps to reproduce the problem Use the docker compose yaml at https://github.com/StackStorm/st2-docker/blob/master/docker-compose.yml, comment out the mongo container and references, adjust files/st2-docker.conf to point to an external DB with SSL = True enabled, then run docker-compose up. ## Expected Results What did you expect to happen when running the steps above? st2stream to operate correctly. ## Actual Results What happened? What output did you get? 2020-11-16 05:48:55,053 WARNING [-] Retry on ConnectionError - Cannot connect to database default : maximum recursion depth exceeded Adding the monkey patch code to the st2stream app resolves the issue (manually injected into the container to test). file: st2stream/cmd/api.py Code: from st2common.util.monkey_patch import monkey_patch monkey_patch()
[ { "content": "# Copyright 2020 The StackStorm Authors.\n# Copyright 2019 Extreme Networks, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport sys\n\nimport eventlet\nfrom oslo_config import cfg\nfrom eventlet import wsgi\n\nfrom st2common import log as logging\nfrom st2common.service_setup import setup as common_setup\nfrom st2common.service_setup import teardown as common_teardown\nfrom st2common.stream.listener import get_listener_if_set\nfrom st2common.util.wsgi import shutdown_server_kill_pending_requests\nfrom st2stream.signal_handlers import register_stream_signal_handlers\nfrom st2stream import config\nconfig.register_opts()\nfrom st2stream import app\n\n__all__ = [\n 'main'\n]\n\n\neventlet.monkey_patch(\n os=True,\n select=True,\n socket=True,\n thread=False if '--use-debugger' in sys.argv else True,\n time=True)\n\nLOG = logging.getLogger(__name__)\n\n# How much time to give to the request in progress to finish in seconds before killing them\nWSGI_SERVER_REQUEST_SHUTDOWN_TIME = 2\n\n\ndef _setup():\n capabilities = {\n 'name': 'stream',\n 'listen_host': cfg.CONF.stream.host,\n 'listen_port': cfg.CONF.stream.port,\n 'type': 'active'\n }\n common_setup(service='stream', config=config, setup_db=True, register_mq_exchanges=True,\n register_signal_handlers=True, register_internal_trigger_types=False,\n run_migrations=False, service_registry=True, capabilities=capabilities)\n\n\ndef _run_server():\n host = cfg.CONF.stream.host\n port = cfg.CONF.stream.port\n\n LOG.info('(PID=%s) ST2 Stream API is serving on http://%s:%s.', os.getpid(), host, port)\n\n max_pool_size = eventlet.wsgi.DEFAULT_MAX_SIMULTANEOUS_REQUESTS\n worker_pool = eventlet.GreenPool(max_pool_size)\n sock = eventlet.listen((host, port))\n\n def queue_shutdown(signal_number, stack_frame):\n eventlet.spawn_n(shutdown_server_kill_pending_requests, sock=sock,\n worker_pool=worker_pool, wait_time=WSGI_SERVER_REQUEST_SHUTDOWN_TIME)\n\n # We register a custom SIGINT handler which allows us to kill long running active requests.\n # Note: Eventually we will support draining (waiting for short-running requests), but we\n # will still want to kill long running stream requests.\n register_stream_signal_handlers(handler_func=queue_shutdown)\n\n wsgi.server(sock, app.setup_app(), custom_pool=worker_pool)\n return 0\n\n\ndef _teardown():\n common_teardown()\n\n\ndef main():\n try:\n _setup()\n return _run_server()\n except SystemExit as exit_code:\n sys.exit(exit_code)\n except KeyboardInterrupt:\n listener = get_listener_if_set(name='stream')\n\n if listener:\n listener.shutdown()\n except Exception:\n LOG.exception('(PID=%s) ST2 Stream API quit due to exception.', os.getpid())\n return 1\n finally:\n _teardown()\n", "path": "st2stream/st2stream/cmd/api.py" } ]
[ { "content": "# Copyright 2020 The StackStorm Authors.\n# Copyright 2019 Extreme Networks, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom st2common.util.monkey_patch import monkey_patch\nmonkey_patch()\n\nimport os\nimport sys\n\nimport eventlet\nfrom oslo_config import cfg\nfrom eventlet import wsgi\n\nfrom st2common import log as logging\nfrom st2common.service_setup import setup as common_setup\nfrom st2common.service_setup import teardown as common_teardown\nfrom st2common.stream.listener import get_listener_if_set\nfrom st2common.util.wsgi import shutdown_server_kill_pending_requests\nfrom st2stream.signal_handlers import register_stream_signal_handlers\nfrom st2stream import config\nconfig.register_opts()\nfrom st2stream import app\n\n__all__ = [\n 'main'\n]\n\n\neventlet.monkey_patch(\n os=True,\n select=True,\n socket=True,\n thread=False if '--use-debugger' in sys.argv else True,\n time=True)\n\nLOG = logging.getLogger(__name__)\n\n# How much time to give to the request in progress to finish in seconds before killing them\nWSGI_SERVER_REQUEST_SHUTDOWN_TIME = 2\n\n\ndef _setup():\n capabilities = {\n 'name': 'stream',\n 'listen_host': cfg.CONF.stream.host,\n 'listen_port': cfg.CONF.stream.port,\n 'type': 'active'\n }\n common_setup(service='stream', config=config, setup_db=True, register_mq_exchanges=True,\n register_signal_handlers=True, register_internal_trigger_types=False,\n run_migrations=False, service_registry=True, capabilities=capabilities)\n\n\ndef _run_server():\n host = cfg.CONF.stream.host\n port = cfg.CONF.stream.port\n\n LOG.info('(PID=%s) ST2 Stream API is serving on http://%s:%s.', os.getpid(), host, port)\n\n max_pool_size = eventlet.wsgi.DEFAULT_MAX_SIMULTANEOUS_REQUESTS\n worker_pool = eventlet.GreenPool(max_pool_size)\n sock = eventlet.listen((host, port))\n\n def queue_shutdown(signal_number, stack_frame):\n eventlet.spawn_n(shutdown_server_kill_pending_requests, sock=sock,\n worker_pool=worker_pool, wait_time=WSGI_SERVER_REQUEST_SHUTDOWN_TIME)\n\n # We register a custom SIGINT handler which allows us to kill long running active requests.\n # Note: Eventually we will support draining (waiting for short-running requests), but we\n # will still want to kill long running stream requests.\n register_stream_signal_handlers(handler_func=queue_shutdown)\n\n wsgi.server(sock, app.setup_app(), custom_pool=worker_pool)\n return 0\n\n\ndef _teardown():\n common_teardown()\n\n\ndef main():\n try:\n _setup()\n return _run_server()\n except SystemExit as exit_code:\n sys.exit(exit_code)\n except KeyboardInterrupt:\n listener = get_listener_if_set(name='stream')\n\n if listener:\n listener.shutdown()\n except Exception:\n LOG.exception('(PID=%s) ST2 Stream API quit due to exception.', os.getpid())\n return 1\n finally:\n _teardown()\n", "path": "st2stream/st2stream/cmd/api.py" } ]
diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 675b6b48c4..39e389323f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -22,6 +22,7 @@ Changed Fixed ~~~~~~~~~ +* Added monkey patch fix to st2stream to enable it to work with mongodb via SSL. (bug fix) #5078 #5091 * Fix nginx buffering long polling stream to client. Instead of waiting for closed connection wait for final event to be sent to client. (bug fix) #4842 #5042 diff --git a/st2stream/st2stream/cmd/api.py b/st2stream/st2stream/cmd/api.py index 1c7d5f4d8b..cc1eec7d17 100644 --- a/st2stream/st2stream/cmd/api.py +++ b/st2stream/st2stream/cmd/api.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +from st2common.util.monkey_patch import monkey_patch +monkey_patch() + import os import sys
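The fix is purely about import order, so here is a generic eventlet sketch (not StackStorm code) of the rule it enforces: patch the standard library before importing anything that builds SSL/socket-backed clients. That is what placing `monkey_patch()` at the very top of `st2stream/cmd/api.py` achieves, on the assumption that `st2common.util.monkey_patch.monkey_patch` wraps `eventlet.monkey_patch` as it does for the other st2 services.

```python
import eventlet

# Patch first: this must run before pymongo/ssl (or anything that imports
# them, such as the service's database setup) is pulled in. Patching too
# late leaves blocking ssl objects captured by already-imported modules,
# which is the kind of failure the issue's recursion error points at.
eventlet.monkey_patch(os=True, select=True, socket=True, thread=True, time=True)

# Imports of database clients and the rest of the service come only after this.
from eventlet import patcher

print(patcher.is_monkey_patched("socket"))  # True
print(patcher.is_monkey_patched("thread"))  # True
```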
alltheplaces__alltheplaces-4303
Domain missing from Holland & Barrett website URLs In the holland_and_barrett spider results, the website values returned are missing the domain, e.g. `"website": "/stores/aylesbury-3180/"`. This is what's in the page's code that the scraper is reading. But presumably AllThePlaces should return a fully qualified URL, i.e. `https://www.hollandandbarrett.com/stores/aylesbury-3180/` in this case. I don't know what the microdata etc. standards say about whether relative URLs are allowed, but perhaps the framework code could be modified to automatically complete a relative URL against the page's URL when one is harvested.
[ { "content": "from scrapy.spiders import SitemapSpider\n\nfrom locations.linked_data_parser import LinkedDataParser\n\n\nclass HollandAndBarrettSpider(SitemapSpider):\n name = \"holland_and_barrett\"\n item_attributes = {\n \"brand\": \"Holland & Barrett\",\n \"brand_wikidata\": \"Q5880870\",\n }\n sitemap_urls = [\n \"https://www.hollandandbarrett.com/sitemap-stores.xml\",\n \"https://www.hollandandbarrett.nl/sitemap-stores.xml\",\n \"https://www.hollandandbarrett.be/sitemap-stores.xml\",\n \"https://www.hollandandbarrett.ie/sitemap-stores.xml\",\n ]\n sitemap_rules = [(\"/stores/\", \"parse\"), (\"/winkels/\", \"parse\")]\n download_delay = 1.0\n\n def parse(self, response):\n yield LinkedDataParser.parse(response, \"LocalBusiness\")\n", "path": "locations/spiders/holland_and_barrett.py" } ]
[ { "content": "from scrapy.spiders import SitemapSpider\n\nfrom locations.linked_data_parser import LinkedDataParser\n\n\nclass HollandAndBarrettSpider(SitemapSpider):\n name = \"holland_and_barrett\"\n item_attributes = {\n \"brand\": \"Holland & Barrett\",\n \"brand_wikidata\": \"Q5880870\",\n }\n sitemap_urls = [\n \"https://www.hollandandbarrett.com/sitemap-stores.xml\",\n \"https://www.hollandandbarrett.nl/sitemap-stores.xml\",\n \"https://www.hollandandbarrett.be/sitemap-stores.xml\",\n \"https://www.hollandandbarrett.ie/sitemap-stores.xml\",\n ]\n sitemap_rules = [(\"/stores/\", \"parse\"), (\"/winkels/\", \"parse\")]\n download_delay = 1.0\n\n def parse(self, response):\n item = LinkedDataParser.parse(response, \"LocalBusiness\")\n item[\"website\"] = response.urljoin(item[\"website\"])\n yield item\n", "path": "locations/spiders/holland_and_barrett.py" } ]
diff --git a/locations/spiders/holland_and_barrett.py b/locations/spiders/holland_and_barrett.py index d206fef221f..0ecf6ab804e 100644 --- a/locations/spiders/holland_and_barrett.py +++ b/locations/spiders/holland_and_barrett.py @@ -19,4 +19,6 @@ class HollandAndBarrettSpider(SitemapSpider): download_delay = 1.0 def parse(self, response): - yield LinkedDataParser.parse(response, "LocalBusiness") + item = LinkedDataParser.parse(response, "LocalBusiness") + item["website"] = response.urljoin(item["website"]) + yield item
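For context, Scrapy's `response.urljoin()` is a thin wrapper around `urllib.parse.urljoin` with the page URL as base, so the one-line fix prepends the store page's domain to relative `website` values and leaves absolute ones untouched. The standalone sketch below needs no Scrapy install; it uses the URLs from the issue plus one hypothetical absolute value.

```python
from urllib.parse import urljoin

# Base is the store page the spider is parsing.
page_url = "https://www.hollandandbarrett.com/stores/aylesbury-3180/"

# Relative value as harvested from the page's structured data:
print(urljoin(page_url, "/stores/aylesbury-3180/"))
# -> https://www.hollandandbarrett.com/stores/aylesbury-3180/

# An already-absolute value (hypothetical) passes through unchanged, so the
# fix is safe for the .nl/.be/.ie sitemaps as well:
print(urljoin(page_url, "https://www.hollandandbarrett.nl/winkels/voorbeeld-1/"))
# -> https://www.hollandandbarrett.nl/winkels/voorbeeld-1/
```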
internetarchive__openlibrary-5645
Image uploader does not recognise uploaded file As of today (8-09-2021) the image uploader does not recognise that an image has been selected and uploaded. Instead, it displays "Please provide an image URL" after hitting submit. ### Steps to Reproduce 1. Go to ...any edition 2. Do ...upload an image as a cover and submit. * Actual: "Please provide an image URL" * Expected: Image should be added as cover. ### Details - **Logged in (Y/N)?** y - **Browser type/version?** Chrome Version 92.0.4515.159 (Official Build) (x86_64) - **Operating system?** MacOS - **Environment (prod/dev/local)?** prod
[ { "content": "\"\"\"Handle book cover/author photo upload.\n\"\"\"\nfrom logging import getLogger\n\nimport requests\nimport six\nimport web\nfrom six import BytesIO\n\nfrom infogami.utils import delegate\nfrom infogami.utils.view import safeint\nfrom openlibrary import accounts\nfrom openlibrary.plugins.upstream.models import Image\nfrom openlibrary.plugins.upstream.utils import get_coverstore_url, render_template\n\nlogger = getLogger(\"openlibrary.plugins.upstream.covers\")\ndef setup():\n pass\n\nclass add_cover(delegate.page):\n path = \"(/books/OL\\d+M)/add-cover\"\n cover_category = \"b\"\n\n def GET(self, key):\n book = web.ctx.site.get(key)\n return render_template('covers/add', book)\n\n def POST(self, key):\n book = web.ctx.site.get(key)\n if not book:\n raise web.notfound(\"\")\n\n i = web.input(file={}, url=\"\")\n\n # remove references to field storage objects\n web.ctx.pop(\"_fieldstorage\", None)\n\n data = self.upload(key, i)\n coverid = data.get('id')\n\n if coverid:\n self.save(book, coverid, url=i.url)\n cover = Image(web.ctx.site, \"b\", coverid)\n return render_template(\"covers/saved\", cover)\n else:\n return render_template(\"covers/add\", book, {'url': i.url}, data)\n\n def upload(self, key, i):\n \"\"\"Uploads a cover to coverstore and returns the response.\"\"\"\n olid = key.split(\"/\")[-1]\n\n if i.file is not None and hasattr(i.file, 'value'):\n data = i.file.value\n else:\n data = None\n\n if i.url and i.url.strip() == \"http://\":\n i.url = \"\"\n\n user = accounts.get_current_user()\n params = {\n \"author\": user and user.key,\n \"source_url\": i.url,\n \"olid\": olid,\n \"ip\": web.ctx.ip\n }\n\n upload_url = '%s/%s/upload2' % (\n get_coverstore_url(), self.cover_category)\n\n if upload_url.startswith(\"//\"):\n upload_url = \"http:\" + upload_url\n\n try:\n files = {'data': BytesIO(data)}\n response = requests.post(upload_url, data=params, files=files)\n return web.storage(response.json())\n except requests.HTTPError as e:\n logger.exception(\"Covers upload failed\")\n return web.storage({'error': str(e)})\n\n def save(self, book, coverid, url=None):\n book.covers = [coverid] + [cover.id for cover in book.get_covers()]\n book._save(\"Added new cover\", action=\"add-cover\", data={\"url\": url})\n\nclass add_work_cover(add_cover):\n path = \"(/works/OL\\d+W)/add-cover\"\n cover_category = \"w\"\n\n def upload(self, key, i):\n if \"coverid\" in i and safeint(i.coverid):\n return web.storage(id=int(i.coverid))\n else:\n return add_cover.upload(self, key, i)\n\nclass add_photo(add_cover):\n path = \"(/authors/OL\\d+A)/add-photo\"\n cover_category = \"a\"\n\n def save(self, author, photoid, url=None):\n author.photos = [photoid] + [photo.id for photo in author.get_photos()]\n author._save(\"Added new photo\", action=\"add-photo\", data={\"url\": url})\n\nclass manage_covers(delegate.page):\n path = \"(/books/OL\\d+M)/manage-covers\"\n def GET(self, key):\n book = web.ctx.site.get(key)\n if not book:\n raise web.notfound()\n return render_template(\"covers/manage\", key, self.get_images(book))\n\n def get_images(self, book):\n return book.get_covers()\n\n def get_image(self, book):\n return book.get_cover()\n\n def save_images(self, book, covers):\n book.covers = covers\n book._save('Update covers')\n\n def POST(self, key):\n book = web.ctx.site.get(key)\n if not book:\n raise web.notfound()\n\n images = web.input(image=[]).image\n if '-' in images:\n images = [int(id) for id in images[:images.index('-')]]\n self.save_images(book, images)\n return 
render_template(\"covers/saved\", self.get_image(book), showinfo=False)\n else:\n # ERROR\n pass\n\nclass manage_work_covers(manage_covers):\n path = \"(/works/OL\\d+W)/manage-covers\"\n\n\nclass manage_photos(manage_covers):\n path = \"(/authors/OL\\d+A)/manage-photos\"\n\n def get_images(self, author):\n return author.get_photos()\n\n def get_image(self, author):\n return author.get_photo()\n\n def save_images(self, author, photos):\n author.photos = photos\n author._save('Update photos')\n", "path": "openlibrary/plugins/upstream/covers.py" } ]
[ { "content": "\"\"\"Handle book cover/author photo upload.\n\"\"\"\nfrom logging import getLogger\n\nimport requests\nimport six\nimport web\nfrom six import BytesIO\n\nfrom infogami.utils import delegate\nfrom infogami.utils.view import safeint\nfrom openlibrary import accounts\nfrom openlibrary.plugins.upstream.models import Image\nfrom openlibrary.plugins.upstream.utils import get_coverstore_url, render_template\n\nlogger = getLogger(\"openlibrary.plugins.upstream.covers\")\ndef setup():\n pass\n\nclass add_cover(delegate.page):\n path = \"(/books/OL\\d+M)/add-cover\"\n cover_category = \"b\"\n\n def GET(self, key):\n book = web.ctx.site.get(key)\n return render_template('covers/add', book)\n\n def POST(self, key):\n book = web.ctx.site.get(key)\n if not book:\n raise web.notfound(\"\")\n\n i = web.input(file={}, url=\"\")\n\n # remove references to field storage objects\n web.ctx.pop(\"_fieldstorage\", None)\n\n data = self.upload(key, i)\n coverid = data.get('id')\n\n if coverid:\n self.save(book, coverid, url=i.url)\n cover = Image(web.ctx.site, \"b\", coverid)\n return render_template(\"covers/saved\", cover)\n else:\n return render_template(\"covers/add\", book, {'url': i.url}, data)\n\n def upload(self, key, i):\n \"\"\"Uploads a cover to coverstore and returns the response.\"\"\"\n olid = key.split(\"/\")[-1]\n\n if i.file is not None and hasattr(i.file, 'value'):\n data = i.file.value\n else:\n data = None\n\n if i.url and i.url.strip() == \"https://\":\n i.url = \"\"\n\n user = accounts.get_current_user()\n params = {\n \"author\": user and user.key,\n \"source_url\": i.url,\n \"olid\": olid,\n \"ip\": web.ctx.ip\n }\n\n upload_url = '%s/%s/upload2' % (\n get_coverstore_url(), self.cover_category)\n\n if upload_url.startswith(\"//\"):\n upload_url = \"http:\" + upload_url\n\n try:\n files = {'data': BytesIO(data)}\n response = requests.post(upload_url, data=params, files=files)\n return web.storage(response.json())\n except requests.HTTPError as e:\n logger.exception(\"Covers upload failed\")\n return web.storage({'error': str(e)})\n\n def save(self, book, coverid, url=None):\n book.covers = [coverid] + [cover.id for cover in book.get_covers()]\n book._save(\"Added new cover\", action=\"add-cover\", data={\"url\": url})\n\nclass add_work_cover(add_cover):\n path = \"(/works/OL\\d+W)/add-cover\"\n cover_category = \"w\"\n\n def upload(self, key, i):\n if \"coverid\" in i and safeint(i.coverid):\n return web.storage(id=int(i.coverid))\n else:\n return add_cover.upload(self, key, i)\n\nclass add_photo(add_cover):\n path = \"(/authors/OL\\d+A)/add-photo\"\n cover_category = \"a\"\n\n def save(self, author, photoid, url=None):\n author.photos = [photoid] + [photo.id for photo in author.get_photos()]\n author._save(\"Added new photo\", action=\"add-photo\", data={\"url\": url})\n\nclass manage_covers(delegate.page):\n path = \"(/books/OL\\d+M)/manage-covers\"\n def GET(self, key):\n book = web.ctx.site.get(key)\n if not book:\n raise web.notfound()\n return render_template(\"covers/manage\", key, self.get_images(book))\n\n def get_images(self, book):\n return book.get_covers()\n\n def get_image(self, book):\n return book.get_cover()\n\n def save_images(self, book, covers):\n book.covers = covers\n book._save('Update covers')\n\n def POST(self, key):\n book = web.ctx.site.get(key)\n if not book:\n raise web.notfound()\n\n images = web.input(image=[]).image\n if '-' in images:\n images = [int(id) for id in images[:images.index('-')]]\n self.save_images(book, images)\n return 
render_template(\"covers/saved\", self.get_image(book), showinfo=False)\n else:\n # ERROR\n pass\n\nclass manage_work_covers(manage_covers):\n path = \"(/works/OL\\d+W)/manage-covers\"\n\n\nclass manage_photos(manage_covers):\n path = \"(/authors/OL\\d+A)/manage-photos\"\n\n def get_images(self, author):\n return author.get_photos()\n\n def get_image(self, author):\n return author.get_photo()\n\n def save_images(self, author, photos):\n author.photos = photos\n author._save('Update photos')\n", "path": "openlibrary/plugins/upstream/covers.py" } ]
diff --git a/openlibrary/plugins/upstream/covers.py b/openlibrary/plugins/upstream/covers.py index f27e6609d21..9c98ac0bf15 100644 --- a/openlibrary/plugins/upstream/covers.py +++ b/openlibrary/plugins/upstream/covers.py @@ -54,7 +54,7 @@ def upload(self, key, i): else: data = None - if i.url and i.url.strip() == "http://": + if i.url and i.url.strip() == "https://": i.url = "" user = accounts.get_current_user()
wagtail__wagtail-1791
Cachebusting query parameter (e.g. _=1441835249458) not ignored by api From the [documentation for jQuery.ajax, under "cache"](http://api.jquery.com/jquery.ajax/): > Setting cache to false will only work correctly with HEAD and GET requests. It works by appending "_={timestamp}" to the GET parameters. The parameter is not needed for other types of requests, except in IE8 when a POST is made to a URL that has already been requested by a GET. It seems like it's standard practice to ignore the underscore keyword. Unless I'm mistaken this is an oversight and not a disagreement on the principle of the thing. Reproduce: make an Ajax call to any wagtail API endpoint with the cache flag set to false. Or just navigate to something like `http://localhost:8000/api/v1/pages/?type=home.HomePage&_=1441835249458` You'll get this message: ``` { "message": "query parameter is not an operation or a recognised field: _" } ```
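For context, a stripped-down model of the validation that produces this message (patterned after `check_query_parameters` in the endpoint code below; names and field sets are simplified): any query key that is neither a known operation nor an exposed field is rejected, so jQuery's `_` cache-buster trips it. The accompanying diff fixes this by adding `'_'` to `known_query_parameters`.

```python
# Simplified model of BaseAPIEndpoint.check_query_parameters (not the real
# Wagtail code): reject any query key that is not a field or a known operation.
KNOWN_QUERY_PARAMETERS = {'limit', 'offset', 'fields', 'order', 'search'}
API_FIELDS = {'title'}

def check_query_parameters(query_keys):
    allowed = API_FIELDS | KNOWN_QUERY_PARAMETERS | {'id'}
    unknown = set(query_keys) - allowed
    if unknown:
        raise ValueError(
            "query parameter is not an operation or a recognised field: %s"
            % ', '.join(sorted(unknown)))

# jQuery's cache-buster is rejected...
try:
    check_query_parameters(['fields', '_'])
except ValueError as exc:
    print(exc)  # query parameter is not an operation or a recognised field: _

# ...and accepted once '_' is treated as a known parameter, as the diff below does.
KNOWN_QUERY_PARAMETERS.add('_')
check_query_parameters(['fields', '_'])  # no longer raises
```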
[ { "content": "from __future__ import absolute_import\n\nfrom collections import OrderedDict\n\nfrom django.conf.urls import url\nfrom django.http import Http404\n\nfrom rest_framework import status\nfrom rest_framework.response import Response\nfrom rest_framework.viewsets import GenericViewSet\n\nfrom wagtail.wagtailcore.models import Page\nfrom wagtail.wagtailimages.models import get_image_model\nfrom wagtail.wagtaildocs.models import Document\nfrom wagtail.wagtailcore.utils import resolve_model_string\n\nfrom .filters import (\n FieldsFilter, OrderingFilter, SearchFilter,\n ChildOfFilter, DescendantOfFilter\n)\nfrom .renderers import WagtailJSONRenderer\nfrom .pagination import WagtailPagination\nfrom .serializers import BaseSerializer, PageSerializer, DocumentSerializer, ImageSerializer, get_serializer_class\nfrom .utils import BadRequestError\n\n\nclass BaseAPIEndpoint(GenericViewSet):\n renderer_classes = [WagtailJSONRenderer]\n pagination_class = WagtailPagination\n base_serializer_class = BaseSerializer\n filter_classes = []\n queryset = None # Set on subclasses or implement `get_queryset()`.\n\n known_query_parameters = frozenset([\n 'limit',\n 'offset',\n 'fields',\n 'order',\n 'search',\n ])\n extra_api_fields = []\n name = None # Set on subclass.\n\n def listing_view(self, request):\n queryset = self.get_queryset()\n self.check_query_parameters(queryset)\n queryset = self.filter_queryset(queryset)\n queryset = self.paginate_queryset(queryset)\n serializer = self.get_serializer(queryset, many=True)\n return self.get_paginated_response(serializer.data)\n\n def detail_view(self, request, pk):\n instance = self.get_object()\n serializer = self.get_serializer(instance)\n return Response(serializer.data)\n\n def handle_exception(self, exc):\n if isinstance(exc, Http404):\n data = {'message': str(exc)}\n return Response(data, status=status.HTTP_404_NOT_FOUND)\n elif isinstance(exc, BadRequestError):\n data = {'message': str(exc)}\n return Response(data, status=status.HTTP_400_BAD_REQUEST)\n return super(BaseAPIEndpoint, self).handle_exception(exc)\n\n def get_api_fields(self, model):\n \"\"\"\n This returns a list of field names that are allowed to\n be used in the API (excluding the id field).\n \"\"\"\n api_fields = self.extra_api_fields[:]\n\n if hasattr(model, 'api_fields'):\n api_fields.extend(model.api_fields)\n\n return api_fields\n\n def check_query_parameters(self, queryset):\n \"\"\"\n Ensure that only valid query paramters are included in the URL.\n \"\"\"\n query_parameters = set(self.request.GET.keys())\n\n # All query paramters must be either a field or an operation\n allowed_query_parameters = set(self.get_api_fields(queryset.model)).union(self.known_query_parameters).union({'id'})\n unknown_parameters = query_parameters - allowed_query_parameters\n if unknown_parameters:\n raise BadRequestError(\"query parameter is not an operation or a recognised field: %s\" % ', '.join(sorted(unknown_parameters)))\n\n def get_serializer_class(self):\n request = self.request\n\n # Get model\n if self.action == 'listing_view':\n model = self.get_queryset().model\n else:\n model = type(self.get_object())\n\n # Get all available fields\n all_fields = self.get_api_fields(model)\n all_fields = list(OrderedDict.fromkeys(all_fields)) # Removes any duplicates in case the developer put \"title\" in api_fields\n\n if self.action == 'listing_view':\n # Listing views just show the title field and any other allowed field the user specified\n if 'fields' in request.GET:\n fields = 
set(request.GET['fields'].split(','))\n else:\n fields = {'title'}\n\n unknown_fields = fields - set(all_fields)\n\n if unknown_fields:\n raise BadRequestError(\"unknown fields: %s\" % ', '.join(sorted(unknown_fields)))\n\n # Reorder fields so it matches the order of all_fields\n fields = [field for field in all_fields if field in fields]\n else:\n # Detail views show all fields all the time\n fields = all_fields\n\n # Always show id and meta first\n fields = ['id', 'meta'] + fields\n\n # If showing details, add the parent field\n if isinstance(self, PagesAPIEndpoint) and self.get_serializer_context().get('show_details', False):\n fields.insert(2, 'parent')\n\n return get_serializer_class(model, fields, base=self.base_serializer_class)\n\n def get_serializer_context(self):\n \"\"\"\n The serialization context differs between listing and detail views.\n \"\"\"\n request = self.request\n\n if self.action == 'listing_view':\n return {\n 'request': request,\n 'view': self,\n }\n\n return {\n 'request': request,\n 'view': self,\n 'show_details': True\n }\n\n def get_renderer_context(self):\n context = super(BaseAPIEndpoint, self).get_renderer_context()\n context['endpoints'] = [\n PagesAPIEndpoint,\n ImagesAPIEndpoint,\n DocumentsAPIEndpoint\n ]\n return context\n\n @classmethod\n def get_urlpatterns(cls):\n \"\"\"\n This returns a list of URL patterns for the endpoint\n \"\"\"\n return [\n url(r'^$', cls.as_view({'get': 'listing_view'}), name='listing'),\n url(r'^(?P<pk>\\d+)/$', cls.as_view({'get': 'detail_view'}), name='detail'),\n ]\n\n @classmethod\n def has_model(cls, model):\n return NotImplemented\n\n\nclass PagesAPIEndpoint(BaseAPIEndpoint):\n base_serializer_class = PageSerializer\n filter_backends = [\n FieldsFilter,\n ChildOfFilter,\n DescendantOfFilter,\n OrderingFilter,\n SearchFilter\n ]\n known_query_parameters = BaseAPIEndpoint.known_query_parameters.union([\n 'type',\n 'child_of',\n 'descendant_of',\n ])\n extra_api_fields = ['title']\n name = 'pages'\n\n def get_queryset(self):\n request = self.request\n\n # Allow pages to be filtered to a specific type\n if 'type' not in request.GET:\n model = Page\n else:\n model_name = request.GET['type']\n try:\n model = resolve_model_string(model_name)\n except LookupError:\n raise BadRequestError(\"type doesn't exist\")\n if not issubclass(model, Page):\n raise BadRequestError(\"type doesn't exist\")\n\n # Get live pages that are not in a private section\n queryset = model.objects.public().live()\n\n # Filter by site\n queryset = queryset.descendant_of(request.site.root_page, inclusive=True)\n\n return queryset\n\n def get_object(self):\n base = super(PagesAPIEndpoint, self).get_object()\n return base.specific\n\n @classmethod\n def has_model(cls, model):\n return issubclass(model, Page)\n\n\nclass ImagesAPIEndpoint(BaseAPIEndpoint):\n queryset = get_image_model().objects.all().order_by('id')\n base_serializer_class = ImageSerializer\n filter_backends = [FieldsFilter, OrderingFilter, SearchFilter]\n extra_api_fields = ['title', 'tags', 'width', 'height']\n name = 'images'\n\n @classmethod\n def has_model(cls, model):\n return model == get_image_model()\n\n\nclass DocumentsAPIEndpoint(BaseAPIEndpoint):\n queryset = Document.objects.all().order_by('id')\n base_serializer_class = DocumentSerializer\n filter_backends = [FieldsFilter, OrderingFilter, SearchFilter]\n extra_api_fields = ['title', 'tags']\n name = 'documents'\n\n @classmethod\n def has_model(cls, model):\n return model == Document\n", "path": 
"wagtail/contrib/wagtailapi/endpoints.py" } ]
[ { "content": "from __future__ import absolute_import\n\nfrom collections import OrderedDict\n\nfrom django.conf.urls import url\nfrom django.http import Http404\n\nfrom rest_framework import status\nfrom rest_framework.response import Response\nfrom rest_framework.viewsets import GenericViewSet\n\nfrom wagtail.wagtailcore.models import Page\nfrom wagtail.wagtailimages.models import get_image_model\nfrom wagtail.wagtaildocs.models import Document\nfrom wagtail.wagtailcore.utils import resolve_model_string\n\nfrom .filters import (\n FieldsFilter, OrderingFilter, SearchFilter,\n ChildOfFilter, DescendantOfFilter\n)\nfrom .renderers import WagtailJSONRenderer\nfrom .pagination import WagtailPagination\nfrom .serializers import BaseSerializer, PageSerializer, DocumentSerializer, ImageSerializer, get_serializer_class\nfrom .utils import BadRequestError\n\n\nclass BaseAPIEndpoint(GenericViewSet):\n renderer_classes = [WagtailJSONRenderer]\n pagination_class = WagtailPagination\n base_serializer_class = BaseSerializer\n filter_classes = []\n queryset = None # Set on subclasses or implement `get_queryset()`.\n\n known_query_parameters = frozenset([\n 'limit',\n 'offset',\n 'fields',\n 'order',\n 'search',\n\n # Used by jQuery for cache-busting. See #1671\n '_',\n ])\n extra_api_fields = []\n name = None # Set on subclass.\n\n def listing_view(self, request):\n queryset = self.get_queryset()\n self.check_query_parameters(queryset)\n queryset = self.filter_queryset(queryset)\n queryset = self.paginate_queryset(queryset)\n serializer = self.get_serializer(queryset, many=True)\n return self.get_paginated_response(serializer.data)\n\n def detail_view(self, request, pk):\n instance = self.get_object()\n serializer = self.get_serializer(instance)\n return Response(serializer.data)\n\n def handle_exception(self, exc):\n if isinstance(exc, Http404):\n data = {'message': str(exc)}\n return Response(data, status=status.HTTP_404_NOT_FOUND)\n elif isinstance(exc, BadRequestError):\n data = {'message': str(exc)}\n return Response(data, status=status.HTTP_400_BAD_REQUEST)\n return super(BaseAPIEndpoint, self).handle_exception(exc)\n\n def get_api_fields(self, model):\n \"\"\"\n This returns a list of field names that are allowed to\n be used in the API (excluding the id field).\n \"\"\"\n api_fields = self.extra_api_fields[:]\n\n if hasattr(model, 'api_fields'):\n api_fields.extend(model.api_fields)\n\n return api_fields\n\n def check_query_parameters(self, queryset):\n \"\"\"\n Ensure that only valid query paramters are included in the URL.\n \"\"\"\n query_parameters = set(self.request.GET.keys())\n\n # All query paramters must be either a field or an operation\n allowed_query_parameters = set(self.get_api_fields(queryset.model)).union(self.known_query_parameters).union({'id'})\n unknown_parameters = query_parameters - allowed_query_parameters\n if unknown_parameters:\n raise BadRequestError(\"query parameter is not an operation or a recognised field: %s\" % ', '.join(sorted(unknown_parameters)))\n\n def get_serializer_class(self):\n request = self.request\n\n # Get model\n if self.action == 'listing_view':\n model = self.get_queryset().model\n else:\n model = type(self.get_object())\n\n # Get all available fields\n all_fields = self.get_api_fields(model)\n all_fields = list(OrderedDict.fromkeys(all_fields)) # Removes any duplicates in case the developer put \"title\" in api_fields\n\n if self.action == 'listing_view':\n # Listing views just show the title field and any other allowed field the user 
specified\n if 'fields' in request.GET:\n fields = set(request.GET['fields'].split(','))\n else:\n fields = {'title'}\n\n unknown_fields = fields - set(all_fields)\n\n if unknown_fields:\n raise BadRequestError(\"unknown fields: %s\" % ', '.join(sorted(unknown_fields)))\n\n # Reorder fields so it matches the order of all_fields\n fields = [field for field in all_fields if field in fields]\n else:\n # Detail views show all fields all the time\n fields = all_fields\n\n # Always show id and meta first\n fields = ['id', 'meta'] + fields\n\n # If showing details, add the parent field\n if isinstance(self, PagesAPIEndpoint) and self.get_serializer_context().get('show_details', False):\n fields.insert(2, 'parent')\n\n return get_serializer_class(model, fields, base=self.base_serializer_class)\n\n def get_serializer_context(self):\n \"\"\"\n The serialization context differs between listing and detail views.\n \"\"\"\n request = self.request\n\n if self.action == 'listing_view':\n return {\n 'request': request,\n 'view': self,\n }\n\n return {\n 'request': request,\n 'view': self,\n 'show_details': True\n }\n\n def get_renderer_context(self):\n context = super(BaseAPIEndpoint, self).get_renderer_context()\n context['endpoints'] = [\n PagesAPIEndpoint,\n ImagesAPIEndpoint,\n DocumentsAPIEndpoint\n ]\n return context\n\n @classmethod\n def get_urlpatterns(cls):\n \"\"\"\n This returns a list of URL patterns for the endpoint\n \"\"\"\n return [\n url(r'^$', cls.as_view({'get': 'listing_view'}), name='listing'),\n url(r'^(?P<pk>\\d+)/$', cls.as_view({'get': 'detail_view'}), name='detail'),\n ]\n\n @classmethod\n def has_model(cls, model):\n return NotImplemented\n\n\nclass PagesAPIEndpoint(BaseAPIEndpoint):\n base_serializer_class = PageSerializer\n filter_backends = [\n FieldsFilter,\n ChildOfFilter,\n DescendantOfFilter,\n OrderingFilter,\n SearchFilter\n ]\n known_query_parameters = BaseAPIEndpoint.known_query_parameters.union([\n 'type',\n 'child_of',\n 'descendant_of',\n ])\n extra_api_fields = ['title']\n name = 'pages'\n\n def get_queryset(self):\n request = self.request\n\n # Allow pages to be filtered to a specific type\n if 'type' not in request.GET:\n model = Page\n else:\n model_name = request.GET['type']\n try:\n model = resolve_model_string(model_name)\n except LookupError:\n raise BadRequestError(\"type doesn't exist\")\n if not issubclass(model, Page):\n raise BadRequestError(\"type doesn't exist\")\n\n # Get live pages that are not in a private section\n queryset = model.objects.public().live()\n\n # Filter by site\n queryset = queryset.descendant_of(request.site.root_page, inclusive=True)\n\n return queryset\n\n def get_object(self):\n base = super(PagesAPIEndpoint, self).get_object()\n return base.specific\n\n @classmethod\n def has_model(cls, model):\n return issubclass(model, Page)\n\n\nclass ImagesAPIEndpoint(BaseAPIEndpoint):\n queryset = get_image_model().objects.all().order_by('id')\n base_serializer_class = ImageSerializer\n filter_backends = [FieldsFilter, OrderingFilter, SearchFilter]\n extra_api_fields = ['title', 'tags', 'width', 'height']\n name = 'images'\n\n @classmethod\n def has_model(cls, model):\n return model == get_image_model()\n\n\nclass DocumentsAPIEndpoint(BaseAPIEndpoint):\n queryset = Document.objects.all().order_by('id')\n base_serializer_class = DocumentSerializer\n filter_backends = [FieldsFilter, OrderingFilter, SearchFilter]\n extra_api_fields = ['title', 'tags']\n name = 'documents'\n\n @classmethod\n def has_model(cls, model):\n return model == 
Document\n", "path": "wagtail/contrib/wagtailapi/endpoints.py" } ]
diff --git a/wagtail/contrib/wagtailapi/endpoints.py b/wagtail/contrib/wagtailapi/endpoints.py index c6a2489fccf7..26846d598b86 100644 --- a/wagtail/contrib/wagtailapi/endpoints.py +++ b/wagtail/contrib/wagtailapi/endpoints.py @@ -37,6 +37,9 @@ class BaseAPIEndpoint(GenericViewSet): 'fields', 'order', 'search', + + # Used by jQuery for cache-busting. See #1671 + '_', ]) extra_api_fields = [] name = None # Set on subclass.
kymatio__kymatio-352
ENH+TST find a way of testing GPU code With not too much investment in 💲 💰 it should be possible to set up a `jenkins` testing suite on Amazon AWS: the idea is to have a micro instance that costs 1c/h run the Jenkins server. When tests need to run, it should spawn a couple of GPU machines with different GPUs, ideally as spot instances, run the tests, and then shut them down again. I looked into this at the very beginning of `kymatio`, but I don't really know how to set this up yet. If anybody has experience with this, feel free to try! :)
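As a rough illustration of the spawn-and-tear-down part of this idea (purely hypothetical: the AMI, instance type, key pair, and the wiring into Jenkins are all placeholders, and the diff in this entry ultimately adds a plain Jenkinsfile with Docker agents instead), the spot-instance lifecycle could be scripted with boto3:

```python
# Hypothetical sketch: request a GPU spot instance for a test run, then
# terminate it so it stops costing money. All IDs/types are placeholders.
import boto3

ec2 = boto3.client("ec2", region_name="us-east-1")

def run_gpu_test_worker():
    request = ec2.request_spot_instances(
        InstanceCount=1,
        Type="one-time",
        LaunchSpecification={
            "ImageId": "ami-0123456789abcdef0",  # placeholder GPU-enabled AMI
            "InstanceType": "p2.xlarge",         # placeholder GPU instance type
            "KeyName": "jenkins-worker",         # placeholder key pair
        },
    )
    request_id = request["SpotInstanceRequests"][0]["SpotInstanceRequestId"]

    # ... wait for fulfilment, attach the instance as a Jenkins agent,
    # run the test suite on it, collect the results ...

    described = ec2.describe_spot_instance_requests(
        SpotInstanceRequestIds=[request_id])
    instance_id = described["SpotInstanceRequests"][0].get("InstanceId")
    if instance_id:
        ec2.terminate_instances(InstanceIds=[instance_id])
```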
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport csv\nimport importlib\nimport os\nimport shutil\nimport sys\nfrom setuptools import setup, find_packages\n\n# Constants\nDISTNAME = 'kymatio'\nDESCRIPTION = 'Wavelet scattering transforms in Python with GPU acceleration'\nURL = 'https://www.kymat.io'\nLICENSE = 'BSD-3-Clause'\n\n\n# Parse description\nwith open('README.md') as f:\n README = f.read().split('\\n')\n LONG_DESCRIPTION = '\\n'.join([x for x in README if not x[:3]=='[!['])\n\n\n# Parse version.py\nkymatio_version_spec = importlib.util.spec_from_file_location(\n 'kymatio_version', 'kymatio/version.py')\nkymatio_version_module = importlib.util.module_from_spec(kymatio_version_spec)\nkymatio_version_spec.loader.exec_module(kymatio_version_module)\nVERSION = kymatio_version_module.version\n\n\n# Parse requirements.txt\nwith open('requirements.txt', 'r') as f:\n REQUIREMENTS = f.read().split('\\n')\n\n\nsetup_info = dict(\n # Metadata\n name=DISTNAME,\n version=VERSION,\n author=('Edouard Oyallon, Eugene Belilovsky, Sergey Zagoruyko, '\n 'Michael Eickenberg, Mathieu Andreux, Georgios Exarchakis, '\n 'Louis Thiry, Vincent Lostanlen, Joakim Andén, '\n 'Tomás Angles, Gabriel Huang, Roberto Leonarduzzi'),\n author_email=('[email protected], [email protected], '\n '[email protected], [email protected], '\n '[email protected], [email protected], '\n '[email protected], [email protected], [email protected], '\n '[email protected], [email protected], [email protected]'),\n url=URL,\n download_url='https://github.com/kymatio/kymatio/releases',\n project_urls={\n 'Documentation': 'https://www.kymat.io/codereference.html',\n 'Source': 'https://github.com/kymatio/kymatio/',\n 'Tracker': 'https://github.com/kymatio/kymatio/issues',\n 'Authors': 'https://github.com/kymatio/kymatio/blob/master/AUTHORS.md'\n },\n classifiers=['Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: BSD License',\n 'Natural Language :: English',\n 'Operating System :: MacOS',\n 'Operating System :: POSIX :: Linux',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Topic :: Multimedia :: Graphics :: 3D Modeling',\n 'Topic :: Multimedia :: Sound/Audio :: Analysis',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Scientific/Engineering :: Chemistry',\n 'Topic :: Scientific/Engineering :: Image Recognition',\n 'Topic :: Scientific/Engineering :: Information Analysis',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Scientific/Engineering :: Physics',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n long_description_content_type='text/markdown',\n python_requires='>=3.5',\n license=LICENSE,\n packages=find_packages(exclude=('test',)),\n install_requires=REQUIREMENTS,\n zip_safe=True,\n)\n\nsetup(**setup_info)\n", "path": "setup.py" } ]
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport csv\nimport importlib\nimport os\nimport shutil\nimport sys\nfrom setuptools import setup, find_packages\n\n# Constants\nDISTNAME = 'kymatio'\nDESCRIPTION = 'Wavelet scattering transforms in Python with GPU acceleration'\nURL = 'https://www.kymat.io'\nLICENSE = 'BSD-3-Clause'\n\n\n# Parse description\nwith open('README.md', encoding='utf8') as f:\n README = f.read().split('\\n')\n LONG_DESCRIPTION = '\\n'.join([x for x in README if not x[:3]=='[!['])\n\n\n# Parse version.py\nkymatio_version_spec = importlib.util.spec_from_file_location(\n 'kymatio_version', 'kymatio/version.py')\nkymatio_version_module = importlib.util.module_from_spec(kymatio_version_spec)\nkymatio_version_spec.loader.exec_module(kymatio_version_module)\nVERSION = kymatio_version_module.version\n\n\n# Parse requirements.txt\nwith open('requirements.txt', 'r') as f:\n REQUIREMENTS = f.read().split('\\n')\n\n\nsetup_info = dict(\n # Metadata\n name=DISTNAME,\n version=VERSION,\n author=('Edouard Oyallon, Eugene Belilovsky, Sergey Zagoruyko, '\n 'Michael Eickenberg, Mathieu Andreux, Georgios Exarchakis, '\n 'Louis Thiry, Vincent Lostanlen, Joakim Andén, '\n 'Tomás Angles, Gabriel Huang, Roberto Leonarduzzi'),\n author_email=('[email protected], [email protected], '\n '[email protected], [email protected], '\n '[email protected], [email protected], '\n '[email protected], [email protected], [email protected], '\n '[email protected], [email protected], [email protected]'),\n url=URL,\n download_url='https://github.com/kymatio/kymatio/releases',\n project_urls={\n 'Documentation': 'https://www.kymat.io/codereference.html',\n 'Source': 'https://github.com/kymatio/kymatio/',\n 'Tracker': 'https://github.com/kymatio/kymatio/issues',\n 'Authors': 'https://github.com/kymatio/kymatio/blob/master/AUTHORS.md'\n },\n classifiers=['Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: BSD License',\n 'Natural Language :: English',\n 'Operating System :: MacOS',\n 'Operating System :: POSIX :: Linux',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Topic :: Multimedia :: Graphics :: 3D Modeling',\n 'Topic :: Multimedia :: Sound/Audio :: Analysis',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Scientific/Engineering :: Chemistry',\n 'Topic :: Scientific/Engineering :: Image Recognition',\n 'Topic :: Scientific/Engineering :: Information Analysis',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Scientific/Engineering :: Physics',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n long_description_content_type='text/markdown',\n python_requires='>=3.5',\n license=LICENSE,\n packages=find_packages(exclude=('test',)),\n install_requires=REQUIREMENTS,\n zip_safe=True,\n)\n\nsetup(**setup_info)\n", "path": "setup.py" } ]
diff --git a/.travis.yml b/.travis.yml index 885beb281..bab8e4b97 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,4 +12,4 @@ install: script: - pytest --cov=kymatio after_success: - - bash <(curl -s https://codecov.io/bash) + - bash <(curl -s https://codecov.io/bash) -F travis diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index 3619022c9..a285412a1 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -10,5 +10,6 @@ Vincent Lostanlen nshervt Jan Schlüter Edouard Oyallon +Dylan Simon Louis Thiry Sergey Zagoruyko diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 000000000..ec16e50f3 --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,75 @@ +pipeline { + agent none + options { + disableConcurrentBuilds() + buildDiscarder(logRotator(numToKeepStr: '8', daysToKeepStr: '20')) + timeout(time: 1, unit: 'HOURS') + } + stages { + stage('torch') { + agent { + dockerfile { + dir 'tools' + args '--device /dev/nvidia0:/dev/nvidia0 --device /dev/nvidiactl:/dev/nvidiactl --device /dev/nvidia-uvm:/dev/nvidia-uvm' + } + } + environment { + HOME = pwd(tmp:true) + } + steps { + sh 'python3 -m venv $HOME' + sh '''#!/bin/bash -ex + source $HOME/bin/activate + pip3 install -r requirements.txt pytest pytest-cov torchvision + python3 setup.py develop + KYMATIO_BACKEND=$STAGE_NAME pytest --cov=kymatio + bash <(curl -s https://codecov.io/bash) -t 3941b784-370b-4e50-a162-e5018b7c2861 -F jenkins_$STAGE_NAME + ''' + } + } + stage('skcuda') { + agent { + dockerfile { + dir 'tools' + args '--device /dev/nvidia0:/dev/nvidia0 --device /dev/nvidiactl:/dev/nvidiactl --device /dev/nvidia-uvm:/dev/nvidia-uvm' + } + } + environment { + HOME = pwd(tmp:true) + } + steps { + sh 'python3 -m venv $HOME' + sh '''#!/bin/bash -ex + source $HOME/bin/activate + pip3 install -r requirements.txt pytest pytest-cov scikit-cuda cupy + python3 setup.py develop + KYMATIO_BACKEND=$STAGE_NAME pytest --cov=kymatio + bash <(curl -s https://codecov.io/bash) -t 3941b784-370b-4e50-a162-e5018b7c2861 -F jenkins_$STAGE_NAME + ''' + } + } + } + post { + failure { + emailext subject: '$PROJECT_NAME - Build #$BUILD_NUMBER - $BUILD_STATUS', + body: '''$PROJECT_NAME - Build #$BUILD_NUMBER - $BUILD_STATUS + +Check console output at $BUILD_URL to view full results. + +Building $BRANCH_NAME for $CAUSE +$JOB_DESCRIPTION + +Chages: +$CHANGES + +End of build log: +${BUILD_LOG,maxLines=60} +''', + recipientProviders: [ + [$class: 'DevelopersRecipientProvider'], + ], + replyTo: '$DEFAULT_REPLYTO', + to: '[email protected]' + } + } +} diff --git a/setup.py b/setup.py index 4bf31a9ae..cb9110a63 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ # Parse description -with open('README.md') as f: +with open('README.md', encoding='utf8') as f: README = f.read().split('\n') LONG_DESCRIPTION = '\n'.join([x for x in README if not x[:3]=='[![']) diff --git a/tools/Dockerfile b/tools/Dockerfile new file mode 100644 index 000000000..355f602ce --- /dev/null +++ b/tools/Dockerfile @@ -0,0 +1,19 @@ +FROM ubuntu:bionic + +RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y software-properties-common && \ + add-apt-repository ppa:graphics-drivers/ppa && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y \ + libnvidia-compute-410 \ + nvidia-cuda-toolkit \ + python3-scipy \ + python3-appdirs \ + python3-pytest \ + python3-pytest-cov \ + python3-pip \ + python3-venv \ + curl \ + && \ + apt-get autoremove --purge -y && \ + apt-get autoclean -y && \ + rm -rf /var/cache/apt/* /var/lib/apt/lists/*
apache__tvm-3962
docker/build.sh demo_android -it bash fails https://github.com/dmlc/tvm/blob/9e4f07b4695a8849590cdd46de662e3fa273d59b/docker/Dockerfile.demo_android#L70 Command fails with errors like: ``` CMake Error at cmake/util/FindLLVM.cmake:76 (string): string sub-command STRIP requires two arguments. Call Stack (most recent call first): cmake/modules/LLVM.cmake:22 (find_llvm) CMakeLists.txt:240 (include) ``` And more generally, it's hard to follow the tutorial to optimize for Android. - On my Ubuntu 19.04, Java 11 is installed by default, and the sources are not compatible with it - docker/bash.sh tvmai/demo-android fails - building the image fails So the only way to run the tutorial is to prepare a custom Docker image where all the needed resources are available
[ { "content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\n\"\"\"\n.. _tutorial-deploy-model-on-android:\n\nDeploy the Pretrained Model on Android\n=======================================\n**Author**: `Tomohiro Kato <https://tkat0.github.io/>`_\n\nThis is an example of using Relay to compile a keras model and deploy it on Android device.\n\"\"\"\n\nimport os\nimport numpy as np\nfrom PIL import Image\nimport keras\nfrom keras.applications.mobilenet_v2 import MobileNetV2\nimport tvm\nimport tvm.relay as relay\nfrom tvm import rpc\nfrom tvm.contrib import util, ndk, graph_runtime as runtime\nfrom tvm.contrib.download import download_testdata\n\n\n######################################################################\n# Setup Environment\n# --------------------\n# Since there are many required packages for Android, it is recommended to use the official Docker Image.\n#\n# First, to build and run Docker Image, we can run the following command.\n#\n# .. code-block:: bash\n#\n# git clone --recursive https://github.com/dmlc/tvm\n# cd tvm\n# docker build -t tvm.demo_android -f docker/Dockerfile.demo_android ./docker\n# docker run --pid=host -h tvm -v $PWD:/workspace \\\n# -w /workspace -p 9190:9190 --name tvm -it tvm.demo_android bash\n#\n# You are now inside the container. The cloned TVM directory is mounted on /workspace.\n# At this time, mount the 9190 port used by RPC described later.\n#\n# .. note::\n#\n# Please execute the following steps in the container.\n# We can execute :code:`docker exec -it tvm bash` to open a new terminal in the container.\n#\n# Next we build the TVM.\n#\n# .. code-block:: bash\n#\n# mkdir build\n# cd build\n# cmake -DUSE_LLVM=llvm-config-6.0 \\\n# -DUSE_RPC=ON \\\n# -DUSE_SORT=ON \\\n# -DUSE_VULKAN=ON \\\n# -DUSE_GRAPH_RUNTIME=ON \\\n# ..\n# make -j10\n#\n# After building TVM successfully, Please set PYTHONPATH.\n#\n# .. code-block:: bash\n#\n# echo 'export PYTHONPATH=/workspace/python:/workspacem/topi/python:/workspace/nnvm/python/:/workspace/vta/python:${PYTHONPATH}' >> ~/.bashrc\n# source ~/.bashrc\n\n#################################################################\n# Start RPC Tracker\n# -----------------\n# TVM uses RPC session to communicate with Android device.\n#\n# To start an RPC tracker, run this command in the container. The tracker is\n# required during the whole tuning process, so we need to open a new terminal for\n# this command:\n#\n# .. code-block:: bash\n#\n# python3 -m tvm.exec.rpc_tracker --host=0.0.0.0 --port=9190\n#\n# The expected output is\n#\n# .. 
code-block:: bash\n#\n# INFO:RPCTracker:bind to 0.0.0.0:9190\n\n#################################################################\n# Register Android device to RPC Tracker\n# ---------------------------------------\n# Now we can register our Android device to the tracker.\n#\n# Follow this `readme page <https://github.com/dmlc/tvm/tree/master/apps/android_rpc>`_ to\n# install TVM RPC APK on the android device.\n#\n# Here is an example of config.mk. I enabled OpenCL and Vulkan.\n#\n#\n# .. code-block:: bash\n#\n# APP_ABI = arm64-v8a\n#\n# APP_PLATFORM = android-24\n#\n# # whether enable OpenCL during compile\n# USE_OPENCL = 1\n#\n# # whether to enable Vulkan during compile\n# USE_VULKAN = 1\n#\n# ifeq ($(USE_VULKAN), 1)\n# # Statically linking vulkan requires API Level 24 or higher\n# APP_PLATFORM = android-24\n# endif\n#\n# # the additional include headers you want to add, e.g., SDK_PATH/adrenosdk/Development/Inc\n# ADD_C_INCLUDES += /work/adrenosdk-linux-5_0/Development/Inc\n# # downloaded from https://github.com/KhronosGroup/OpenCL-Headers\n# ADD_C_INCLUDES += /usr/local/OpenCL-Headers/\n#\n# # the additional link libs you want to add, e.g., ANDROID_LIB_PATH/libOpenCL.so\n# ADD_LDLIBS = /workspace/pull-from-android-device/libOpenCL.so\n#\n# .. note::\n#\n# At this time, don't forget to `create a standalone toolchain <https://github.com/dmlc/tvm/tree/master/apps/android_rpc#architecture-and-android-standalone-toolchain>`_ .\n#\n# for example\n#\n# .. code-block:: bash\n#\n# /opt/android-sdk-linux/ndk-bundle/build/tools/make-standalone-toolchain.sh \\\n# --platform=android-24 --use-llvm --arch=arm64 --install-dir=/opt/android-toolchain-arm64\n# export TVM_NDK_CC=/opt/android-toolchain-arm64/bin/aarch64-linux-android-g++\n#\n# Next, start the Android application and enter the IP address and port of RPC Tracker.\n# Then you have already registered your device.\n#\n# After registering devices, we can confirm it by querying rpc_tracker\n#\n# .. code-block:: bash\n#\n# python3 -m tvm.exec.query_rpc_tracker --host=0.0.0.0 --port=9190\n#\n# For example, if we have 1 Android device.\n# the output can be\n#\n# .. code-block:: bash\n#\n# Queue Status\n# ----------------------------------\n# key total free pending\n# ----------------------------------\n# android 1 1 0\n# ----------------------------------\n#\n# To confirm that you can communicate with Android, we can run following test script.\n# If you use OpenCL and Vulkan, please set :code:`test_opencl` and :code:`test_vulkan` in the script.\n#\n# .. code-block:: bash\n#\n# export TVM_TRACKER_HOST=0.0.0.0\n# export TVM_TRACKER_PORT=9190\n#\n# .. 
code-block:: bash\n#\n# cd /workspace/apps/android_rpc\n# python3 tests/android_rpc_test.py\n#\n\n######################################################################\n# Load pretrained keras model\n# ----------------------------\n# We load a pretrained MobileNetV2(alpha=0.5) classification model provided by keras.\nkeras.backend.clear_session() # Destroys the current TF graph and creates a new one.\nweights_url = ''.join(['https://github.com/JonathanCMitchell/',\n 'mobilenet_v2_keras/releases/download/v1.1/',\n 'mobilenet_v2_weights_tf_dim_ordering_tf_kernels_0.5_224.h5'])\nweights_file = 'mobilenet_v2_weights.h5'\nweights_path = download_testdata(weights_url, weights_file, module='keras')\nkeras_mobilenet_v2 = MobileNetV2(alpha=0.5, include_top=True, weights=None,\n input_shape=(224, 224, 3), classes=1000)\nkeras_mobilenet_v2.load_weights(weights_path)\n\n######################################################################\n# In order to test our model, here we download an image of cat and\n# transform its format.\nimg_url = 'https://github.com/dmlc/mxnet.js/blob/master/data/cat.png?raw=true'\nimg_name = 'cat.png'\nimg_path = download_testdata(img_url, img_name, module='data')\nimage = Image.open(img_path).resize((224, 224))\ndtype = 'float32'\n\ndef transform_image(image):\n image = np.array(image) - np.array([123., 117., 104.])\n image /= np.array([58.395, 57.12, 57.375])\n image = image.transpose((2, 0, 1))\n image = image[np.newaxis, :]\n return image\n\nx = transform_image(image)\n\n######################################################################\n# synset is used to transform the label from number of ImageNet class to\n# the word human can understand.\nsynset_url = ''.join(['https://gist.githubusercontent.com/zhreshold/',\n '4d0b62f3d01426887599d4f7ede23ee5/raw/',\n '596b27d23537e5a1b5751d2b0481ef172f58b539/',\n 'imagenet1000_clsid_to_human.txt'])\nsynset_name = 'imagenet1000_clsid_to_human.txt'\nsynset_path = download_testdata(synset_url, synset_name, module='data')\nwith open(synset_path) as f:\n synset = eval(f.read())\n\n\n######################################################################\n# Compile the model with relay\n# ---------------------------------------------\n# If we run the example on our x86 server for demonstration, we can simply\n# set it as :code:`llvm`. If running it on the Android device, we need to\n# specify its instruction set. 
Set :code:`local_demo` to False if you want\n# to run this tutorial with a real device.\n\nlocal_demo = True\n\n# by default on CPU target will execute.\n# select 'cpu', 'opencl' and 'vulkan'\ntest_target = 'cpu'\n\n# Change target configuration.\n# Run `adb shell cat /proc/cpuinfo` to find the arch.\narch = 'arm64'\ntarget = 'llvm -target=%s-linux-android' % arch\ntarget_host = None\n\nif local_demo:\n target_host = None\n target = 'llvm'\nelif test_target == 'opencl':\n target_host = target\n target = 'opencl'\nelif test_target == 'vulkan':\n target_host = target\n target = 'vulkan'\n\ninput_name = 'input_1'\nshape_dict = {input_name: x.shape}\nmod, params = relay.frontend.from_keras(keras_mobilenet_v2, shape_dict)\n\nwith relay.build_config(opt_level=3):\n graph, lib, params = relay.build(mod, target=target,\n target_host=target_host, params=params)\n\n# After `relay.build`, you will get three return values: graph,\n# library and the new parameter, since we do some optimization that will\n# change the parameters but keep the result of model as the same.\n\n# Save the library at local temporary directory.\ntmp = util.tempdir()\nlib_fname = tmp.relpath('net.so')\nfcompile = ndk.create_shared if not local_demo else None\nlib.export_library(lib_fname, fcompile)\n\n######################################################################\n# Deploy the Model Remotely by RPC\n# ---------------------------------------------\n# With RPC, you can deploy the model remotely from your host machine\n# to the remote android device.\n\ntracker_host = os.environ.get('TVM_TRACKER_HOST', '0.0.0.0')\ntracker_port = int(os.environ.get('TVM_TRACKER_PORT', 9190))\nkey = 'android'\n\nif local_demo:\n remote = rpc.LocalSession()\nelse:\n tracker = rpc.connect_tracker(tracker_host, tracker_port)\n # When running a heavy model, we should increase the `session_timeout`\n remote = tracker.request(key, priority=0,\n session_timeout=60)\n\nif local_demo:\n ctx = remote.cpu(0)\nelif test_target == 'opencl':\n ctx = remote.cl(0)\nelif test_target == 'vulkan':\n ctx = remote.vulkan(0)\nelse:\n ctx = remote.cpu(0)\n\n# upload the library to remote device and load it\nremote.upload(lib_fname)\nrlib = remote.load_module('net.so')\n\n# create the remote runtime module\nmodule = runtime.create(graph, rlib, ctx)\n\n######################################################################\n# Execute on TVM\n# ---------------------------------------------\n\n# set parameter (upload params to the remote device. This may take a while)\nmodule.set_input(**params)\n# set input data\nmodule.set_input(input_name, tvm.nd.array(x.astype(dtype)))\n# run\nmodule.run()\n# get output\nout = module.get_output(0)\n\n# get top1 result\ntop1 = np.argmax(out.asnumpy())\nprint('TVM prediction top-1: {}'.format(synset[top1]))\n\nprint('Evaluate inference time cost...')\nftimer = module.module.time_evaluator('run', ctx, number=1, repeat=10)\nprof_res = np.array(ftimer().results) * 1000 # convert to millisecond\nprint('Mean inference time (std dev): %.2f ms (%.2f ms)' % (np.mean(prof_res),\n np.std(prof_res)))\n\n######################################################################\n# Sample Output\n# ---------------------------------------------\n# The following is the result of 'cpu', 'opencl' and 'vulkan' using Adreno 530 on Snapdragon 820\n#\n# Although we can run on a GPU, it is slower than CPU.\n# To speed up, we need to write and optimize the schedule according to the GPU architecture.\n#\n# .. 
code-block:: bash\n#\n# # cpu\n# TVM prediction top-1: tiger cat\n# Evaluate inference time cost...\n# Mean inference time (std dev): 37.92 ms (19.67 ms)\n#\n# # opencl\n# TVM prediction top-1: tiger cat\n# Evaluate inference time cost...\n# Mean inference time (std dev): 419.83 ms (7.49 ms)\n#\n# # vulkan\n# TVM prediction top-1: tiger cat\n# Evaluate inference time cost...\n# Mean inference time (std dev): 465.80 ms (4.52 ms)\n", "path": "tutorials/frontend/deploy_model_on_android.py" } ]
[ { "content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\n\"\"\"\n.. _tutorial-deploy-model-on-android:\n\nDeploy the Pretrained Model on Android\n=======================================\n**Author**: `Tomohiro Kato <https://tkat0.github.io/>`_\n\nThis is an example of using Relay to compile a keras model and deploy it on Android device.\n\"\"\"\n\nimport os\nimport numpy as np\nfrom PIL import Image\nimport keras\nfrom keras.applications.mobilenet_v2 import MobileNetV2\nimport tvm\nimport tvm.relay as relay\nfrom tvm import rpc\nfrom tvm.contrib import util, ndk, graph_runtime as runtime\nfrom tvm.contrib.download import download_testdata\n\n\n######################################################################\n# Setup Environment\n# --------------------\n# Since there are many required packages for Android, it is recommended to use the official Docker Image.\n#\n# First, to build and run Docker Image, we can run the following command.\n#\n# .. code-block:: bash\n#\n# git clone --recursive https://github.com/dmlc/tvm\n# cd tvm\n# docker build -t tvm.demo_android -f docker/Dockerfile.demo_android ./docker\n# docker run --pid=host -h tvm -v $PWD:/workspace \\\n# -w /workspace -p 9190:9190 --name tvm -it tvm.demo_android bash\n#\n# You are now inside the container. The cloned TVM directory is mounted on /workspace.\n# At this time, mount the 9190 port used by RPC described later.\n#\n# .. note::\n#\n# Please execute the following steps in the container.\n# We can execute :code:`docker exec -it tvm bash` to open a new terminal in the container.\n#\n# Next we build the TVM.\n#\n# .. code-block:: bash\n#\n# mkdir build\n# cd build\n# cmake -DUSE_LLVM=llvm-config-8 \\\n# -DUSE_RPC=ON \\\n# -DUSE_SORT=ON \\\n# -DUSE_VULKAN=ON \\\n# -DUSE_GRAPH_RUNTIME=ON \\\n# ..\n# make -j10\n#\n# After building TVM successfully, Please set PYTHONPATH.\n#\n# .. code-block:: bash\n#\n# echo 'export PYTHONPATH=/workspace/python:/workspacem/topi/python:/workspace/nnvm/python/:/workspace/vta/python:${PYTHONPATH}' >> ~/.bashrc\n# source ~/.bashrc\n\n#################################################################\n# Start RPC Tracker\n# -----------------\n# TVM uses RPC session to communicate with Android device.\n#\n# To start an RPC tracker, run this command in the container. The tracker is\n# required during the whole tuning process, so we need to open a new terminal for\n# this command:\n#\n# .. code-block:: bash\n#\n# python3 -m tvm.exec.rpc_tracker --host=0.0.0.0 --port=9190\n#\n# The expected output is\n#\n# .. 
code-block:: bash\n#\n# INFO:RPCTracker:bind to 0.0.0.0:9190\n\n#################################################################\n# Register Android device to RPC Tracker\n# ---------------------------------------\n# Now we can register our Android device to the tracker.\n#\n# Follow this `readme page <https://github.com/dmlc/tvm/tree/master/apps/android_rpc>`_ to\n# install TVM RPC APK on the android device.\n#\n# Here is an example of config.mk. I enabled OpenCL and Vulkan.\n#\n#\n# .. code-block:: bash\n#\n# APP_ABI = arm64-v8a\n#\n# APP_PLATFORM = android-24\n#\n# # whether enable OpenCL during compile\n# USE_OPENCL = 1\n#\n# # whether to enable Vulkan during compile\n# USE_VULKAN = 1\n#\n# ifeq ($(USE_VULKAN), 1)\n# # Statically linking vulkan requires API Level 24 or higher\n# APP_PLATFORM = android-24\n# endif\n#\n# # the additional include headers you want to add, e.g., SDK_PATH/adrenosdk/Development/Inc\n# ADD_C_INCLUDES += /work/adrenosdk-linux-5_0/Development/Inc\n# # downloaded from https://github.com/KhronosGroup/OpenCL-Headers\n# ADD_C_INCLUDES += /usr/local/OpenCL-Headers/\n#\n# # the additional link libs you want to add, e.g., ANDROID_LIB_PATH/libOpenCL.so\n# ADD_LDLIBS = /workspace/pull-from-android-device/libOpenCL.so\n#\n# .. note::\n#\n# At this time, don't forget to `create a standalone toolchain <https://github.com/dmlc/tvm/tree/master/apps/android_rpc#architecture-and-android-standalone-toolchain>`_ .\n#\n# for example\n#\n# .. code-block:: bash\n#\n# /opt/android-sdk-linux/ndk-bundle/build/tools/make-standalone-toolchain.sh \\\n# --platform=android-24 --use-llvm --arch=arm64 --install-dir=/opt/android-toolchain-arm64\n# export TVM_NDK_CC=/opt/android-toolchain-arm64/bin/aarch64-linux-android-g++\n#\n# Next, start the Android application and enter the IP address and port of RPC Tracker.\n# Then you have already registered your device.\n#\n# After registering devices, we can confirm it by querying rpc_tracker\n#\n# .. code-block:: bash\n#\n# python3 -m tvm.exec.query_rpc_tracker --host=0.0.0.0 --port=9190\n#\n# For example, if we have 1 Android device.\n# the output can be\n#\n# .. code-block:: bash\n#\n# Queue Status\n# ----------------------------------\n# key total free pending\n# ----------------------------------\n# android 1 1 0\n# ----------------------------------\n#\n# To confirm that you can communicate with Android, we can run following test script.\n# If you use OpenCL and Vulkan, please set :code:`test_opencl` and :code:`test_vulkan` in the script.\n#\n# .. code-block:: bash\n#\n# export TVM_TRACKER_HOST=0.0.0.0\n# export TVM_TRACKER_PORT=9190\n#\n# .. 
code-block:: bash\n#\n# cd /workspace/apps/android_rpc\n# python3 tests/android_rpc_test.py\n#\n\n######################################################################\n# Load pretrained keras model\n# ----------------------------\n# We load a pretrained MobileNetV2(alpha=0.5) classification model provided by keras.\nkeras.backend.clear_session() # Destroys the current TF graph and creates a new one.\nweights_url = ''.join(['https://github.com/JonathanCMitchell/',\n 'mobilenet_v2_keras/releases/download/v1.1/',\n 'mobilenet_v2_weights_tf_dim_ordering_tf_kernels_0.5_224.h5'])\nweights_file = 'mobilenet_v2_weights.h5'\nweights_path = download_testdata(weights_url, weights_file, module='keras')\nkeras_mobilenet_v2 = MobileNetV2(alpha=0.5, include_top=True, weights=None,\n input_shape=(224, 224, 3), classes=1000)\nkeras_mobilenet_v2.load_weights(weights_path)\n\n######################################################################\n# In order to test our model, here we download an image of cat and\n# transform its format.\nimg_url = 'https://github.com/dmlc/mxnet.js/blob/master/data/cat.png?raw=true'\nimg_name = 'cat.png'\nimg_path = download_testdata(img_url, img_name, module='data')\nimage = Image.open(img_path).resize((224, 224))\ndtype = 'float32'\n\ndef transform_image(image):\n image = np.array(image) - np.array([123., 117., 104.])\n image /= np.array([58.395, 57.12, 57.375])\n image = image.transpose((2, 0, 1))\n image = image[np.newaxis, :]\n return image\n\nx = transform_image(image)\n\n######################################################################\n# synset is used to transform the label from number of ImageNet class to\n# the word human can understand.\nsynset_url = ''.join(['https://gist.githubusercontent.com/zhreshold/',\n '4d0b62f3d01426887599d4f7ede23ee5/raw/',\n '596b27d23537e5a1b5751d2b0481ef172f58b539/',\n 'imagenet1000_clsid_to_human.txt'])\nsynset_name = 'imagenet1000_clsid_to_human.txt'\nsynset_path = download_testdata(synset_url, synset_name, module='data')\nwith open(synset_path) as f:\n synset = eval(f.read())\n\n\n######################################################################\n# Compile the model with relay\n# ---------------------------------------------\n# If we run the example on our x86 server for demonstration, we can simply\n# set it as :code:`llvm`. If running it on the Android device, we need to\n# specify its instruction set. 
Set :code:`local_demo` to False if you want\n# to run this tutorial with a real device.\n\nlocal_demo = True\n\n# by default on CPU target will execute.\n# select 'cpu', 'opencl' and 'vulkan'\ntest_target = 'cpu'\n\n# Change target configuration.\n# Run `adb shell cat /proc/cpuinfo` to find the arch.\narch = 'arm64'\ntarget = 'llvm -target=%s-linux-android' % arch\ntarget_host = None\n\nif local_demo:\n target_host = None\n target = 'llvm'\nelif test_target == 'opencl':\n target_host = target\n target = 'opencl'\nelif test_target == 'vulkan':\n target_host = target\n target = 'vulkan'\n\ninput_name = 'input_1'\nshape_dict = {input_name: x.shape}\nmod, params = relay.frontend.from_keras(keras_mobilenet_v2, shape_dict)\n\nwith relay.build_config(opt_level=3):\n graph, lib, params = relay.build(mod, target=target,\n target_host=target_host, params=params)\n\n# After `relay.build`, you will get three return values: graph,\n# library and the new parameter, since we do some optimization that will\n# change the parameters but keep the result of model as the same.\n\n# Save the library at local temporary directory.\ntmp = util.tempdir()\nlib_fname = tmp.relpath('net.so')\nfcompile = ndk.create_shared if not local_demo else None\nlib.export_library(lib_fname, fcompile)\n\n######################################################################\n# Deploy the Model Remotely by RPC\n# ---------------------------------------------\n# With RPC, you can deploy the model remotely from your host machine\n# to the remote android device.\n\ntracker_host = os.environ.get('TVM_TRACKER_HOST', '0.0.0.0')\ntracker_port = int(os.environ.get('TVM_TRACKER_PORT', 9190))\nkey = 'android'\n\nif local_demo:\n remote = rpc.LocalSession()\nelse:\n tracker = rpc.connect_tracker(tracker_host, tracker_port)\n # When running a heavy model, we should increase the `session_timeout`\n remote = tracker.request(key, priority=0,\n session_timeout=60)\n\nif local_demo:\n ctx = remote.cpu(0)\nelif test_target == 'opencl':\n ctx = remote.cl(0)\nelif test_target == 'vulkan':\n ctx = remote.vulkan(0)\nelse:\n ctx = remote.cpu(0)\n\n# upload the library to remote device and load it\nremote.upload(lib_fname)\nrlib = remote.load_module('net.so')\n\n# create the remote runtime module\nmodule = runtime.create(graph, rlib, ctx)\n\n######################################################################\n# Execute on TVM\n# ---------------------------------------------\n\n# set parameter (upload params to the remote device. This may take a while)\nmodule.set_input(**params)\n# set input data\nmodule.set_input(input_name, tvm.nd.array(x.astype(dtype)))\n# run\nmodule.run()\n# get output\nout = module.get_output(0)\n\n# get top1 result\ntop1 = np.argmax(out.asnumpy())\nprint('TVM prediction top-1: {}'.format(synset[top1]))\n\nprint('Evaluate inference time cost...')\nftimer = module.module.time_evaluator('run', ctx, number=1, repeat=10)\nprof_res = np.array(ftimer().results) * 1000 # convert to millisecond\nprint('Mean inference time (std dev): %.2f ms (%.2f ms)' % (np.mean(prof_res),\n np.std(prof_res)))\n\n######################################################################\n# Sample Output\n# ---------------------------------------------\n# The following is the result of 'cpu', 'opencl' and 'vulkan' using Adreno 530 on Snapdragon 820\n#\n# Although we can run on a GPU, it is slower than CPU.\n# To speed up, we need to write and optimize the schedule according to the GPU architecture.\n#\n# .. 
code-block:: bash\n#\n# # cpu\n# TVM prediction top-1: tiger cat\n# Evaluate inference time cost...\n# Mean inference time (std dev): 37.92 ms (19.67 ms)\n#\n# # opencl\n# TVM prediction top-1: tiger cat\n# Evaluate inference time cost...\n# Mean inference time (std dev): 419.83 ms (7.49 ms)\n#\n# # vulkan\n# TVM prediction top-1: tiger cat\n# Evaluate inference time cost...\n# Mean inference time (std dev): 465.80 ms (4.52 ms)\n", "path": "tutorials/frontend/deploy_model_on_android.py" } ]
diff --git a/CMakeLists.txt b/CMakeLists.txt index 754aa6498156..abf198de1c53 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -101,7 +101,7 @@ else(MSVC) set(CMAKE_C_FLAGS "-O2 -Wall -fPIC ${CMAKE_C_FLAGS}") set(CMAKE_CXX_FLAGS "-O2 -Wall -fPIC ${CMAKE_CXX_FLAGS}") if (HIDE_PRIVATE_SYMBOLS) - message("Hide private symbols...") + message(STATUS "Hide private symbols...") set(CMAKE_C_FLAGS "-fvisibility=hidden ${CMAKE_C_FLAGS}") set(CMAKE_CXX_FLAGS "-fvisibility=hidden ${CMAKE_CXX_FLAGS}") endif(HIDE_PRIVATE_SYMBOLS) diff --git a/cmake/util/FindLLVM.cmake b/cmake/util/FindLLVM.cmake index 1c3b2f0ca0d8..7e759ab20037 100644 --- a/cmake/util/FindLLVM.cmake +++ b/cmake/util/FindLLVM.cmake @@ -49,13 +49,29 @@ macro(find_llvm use_llvm) message(STATUS "Use llvm-config=" ${LLVM_CONFIG}) separate_arguments(LLVM_CONFIG) execute_process(COMMAND ${LLVM_CONFIG} --libfiles + RESULT_VARIABLE __llvm_exit_code OUTPUT_VARIABLE __llvm_libfiles) + if(NOT "${__llvm_exit_code}" STREQUAL "0") + message(FATAL_ERROR "Fatal error executing: ${use_llvm} --libfiles") + endif() execute_process(COMMAND ${LLVM_CONFIG} --system-libs + RESULT_VARIABLE __llvm_exit_code OUTPUT_VARIABLE __llvm_system_libs) + if(NOT "${__llvm_exit_code}" STREQUAL "0") + message(FATAL_ERROR "Fatal error executing: ${use_llvm} --system-libs") + endif() execute_process(COMMAND ${LLVM_CONFIG} --cxxflags + RESULT_VARIABLE __llvm_exit_code OUTPUT_VARIABLE __llvm_cxxflags) + if(NOT "${__llvm_exit_code}" STREQUAL "0") + message(FATAL_ERROR "Fatal error executing: ${use_llvm} --cxxflags") + endif() execute_process(COMMAND ${LLVM_CONFIG} --version + RESULT_VARIABLE __llvm_exit_code OUTPUT_VARIABLE __llvm_version) + if(NOT "${__llvm_exit_code}" STREQUAL "0") + message(FATAL_ERROR "Fatal error executing: ${use_llvm} --version") + endif() # llvm version string(REGEX REPLACE "^([^.]+)\.([^.])+\.[^.]+.*$" "\\1\\2" TVM_LLVM_VERSION ${__llvm_version}) # definitions diff --git a/docker/Dockerfile.demo_android b/docker/Dockerfile.demo_android index d6d9a9b50bd6..6f8720c9eb3e 100644 --- a/docker/Dockerfile.demo_android +++ b/docker/Dockerfile.demo_android @@ -61,7 +61,7 @@ RUN cd /usr && \ mkdir -p build && \ cd build && \ cmake \ - -DUSE_LLVM=llvm-config-6.0 \ + -DUSE_LLVM=llvm-config-8 \ -DUSE_RPC=ON \ -DUSE_SORT=ON \ -DUSE_GRAPH_RUNTIME=ON \ diff --git a/tutorials/frontend/deploy_model_on_android.py b/tutorials/frontend/deploy_model_on_android.py index 72404132c19e..9969d0788ba0 100644 --- a/tutorials/frontend/deploy_model_on_android.py +++ b/tutorials/frontend/deploy_model_on_android.py @@ -66,7 +66,7 @@ # # mkdir build # cd build -# cmake -DUSE_LLVM=llvm-config-6.0 \ +# cmake -DUSE_LLVM=llvm-config-8 \ # -DUSE_RPC=ON \ # -DUSE_SORT=ON \ # -DUSE_VULKAN=ON \
open-telemetry__opentelemetry-python-2307
Rename `ConsoleExporter` to `ConsoleLogExporter`?

As suggested by @lonewolf3739, we should rename `ConsoleExporter` to `ConsoleLogExporter` to follow the pattern established by `ConsoleSpanExporter`.

> Not in this PR; Should we rename this to `ConsoleLogExporter`?

_Originally posted by @lonewolf3739 in https://github.com/open-telemetry/opentelemetry-python/pull/2253#r759589860_
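For illustration only, a minimal usage sketch of the proposed name, mirroring how `ConsoleSpanExporter` is used for spans; the import path assumes the experimental `opentelemetry.sdk._logs` package shown in the files below, and the constructor is unchanged by the rename:

```python
# Hypothetical usage after the rename (not part of the proposal itself).
from opentelemetry.sdk._logs.export import ConsoleLogExporter, SimpleLogProcessor

exporter = ConsoleLogExporter()           # same signature as the old ConsoleExporter
processor = SimpleLogProcessor(exporter)  # hands each emitted log record to the exporter
```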
[ { "content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport abc\nimport collections\nimport enum\nimport logging\nimport os\nimport sys\nimport threading\nfrom os import linesep\nfrom typing import IO, Callable, Deque, List, Optional, Sequence\n\nfrom opentelemetry.context import attach, detach, set_value\nfrom opentelemetry.sdk._logs import LogData, LogProcessor, LogRecord\nfrom opentelemetry.util._time import _time_ns\n\n_logger = logging.getLogger(__name__)\n\n\nclass LogExportResult(enum.Enum):\n SUCCESS = 0\n FAILURE = 1\n\n\nclass LogExporter(abc.ABC):\n \"\"\"Interface for exporting logs.\n\n Interface to be implemented by services that want to export logs received\n in their own format.\n\n To export data this MUST be registered to the :class`opentelemetry.sdk._logs.LogEmitter` using a\n log processor.\n \"\"\"\n\n @abc.abstractmethod\n def export(self, batch: Sequence[LogData]):\n \"\"\"Exports a batch of logs.\n\n Args:\n batch: The list of `LogData` objects to be exported\n\n Returns:\n The result of the export\n \"\"\"\n\n @abc.abstractmethod\n def shutdown(self):\n \"\"\"Shuts down the exporter.\n\n Called when the SDK is shut down.\n \"\"\"\n\n\nclass ConsoleExporter(LogExporter):\n \"\"\"Implementation of :class:`LogExporter` that prints log records to the\n console.\n\n This class can be used for diagnostic purposes. 
It prints the exported\n log records to the console STDOUT.\n \"\"\"\n\n def __init__(\n self,\n out: IO = sys.stdout,\n formatter: Callable[[LogRecord], str] = lambda record: record.to_json()\n + linesep,\n ):\n self.out = out\n self.formatter = formatter\n\n def export(self, batch: Sequence[LogData]):\n for data in batch:\n self.out.write(self.formatter(data.log_record))\n self.out.flush()\n return LogExportResult.SUCCESS\n\n def shutdown(self):\n pass\n\n\nclass SimpleLogProcessor(LogProcessor):\n \"\"\"This is an implementation of LogProcessor which passes\n received logs in the export-friendly LogData representation to the\n configured LogExporter, as soon as they are emitted.\n \"\"\"\n\n def __init__(self, exporter: LogExporter):\n self._exporter = exporter\n self._shutdown = False\n\n def emit(self, log_data: LogData):\n if self._shutdown:\n _logger.warning(\"Processor is already shutdown, ignoring call\")\n return\n token = attach(set_value(\"suppress_instrumentation\", True))\n try:\n self._exporter.export((log_data,))\n except Exception: # pylint: disable=broad-except\n _logger.exception(\"Exception while exporting logs.\")\n detach(token)\n\n def shutdown(self):\n self._shutdown = True\n self._exporter.shutdown()\n\n def force_flush(\n self, timeout_millis: int = 30000\n ) -> bool: # pylint: disable=no-self-use\n return True\n\n\nclass _FlushRequest:\n __slots__ = [\"event\", \"num_log_records\"]\n\n def __init__(self):\n self.event = threading.Event()\n self.num_log_records = 0\n\n\nclass BatchLogProcessor(LogProcessor):\n \"\"\"This is an implementation of LogProcessor which creates batches of\n received logs in the export-friendly LogData representation and\n send to the configured LogExporter, as soon as they are emitted.\n \"\"\"\n\n def __init__(\n self,\n exporter: LogExporter,\n schedule_delay_millis: int = 5000,\n max_export_batch_size: int = 512,\n export_timeout_millis: int = 30000,\n ):\n self._exporter = exporter\n self._schedule_delay_millis = schedule_delay_millis\n self._max_export_batch_size = max_export_batch_size\n self._export_timeout_millis = export_timeout_millis\n self._queue = collections.deque() # type: Deque[LogData]\n self._worker_thread = threading.Thread(target=self.worker, daemon=True)\n self._condition = threading.Condition(threading.Lock())\n self._shutdown = False\n self._flush_request = None # type: Optional[_FlushRequest]\n self._log_records = [\n None\n ] * self._max_export_batch_size # type: List[Optional[LogData]]\n self._worker_thread.start()\n # Only available in *nix since py37.\n if hasattr(os, \"register_at_fork\"):\n os.register_at_fork(\n after_in_child=self._at_fork_reinit\n ) # pylint: disable=protected-access\n\n def _at_fork_reinit(self):\n self._condition = threading.Condition(threading.Lock())\n self._queue.clear()\n self._worker_thread = threading.Thread(target=self.worker, daemon=True)\n self._worker_thread.start()\n\n def worker(self):\n timeout = self._schedule_delay_millis / 1e3\n flush_request = None # type: Optional[_FlushRequest]\n while not self._shutdown:\n with self._condition:\n if self._shutdown:\n # shutdown may have been called, avoid further processing\n break\n flush_request = self._get_and_unset_flush_request()\n if (\n len(self._queue) < self._max_export_batch_size\n and self._flush_request is None\n ):\n self._condition.wait(timeout)\n\n flush_request = self._get_and_unset_flush_request()\n if not self._queue:\n timeout = self._schedule_delay_millis / 1e3\n 
self._notify_flush_request_finished(flush_request)\n flush_request = None\n continue\n if self._shutdown:\n break\n\n start_ns = _time_ns()\n self._export(flush_request)\n end_ns = _time_ns()\n # subtract the duration of this export call to the next timeout\n timeout = self._schedule_delay_millis / 1e3 - (\n (end_ns - start_ns) / 1e9\n )\n\n self._notify_flush_request_finished(flush_request)\n flush_request = None\n\n # there might have been a new flush request while export was running\n # and before the done flag switched to true\n with self._condition:\n shutdown_flush_request = self._get_and_unset_flush_request()\n\n # flush the remaining logs\n self._drain_queue()\n self._notify_flush_request_finished(flush_request)\n self._notify_flush_request_finished(shutdown_flush_request)\n\n def _export(self, flush_request: Optional[_FlushRequest] = None):\n \"\"\"Exports logs considering the given flush_request.\n\n If flush_request is not None then logs are exported in batches\n until the number of exported logs reached or exceeded the num of logs in\n flush_request, otherwise exports at max max_export_batch_size logs.\n \"\"\"\n if flush_request is None:\n self._export_batch()\n return\n\n num_log_records = flush_request.num_log_records\n while self._queue:\n exported = self._export_batch()\n num_log_records -= exported\n\n if num_log_records <= 0:\n break\n\n def _export_batch(self) -> int:\n \"\"\"Exports at most max_export_batch_size logs and returns the number of\n exported logs.\n \"\"\"\n idx = 0\n while idx < self._max_export_batch_size and self._queue:\n record = self._queue.pop()\n self._log_records[idx] = record\n idx += 1\n token = attach(set_value(\"suppress_instrumentation\", True))\n try:\n self._exporter.export(self._log_records[:idx]) # type: ignore\n except Exception: # pylint: disable=broad-except\n _logger.exception(\"Exception while exporting logs.\")\n detach(token)\n\n for index in range(idx):\n self._log_records[index] = None\n return idx\n\n def _drain_queue(self):\n \"\"\"Export all elements until queue is empty.\n\n Can only be called from the worker thread context because it invokes\n `export` that is not thread safe.\n \"\"\"\n while self._queue:\n self._export_batch()\n\n def _get_and_unset_flush_request(self) -> Optional[_FlushRequest]:\n flush_request = self._flush_request\n self._flush_request = None\n if flush_request is not None:\n flush_request.num_log_records = len(self._queue)\n return flush_request\n\n @staticmethod\n def _notify_flush_request_finished(\n flush_request: Optional[_FlushRequest] = None,\n ):\n if flush_request is not None:\n flush_request.event.set()\n\n def _get_or_create_flush_request(self) -> _FlushRequest:\n if self._flush_request is None:\n self._flush_request = _FlushRequest()\n return self._flush_request\n\n def emit(self, log_data: LogData) -> None:\n \"\"\"Adds the `LogData` to queue and notifies the waiting threads\n when size of queue reaches max_export_batch_size.\n \"\"\"\n if self._shutdown:\n return\n self._queue.appendleft(log_data)\n if len(self._queue) >= self._max_export_batch_size:\n with self._condition:\n self._condition.notify()\n\n def shutdown(self):\n self._shutdown = True\n with self._condition:\n self._condition.notify_all()\n self._worker_thread.join()\n self._exporter.shutdown()\n\n def force_flush(self, timeout_millis: Optional[int] = None) -> bool:\n if timeout_millis is None:\n timeout_millis = self._export_timeout_millis\n if self._shutdown:\n return True\n\n with self._condition:\n flush_request = 
self._get_or_create_flush_request()\n self._condition.notify_all()\n\n ret = flush_request.event.wait(timeout_millis / 1e3)\n if not ret:\n _logger.warning(\"Timeout was exceeded in force_flush().\")\n return ret\n", "path": "opentelemetry-sdk/src/opentelemetry/sdk/_logs/export/__init__.py" } ]
[ { "content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport abc\nimport collections\nimport enum\nimport logging\nimport os\nimport sys\nimport threading\nfrom os import linesep\nfrom typing import IO, Callable, Deque, List, Optional, Sequence\n\nfrom opentelemetry.context import attach, detach, set_value\nfrom opentelemetry.sdk._logs import LogData, LogProcessor, LogRecord\nfrom opentelemetry.util._time import _time_ns\n\n_logger = logging.getLogger(__name__)\n\n\nclass LogExportResult(enum.Enum):\n SUCCESS = 0\n FAILURE = 1\n\n\nclass LogExporter(abc.ABC):\n \"\"\"Interface for exporting logs.\n\n Interface to be implemented by services that want to export logs received\n in their own format.\n\n To export data this MUST be registered to the :class`opentelemetry.sdk._logs.LogEmitter` using a\n log processor.\n \"\"\"\n\n @abc.abstractmethod\n def export(self, batch: Sequence[LogData]):\n \"\"\"Exports a batch of logs.\n\n Args:\n batch: The list of `LogData` objects to be exported\n\n Returns:\n The result of the export\n \"\"\"\n\n @abc.abstractmethod\n def shutdown(self):\n \"\"\"Shuts down the exporter.\n\n Called when the SDK is shut down.\n \"\"\"\n\n\nclass ConsoleLogExporter(LogExporter):\n \"\"\"Implementation of :class:`LogExporter` that prints log records to the\n console.\n\n This class can be used for diagnostic purposes. 
It prints the exported\n log records to the console STDOUT.\n \"\"\"\n\n def __init__(\n self,\n out: IO = sys.stdout,\n formatter: Callable[[LogRecord], str] = lambda record: record.to_json()\n + linesep,\n ):\n self.out = out\n self.formatter = formatter\n\n def export(self, batch: Sequence[LogData]):\n for data in batch:\n self.out.write(self.formatter(data.log_record))\n self.out.flush()\n return LogExportResult.SUCCESS\n\n def shutdown(self):\n pass\n\n\nclass SimpleLogProcessor(LogProcessor):\n \"\"\"This is an implementation of LogProcessor which passes\n received logs in the export-friendly LogData representation to the\n configured LogExporter, as soon as they are emitted.\n \"\"\"\n\n def __init__(self, exporter: LogExporter):\n self._exporter = exporter\n self._shutdown = False\n\n def emit(self, log_data: LogData):\n if self._shutdown:\n _logger.warning(\"Processor is already shutdown, ignoring call\")\n return\n token = attach(set_value(\"suppress_instrumentation\", True))\n try:\n self._exporter.export((log_data,))\n except Exception: # pylint: disable=broad-except\n _logger.exception(\"Exception while exporting logs.\")\n detach(token)\n\n def shutdown(self):\n self._shutdown = True\n self._exporter.shutdown()\n\n def force_flush(\n self, timeout_millis: int = 30000\n ) -> bool: # pylint: disable=no-self-use\n return True\n\n\nclass _FlushRequest:\n __slots__ = [\"event\", \"num_log_records\"]\n\n def __init__(self):\n self.event = threading.Event()\n self.num_log_records = 0\n\n\nclass BatchLogProcessor(LogProcessor):\n \"\"\"This is an implementation of LogProcessor which creates batches of\n received logs in the export-friendly LogData representation and\n send to the configured LogExporter, as soon as they are emitted.\n \"\"\"\n\n def __init__(\n self,\n exporter: LogExporter,\n schedule_delay_millis: int = 5000,\n max_export_batch_size: int = 512,\n export_timeout_millis: int = 30000,\n ):\n self._exporter = exporter\n self._schedule_delay_millis = schedule_delay_millis\n self._max_export_batch_size = max_export_batch_size\n self._export_timeout_millis = export_timeout_millis\n self._queue = collections.deque() # type: Deque[LogData]\n self._worker_thread = threading.Thread(target=self.worker, daemon=True)\n self._condition = threading.Condition(threading.Lock())\n self._shutdown = False\n self._flush_request = None # type: Optional[_FlushRequest]\n self._log_records = [\n None\n ] * self._max_export_batch_size # type: List[Optional[LogData]]\n self._worker_thread.start()\n # Only available in *nix since py37.\n if hasattr(os, \"register_at_fork\"):\n os.register_at_fork(\n after_in_child=self._at_fork_reinit\n ) # pylint: disable=protected-access\n\n def _at_fork_reinit(self):\n self._condition = threading.Condition(threading.Lock())\n self._queue.clear()\n self._worker_thread = threading.Thread(target=self.worker, daemon=True)\n self._worker_thread.start()\n\n def worker(self):\n timeout = self._schedule_delay_millis / 1e3\n flush_request = None # type: Optional[_FlushRequest]\n while not self._shutdown:\n with self._condition:\n if self._shutdown:\n # shutdown may have been called, avoid further processing\n break\n flush_request = self._get_and_unset_flush_request()\n if (\n len(self._queue) < self._max_export_batch_size\n and self._flush_request is None\n ):\n self._condition.wait(timeout)\n\n flush_request = self._get_and_unset_flush_request()\n if not self._queue:\n timeout = self._schedule_delay_millis / 1e3\n 
self._notify_flush_request_finished(flush_request)\n flush_request = None\n continue\n if self._shutdown:\n break\n\n start_ns = _time_ns()\n self._export(flush_request)\n end_ns = _time_ns()\n # subtract the duration of this export call to the next timeout\n timeout = self._schedule_delay_millis / 1e3 - (\n (end_ns - start_ns) / 1e9\n )\n\n self._notify_flush_request_finished(flush_request)\n flush_request = None\n\n # there might have been a new flush request while export was running\n # and before the done flag switched to true\n with self._condition:\n shutdown_flush_request = self._get_and_unset_flush_request()\n\n # flush the remaining logs\n self._drain_queue()\n self._notify_flush_request_finished(flush_request)\n self._notify_flush_request_finished(shutdown_flush_request)\n\n def _export(self, flush_request: Optional[_FlushRequest] = None):\n \"\"\"Exports logs considering the given flush_request.\n\n If flush_request is not None then logs are exported in batches\n until the number of exported logs reached or exceeded the num of logs in\n flush_request, otherwise exports at max max_export_batch_size logs.\n \"\"\"\n if flush_request is None:\n self._export_batch()\n return\n\n num_log_records = flush_request.num_log_records\n while self._queue:\n exported = self._export_batch()\n num_log_records -= exported\n\n if num_log_records <= 0:\n break\n\n def _export_batch(self) -> int:\n \"\"\"Exports at most max_export_batch_size logs and returns the number of\n exported logs.\n \"\"\"\n idx = 0\n while idx < self._max_export_batch_size and self._queue:\n record = self._queue.pop()\n self._log_records[idx] = record\n idx += 1\n token = attach(set_value(\"suppress_instrumentation\", True))\n try:\n self._exporter.export(self._log_records[:idx]) # type: ignore\n except Exception: # pylint: disable=broad-except\n _logger.exception(\"Exception while exporting logs.\")\n detach(token)\n\n for index in range(idx):\n self._log_records[index] = None\n return idx\n\n def _drain_queue(self):\n \"\"\"Export all elements until queue is empty.\n\n Can only be called from the worker thread context because it invokes\n `export` that is not thread safe.\n \"\"\"\n while self._queue:\n self._export_batch()\n\n def _get_and_unset_flush_request(self) -> Optional[_FlushRequest]:\n flush_request = self._flush_request\n self._flush_request = None\n if flush_request is not None:\n flush_request.num_log_records = len(self._queue)\n return flush_request\n\n @staticmethod\n def _notify_flush_request_finished(\n flush_request: Optional[_FlushRequest] = None,\n ):\n if flush_request is not None:\n flush_request.event.set()\n\n def _get_or_create_flush_request(self) -> _FlushRequest:\n if self._flush_request is None:\n self._flush_request = _FlushRequest()\n return self._flush_request\n\n def emit(self, log_data: LogData) -> None:\n \"\"\"Adds the `LogData` to queue and notifies the waiting threads\n when size of queue reaches max_export_batch_size.\n \"\"\"\n if self._shutdown:\n return\n self._queue.appendleft(log_data)\n if len(self._queue) >= self._max_export_batch_size:\n with self._condition:\n self._condition.notify()\n\n def shutdown(self):\n self._shutdown = True\n with self._condition:\n self._condition.notify_all()\n self._worker_thread.join()\n self._exporter.shutdown()\n\n def force_flush(self, timeout_millis: Optional[int] = None) -> bool:\n if timeout_millis is None:\n timeout_millis = self._export_timeout_millis\n if self._shutdown:\n return True\n\n with self._condition:\n flush_request = 
self._get_or_create_flush_request()\n self._condition.notify_all()\n\n ret = flush_request.event.wait(timeout_millis / 1e3)\n if not ret:\n _logger.warning(\"Timeout was exceeded in force_flush().\")\n return ret\n", "path": "opentelemetry-sdk/src/opentelemetry/sdk/_logs/export/__init__.py" } ]
diff --git a/CHANGELOG.md b/CHANGELOG.md index 9f0cf21c606..cd154b753cb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ([#2303](https://github.com/open-telemetry/opentelemetry-python/pull/2303)) - Adding entrypoints for log emitter provider and console, otlp log exporters ([#2253](https://github.com/open-telemetry/opentelemetry-python/pull/2253)) +- Rename ConsoleExporter to ConsoleLogExporter + ([#2307](https://github.com/open-telemetry/opentelemetry-python/pull/2307)) ## [1.7.1-0.26b1](https://github.com/open-telemetry/opentelemetry-python/releases/tag/v1.7.0-0.26b0) - 2021-11-11 diff --git a/opentelemetry-sdk/setup.cfg b/opentelemetry-sdk/setup.cfg index a8025965004..e78448dd820 100644 --- a/opentelemetry-sdk/setup.cfg +++ b/opentelemetry-sdk/setup.cfg @@ -57,7 +57,7 @@ opentelemetry_traces_exporter = opentelemetry_log_emitter_provider = sdk_log_emitter_provider = opentelemetry.sdk._logs:LogEmitterProvider opentelemetry_logs_exporter = - console = opentelemetry.sdk._logs.export:ConsoleExporter + console = opentelemetry.sdk._logs.export:ConsoleLogExporter opentelemetry_id_generator = random = opentelemetry.sdk.trace.id_generator:RandomIdGenerator opentelemetry_environment_variables = diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_logs/export/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/_logs/export/__init__.py index c705c2b2497..87ac308317d 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/_logs/export/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_logs/export/__init__.py @@ -63,7 +63,7 @@ def shutdown(self): """ -class ConsoleExporter(LogExporter): +class ConsoleLogExporter(LogExporter): """Implementation of :class:`LogExporter` that prints log records to the console. diff --git a/opentelemetry-sdk/tests/logs/test_export.py b/opentelemetry-sdk/tests/logs/test_export.py index 45b83358f93..502c68ed759 100644 --- a/opentelemetry-sdk/tests/logs/test_export.py +++ b/opentelemetry-sdk/tests/logs/test_export.py @@ -31,7 +31,7 @@ ) from opentelemetry.sdk._logs.export import ( BatchLogProcessor, - ConsoleExporter, + ConsoleLogExporter, SimpleLogProcessor, ) from opentelemetry.sdk._logs.export.in_memory_log_exporter import ( @@ -321,7 +321,7 @@ def _target(): log_processor.shutdown() -class TestConsoleExporter(unittest.TestCase): +class TestConsoleLogExporter(unittest.TestCase): def test_export(self): # pylint: disable=no-self-use """Check that the console exporter prints log records.""" log_data = LogData( @@ -341,7 +341,7 @@ def test_export(self): # pylint: disable=no-self-use "first_name", "first_version" ), ) - exporter = ConsoleExporter() + exporter = ConsoleLogExporter() # Mocking stdout interferes with debugging and test reporting, mock on # the exporter instance instead. 
@@ -362,7 +362,7 @@ def formatter(record): # pylint: disable=unused-argument return mock_record_str mock_stdout = Mock() - exporter = ConsoleExporter(out=mock_stdout, formatter=formatter) + exporter = ConsoleLogExporter(out=mock_stdout, formatter=formatter) log_data = LogData( log_record=LogRecord(), instrumentation_info=InstrumentationInfo( diff --git a/opentelemetry-sdk/tests/test_configurator.py b/opentelemetry-sdk/tests/test_configurator.py index 8a4aadd4790..ca755544b76 100644 --- a/opentelemetry-sdk/tests/test_configurator.py +++ b/opentelemetry-sdk/tests/test_configurator.py @@ -28,7 +28,7 @@ _import_id_generator, _init_tracing, ) -from opentelemetry.sdk._logs.export import ConsoleExporter +from opentelemetry.sdk._logs.export import ConsoleLogExporter from opentelemetry.sdk.resources import SERVICE_NAME, Resource from opentelemetry.sdk.trace.export import ConsoleSpanExporter from opentelemetry.sdk.trace.id_generator import IdGenerator, RandomIdGenerator @@ -193,5 +193,5 @@ def test_console_exporters(self): trace_exporters["console"].__class__, ConsoleSpanExporter.__class__ ) self.assertEqual( - logs_exporters["console"].__class__, ConsoleExporter.__class__ + logs_exporters["console"].__class__, ConsoleLogExporter.__class__ )
scikit-hep__pyhf-1220
pytest v6.2.0 causing test_optim_with_value to fail

# Description

`v0.5.4` `bump2version` changes were swept into `master` 2020-12-12 with f824afe and the CI on `master` succeeded. Later that day [`pytest` `v6.2.0`](https://github.com/pytest-dev/pytest/releases/tag/6.2.0) was released and the nightly scheduled CI failed on

```pytb
_______________________ test_optim_with_value[jax-mu=1] ________________________

backend = (<pyhf.tensor.jax_backend.jax_backend object at 0x7f6bf92def50>, None)
source = {'bindata': {'bkg': [100.0, 150.0], 'bkgsys_dn': [98, 100], 'bkgsys_up': [102, 190], 'data': [120.0, 180.0], ...}, 'binning': [2, -0.5, 1.5]}
spec = {'channels': [{'name': 'singlechannel', 'samples': [{'data': [30.0, 95.0], 'modifiers': [{...}], 'name': 'signal'}, {'data': [100.0, 150.0], 'modifiers': [{...}], 'name': 'background'}]}]}
mu = 1.0

    @pytest.mark.parametrize('mu', [1.0], ids=['mu=1'])
    def test_optim_with_value(backend, source, spec, mu):
        pdf = pyhf.Model(spec)
        data = source['bindata']['data'] + pdf.config.auxdata

        init_pars = pdf.config.suggested_init()
        par_bounds = pdf.config.suggested_bounds()

        optim = pyhf.optimizer

        result = optim.minimize(pyhf.infer.mle.twice_nll, data, pdf, init_pars, par_bounds)
        assert pyhf.tensorlib.tolist(result)

        result, fitted_val = optim.minimize(
            pyhf.infer.mle.twice_nll,
            data,
            pdf,
            init_pars,
            par_bounds,
            fixed_vals=[(pdf.config.poi_index, mu)],
            return_fitted_val=True,
        )
        assert pyhf.tensorlib.tolist(result)
        assert pyhf.tensorlib.shape(fitted_val) == ()
>       assert pytest.approx(17.52954975, rel=1e-5) == fitted_val
E       assert 17.52954975 ± 1.8e-04 == DeviceArray(17.52954975, dtype=float64)
E        +  where 17.52954975 ± 1.8e-04 = <function approx at 0x7f6cc1747f80>(17.52954975, rel=1e-05)
E        +    where <function approx at 0x7f6cc1747f80> = pytest.approx

tests/test_optim.py:383: AssertionError
```

Diffing the installed libraries between the two (in [f824afe_install.txt](https://github.com/scikit-hep/pyhf/files/5684241/f824afe_install.txt) and [failing_install.txt](https://github.com/scikit-hep/pyhf/files/5684242/failing_install.txt)) shows that the relevant change is `pytest`

```
$ diff f824afe_install.txt failing_install.txt
33a34
> importlib-metadata 3.1.1
83c84
< py 1.9.0
---
> py 1.10.0
96c97
< pytest 6.1.2
---
> pytest 6.2.0
143a145
> zipp 3.4.0
```

This is confirmed, as with

```diff
--- a/setup.py
+++ b/setup.py
@@ -29,7 +29,7 @@
         + extras_require['contrib']
         + extras_require['shellcomplete']
         + [
-            'pytest~=6.0',
+            'pytest~=6.1.0',
             'pytest-cov>=2.5.1',
             'pytest-mock',
             'pytest-benchmark[histogram]',
```

the [CI installs `v6.1.2` and passes](https://github.com/scikit-hep/pyhf/actions/runs/418404132). This behavior is confusing, as the only mention of `pytest.approx` in the [`v6.2.0` release notes](https://github.com/pytest-dev/pytest/releases/tag/6.2.0) is under "Improvements":

> 7710: Use strict equality comparison for non-numeric types in pytest.approx instead of raising TypeError.
>
> This was the undocumented behavior before 3.7, but is now officially a supported feature.
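In short, with `pytest` `v6.2.0` the `approx` comparison against a 0-d JAX `DeviceArray` no longer passes, and the straightforward workaround is to compare against a plain Python value instead (the accompanying diff does this with `pyhf.tensorlib.tolist`). A minimal sketch of that pattern, using a literal stand-in for the fitted value since producing the real one requires running the fit:

```python
# Workaround sketch: compare pytest.approx against a plain Python float.
# The literal below is a stand-in for the DeviceArray returned by the fit.
import pytest

fitted_val = 17.52954975  # imagine this is DeviceArray(17.52954975, dtype=float64)
assert pytest.approx(17.52954975, rel=1e-5) == float(fitted_val)
```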
[ { "content": "from setuptools import setup\n\nextras_require = {\n 'shellcomplete': ['click_completion'],\n 'tensorflow': [\n 'tensorflow~=2.2.0', # TensorFlow minor releases are as volatile as major\n 'tensorflow-probability~=0.10.0',\n ],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.2.4', 'jaxlib~=0.1.56'],\n 'xmlio': ['uproot3~=3.14'], # Future proof against uproot4 API changes\n 'minuit': ['iminuit~=1.5.3'],\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted({'matplotlib', 'requests'})\nextras_require['lint'] = sorted({'flake8', 'black'})\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + extras_require['shellcomplete']\n + [\n 'pytest~=6.0',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'jupyter',\n 'graphviz',\n 'jsonpatch',\n ]\n )\n)\nextras_require['docs'] = sorted(\n {\n 'sphinx>=3.1.2',\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n }\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['lint']\n + extras_require['test']\n + [\n 'nbdime',\n 'bump2version',\n 'ipython',\n 'pre-commit',\n 'check-manifest',\n 'codemetapy>=0.3.4',\n 'twine',\n ]\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py" } ]
[ { "content": "from setuptools import setup\n\nextras_require = {\n 'shellcomplete': ['click_completion'],\n 'tensorflow': [\n 'tensorflow~=2.2.0', # TensorFlow minor releases are as volatile as major\n 'tensorflow-probability~=0.10.0',\n ],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.2.4', 'jaxlib~=0.1.56'],\n 'xmlio': ['uproot3~=3.14'], # Future proof against uproot4 API changes\n 'minuit': ['iminuit~=1.5.3'],\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted({'matplotlib', 'requests'})\nextras_require['lint'] = sorted({'flake8', 'black'})\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + extras_require['shellcomplete']\n + [\n 'pytest~=6.0',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'jupyter',\n 'graphviz',\n 'jsonpatch',\n ]\n )\n)\nextras_require['docs'] = sorted(\n {\n 'sphinx>=3.1.2',\n 'sphinxcontrib-bibtex~=1.0',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n }\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['lint']\n + extras_require['test']\n + [\n 'nbdime',\n 'bump2version',\n 'ipython',\n 'pre-commit',\n 'check-manifest',\n 'codemetapy>=0.3.4',\n 'twine',\n ]\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py" } ]
diff --git a/.github/workflows/dependencies-head.yml b/.github/workflows/dependencies-head.yml index 174c1f382f..77b6f18142 100644 --- a/.github/workflows/dependencies-head.yml +++ b/.github/workflows/dependencies-head.yml @@ -57,7 +57,7 @@ jobs: run: | python -m pytest -r sx --ignore tests/benchmarks/ --ignore tests/contrib --ignore tests/test_notebooks.py - uproot: + uproot3: runs-on: ${{ matrix.os }} strategy: @@ -75,7 +75,31 @@ jobs: run: | python -m pip install --upgrade pip setuptools wheel python -m pip install --ignore-installed --upgrade -q --no-cache-dir -e .[test] - python -m pip install --upgrade --no-cache-dir git+git://github.com/scikit-hep/uproot.git + python -m pip install --upgrade --no-cache-dir git+git://github.com/scikit-hep/uproot3.git + python -m pip list + - name: Test with pytest + run: | + python -m pytest -r sx --ignore tests/benchmarks/ --ignore tests/contrib --ignore tests/test_notebooks.py + + pytest: + + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest] + python-version: [3.8] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools wheel + python -m pip install --ignore-installed --upgrade -q --no-cache-dir -e .[test] + python -m pip install --upgrade --no-cache-dir git+git://github.com/pytest-dev/pytest.git python -m pip list - name: Test with pytest run: | diff --git a/setup.py b/setup.py index 7a35d1d706..3baab9d7ff 100644 --- a/setup.py +++ b/setup.py @@ -48,7 +48,7 @@ extras_require['docs'] = sorted( { 'sphinx>=3.1.2', - 'sphinxcontrib-bibtex', + 'sphinxcontrib-bibtex~=1.0', 'sphinx-click', 'sphinx_rtd_theme', 'nbsphinx', diff --git a/tests/test_optim.py b/tests/test_optim.py index 5303f5b73a..f7f84517ec 100644 --- a/tests/test_optim.py +++ b/tests/test_optim.py @@ -380,7 +380,7 @@ def test_optim_with_value(backend, source, spec, mu): ) assert pyhf.tensorlib.tolist(result) assert pyhf.tensorlib.shape(fitted_val) == () - assert pytest.approx(17.52954975, rel=1e-5) == fitted_val + assert pytest.approx(17.52954975, rel=1e-5) == pyhf.tensorlib.tolist(fitted_val) @pytest.mark.parametrize('mu', [1.0], ids=['mu=1'])
ray-project__ray-5287
[Tune] The logdir string of Trial is always truncated

Currently, the logdir string of a trial is created by `Trial.create_logdir`:
https://github.com/ray-project/ray/blob/6f737e6a500dc9f500d4cf7ba7b31f979922a18b/python/ray/tune/trial.py#L373-L389

The `identifier` is always truncated to a length of `MAX_LEN_IDENTIFIER`. This limit should be configurable, since the maximum file-name length can be as large as 256 on some systems.

@richardliaw
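One way to address this, sketched below, is to read the limit from an environment variable and keep the current value of 130 as the default; the variable name is illustrative, and the snippet only demonstrates the truncation behaviour:

```python
import os

# Let users override the truncation limit; fall back to today's hard-coded 130.
MAX_LEN_IDENTIFIER = int(os.environ.get("MAX_LEN_IDENTIFIER", 130))

identifier = "my_trainable_" + "x" * 300   # illustrative over-long trial identifier
prefix = identifier[:MAX_LEN_IDENTIFIER]   # what create_logdir would use as the dir prefix
print(len(prefix))                         # 130 unless MAX_LEN_IDENTIFIER is set
```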
[ { "content": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom collections import namedtuple\nimport ray.cloudpickle as cloudpickle\nimport copy\nfrom datetime import datetime\nimport logging\nimport json\nimport uuid\nimport time\nimport tempfile\nimport os\nfrom numbers import Number\n\n# For compatibility under py2 to consider unicode as str\nfrom six import string_types\n\nimport ray\nfrom ray.tune import TuneError\nfrom ray.tune.logger import pretty_print, UnifiedLogger\n# NOTE(rkn): We import ray.tune.registry here instead of importing the names we\n# need because there are cyclic imports that may cause specific names to not\n# have been defined yet. See https://github.com/ray-project/ray/issues/1716.\nimport ray.tune.registry\nfrom ray.tune.result import (DEFAULT_RESULTS_DIR, DONE, HOSTNAME, PID,\n TIME_TOTAL_S, TRAINING_ITERATION, TIMESTEPS_TOTAL,\n EPISODE_REWARD_MEAN, MEAN_LOSS, MEAN_ACCURACY)\nfrom ray.utils import binary_to_hex, hex_to_binary\n\nDEBUG_PRINT_INTERVAL = 5\nMAX_LEN_IDENTIFIER = 130\nlogger = logging.getLogger(__name__)\n\n\ndef date_str():\n return datetime.today().strftime(\"%Y-%m-%d_%H-%M-%S\")\n\n\nclass Resources(\n namedtuple(\"Resources\", [\n \"cpu\", \"gpu\", \"extra_cpu\", \"extra_gpu\", \"custom_resources\",\n \"extra_custom_resources\"\n ])):\n \"\"\"Ray resources required to schedule a trial.\n\n Attributes:\n cpu (float): Number of CPUs to allocate to the trial.\n gpu (float): Number of GPUs to allocate to the trial.\n extra_cpu (float): Extra CPUs to reserve in case the trial needs to\n launch additional Ray actors that use CPUs.\n extra_gpu (float): Extra GPUs to reserve in case the trial needs to\n launch additional Ray actors that use GPUs.\n custom_resources (dict): Mapping of resource to quantity to allocate\n to the trial.\n extra_custom_resources (dict): Extra custom resources to reserve in\n case the trial needs to launch additional Ray actors that use\n any of these custom resources.\n\n \"\"\"\n\n __slots__ = ()\n\n def __new__(cls,\n cpu,\n gpu,\n extra_cpu=0,\n extra_gpu=0,\n custom_resources=None,\n extra_custom_resources=None):\n custom_resources = custom_resources or {}\n extra_custom_resources = extra_custom_resources or {}\n leftovers = set(custom_resources) ^ set(extra_custom_resources)\n\n for value in leftovers:\n custom_resources.setdefault(value, 0)\n extra_custom_resources.setdefault(value, 0)\n\n all_values = [cpu, gpu, extra_cpu, extra_gpu]\n all_values += list(custom_resources.values())\n all_values += list(extra_custom_resources.values())\n assert len(custom_resources) == len(extra_custom_resources)\n for entry in all_values:\n assert isinstance(entry, Number), \"Improper resource value.\"\n return super(Resources,\n cls).__new__(cls, cpu, gpu, extra_cpu, extra_gpu,\n custom_resources, extra_custom_resources)\n\n def summary_string(self):\n summary = \"{} CPUs, {} GPUs\".format(self.cpu + self.extra_cpu,\n self.gpu + self.extra_gpu)\n custom_summary = \", \".join([\n \"{} {}\".format(self.get_res_total(res), res)\n for res in self.custom_resources\n ])\n if custom_summary:\n summary += \" ({})\".format(custom_summary)\n return summary\n\n def cpu_total(self):\n return self.cpu + self.extra_cpu\n\n def gpu_total(self):\n return self.gpu + self.extra_gpu\n\n def get_res_total(self, key):\n return self.custom_resources.get(\n key, 0) + self.extra_custom_resources.get(key, 0)\n\n def get(self, key):\n return self.custom_resources.get(key, 0)\n\n def 
is_nonnegative(self):\n all_values = [self.cpu, self.gpu, self.extra_cpu, self.extra_gpu]\n all_values += list(self.custom_resources.values())\n all_values += list(self.extra_custom_resources.values())\n return all(v >= 0 for v in all_values)\n\n @classmethod\n def subtract(cls, original, to_remove):\n cpu = original.cpu - to_remove.cpu\n gpu = original.gpu - to_remove.gpu\n extra_cpu = original.extra_cpu - to_remove.extra_cpu\n extra_gpu = original.extra_gpu - to_remove.extra_gpu\n all_resources = set(original.custom_resources).union(\n set(to_remove.custom_resources))\n new_custom_res = {\n k: original.custom_resources.get(k, 0) -\n to_remove.custom_resources.get(k, 0)\n for k in all_resources\n }\n extra_custom_res = {\n k: original.extra_custom_resources.get(k, 0) -\n to_remove.extra_custom_resources.get(k, 0)\n for k in all_resources\n }\n return Resources(cpu, gpu, extra_cpu, extra_gpu, new_custom_res,\n extra_custom_res)\n\n def to_json(self):\n return resources_to_json(self)\n\n\ndef json_to_resources(data):\n if data is None or data == \"null\":\n return None\n if isinstance(data, string_types):\n data = json.loads(data)\n for k in data:\n if k in [\"driver_cpu_limit\", \"driver_gpu_limit\"]:\n raise TuneError(\n \"The field `{}` is no longer supported. Use `extra_cpu` \"\n \"or `extra_gpu` instead.\".format(k))\n if k not in Resources._fields:\n raise ValueError(\n \"Unknown resource field {}, must be one of {}\".format(\n k, Resources._fields))\n return Resources(\n data.get(\"cpu\", 1), data.get(\"gpu\", 0), data.get(\"extra_cpu\", 0),\n data.get(\"extra_gpu\", 0), data.get(\"custom_resources\"),\n data.get(\"extra_custom_resources\"))\n\n\ndef resources_to_json(resources):\n if resources is None:\n return None\n return {\n \"cpu\": resources.cpu,\n \"gpu\": resources.gpu,\n \"extra_cpu\": resources.extra_cpu,\n \"extra_gpu\": resources.extra_gpu,\n \"custom_resources\": resources.custom_resources.copy(),\n \"extra_custom_resources\": resources.extra_custom_resources.copy()\n }\n\n\ndef has_trainable(trainable_name):\n return ray.tune.registry._global_registry.contains(\n ray.tune.registry.TRAINABLE_CLASS, trainable_name)\n\n\ndef recursive_criteria_check(result, criteria):\n for criteria, stop_value in criteria.items():\n if criteria not in result:\n raise TuneError(\n \"Stopping criteria {} not provided in result {}.\".format(\n criteria, result))\n elif isinstance(result[criteria], dict) and isinstance(\n stop_value, dict):\n if recursive_criteria_check(result[criteria], stop_value):\n return True\n elif result[criteria] >= stop_value:\n return True\n return False\n\n\nclass Checkpoint(object):\n \"\"\"Describes a checkpoint of trial state.\n\n Checkpoint may be saved in different storage.\n\n Attributes:\n storage (str): Storage type.\n value (str): If storage==MEMORY,value is a Python object.\n If storage==DISK,value is a path points to the checkpoint in disk.\n \"\"\"\n\n MEMORY = \"memory\"\n DISK = \"disk\"\n\n def __init__(self, storage, value, last_result=None):\n self.storage = storage\n self.value = value\n self.last_result = last_result or {}\n\n @staticmethod\n def from_object(value=None):\n \"\"\"Creates a checkpoint from a Python object.\"\"\"\n return Checkpoint(Checkpoint.MEMORY, value)\n\n\nclass ExportFormat(object):\n \"\"\"Describes the format to export the trial Trainable.\n\n This may correspond to different file formats based on the\n Trainable implementation.\n \"\"\"\n CHECKPOINT = \"checkpoint\"\n MODEL = \"model\"\n\n @staticmethod\n def 
validate(export_formats):\n \"\"\"Validates export_formats.\n\n Raises:\n ValueError if the format is unknown.\n \"\"\"\n for i in range(len(export_formats)):\n export_formats[i] = export_formats[i].strip().lower()\n if export_formats[i] not in [\n ExportFormat.CHECKPOINT, ExportFormat.MODEL\n ]:\n raise TuneError(\"Unsupported export format: \" +\n export_formats[i])\n\n\nclass Trial(object):\n \"\"\"A trial object holds the state for one model training run.\n\n Trials are themselves managed by the TrialRunner class, which implements\n the event loop for submitting trial runs to a Ray cluster.\n\n Trials start in the PENDING state, and transition to RUNNING once started.\n On error it transitions to ERROR, otherwise TERMINATED on success.\n \"\"\"\n\n PENDING = \"PENDING\"\n RUNNING = \"RUNNING\"\n PAUSED = \"PAUSED\"\n TERMINATED = \"TERMINATED\"\n ERROR = \"ERROR\"\n\n def __init__(self,\n trainable_name,\n config=None,\n trial_id=None,\n local_dir=DEFAULT_RESULTS_DIR,\n experiment_tag=\"\",\n resources=None,\n stopping_criterion=None,\n checkpoint_freq=0,\n checkpoint_at_end=False,\n keep_checkpoints_num=None,\n checkpoint_score_attr=\"\",\n export_formats=None,\n restore_path=None,\n trial_name_creator=None,\n loggers=None,\n sync_to_driver_fn=None,\n max_failures=0):\n \"\"\"Initialize a new trial.\n\n The args here take the same meaning as the command line flags defined\n in ray.tune.config_parser.\n \"\"\"\n\n Trial._registration_check(trainable_name)\n # Trial config\n self.trainable_name = trainable_name\n self.config = config or {}\n self.local_dir = local_dir # This remains unexpanded for syncing.\n self.experiment_tag = experiment_tag\n trainable_cls = self._get_trainable_cls()\n if trainable_cls and hasattr(trainable_cls,\n \"default_resource_request\"):\n default_resources = trainable_cls.default_resource_request(\n self.config)\n if default_resources:\n if resources:\n raise ValueError(\n \"Resources for {} have been automatically set to {} \"\n \"by its `default_resource_request()` method. 
Please \"\n \"clear the `resources_per_trial` option.\".format(\n trainable_cls, default_resources))\n resources = default_resources\n self.resources = resources or Resources(cpu=1, gpu=0)\n self.stopping_criterion = stopping_criterion or {}\n self.loggers = loggers\n self.sync_to_driver_fn = sync_to_driver_fn\n self.verbose = True\n self.max_failures = max_failures\n\n # Local trial state that is updated during the run\n self.last_result = {}\n self.last_update_time = -float(\"inf\")\n self.checkpoint_freq = checkpoint_freq\n self.checkpoint_at_end = checkpoint_at_end\n\n self.history = []\n self.keep_checkpoints_num = keep_checkpoints_num\n self._cmp_greater = not checkpoint_score_attr.startswith(\"min-\")\n self.best_checkpoint_attr_value = -float(\"inf\") \\\n if self._cmp_greater else float(\"inf\")\n # Strip off \"min-\" from checkpoint attribute\n self.checkpoint_score_attr = checkpoint_score_attr \\\n if self._cmp_greater else checkpoint_score_attr[4:]\n\n self._checkpoint = Checkpoint(\n storage=Checkpoint.DISK, value=restore_path)\n self.export_formats = export_formats\n self.status = Trial.PENDING\n self.logdir = None\n self.runner = None\n self.result_logger = None\n self.last_debug = 0\n self.trial_id = Trial.generate_id() if trial_id is None else trial_id\n self.error_file = None\n self.num_failures = 0\n self.custom_trial_name = None\n\n # AutoML fields\n self.results = None\n self.best_result = None\n self.param_config = None\n self.extra_arg = None\n\n self._nonjson_fields = [\n \"_checkpoint\",\n \"loggers\",\n \"sync_to_driver_fn\",\n \"results\",\n \"best_result\",\n \"param_config\",\n \"extra_arg\",\n ]\n if trial_name_creator:\n self.custom_trial_name = trial_name_creator(self)\n\n @classmethod\n def _registration_check(cls, trainable_name):\n if not has_trainable(trainable_name):\n # Make sure rllib agents are registered\n from ray import rllib # noqa: F401\n if not has_trainable(trainable_name):\n raise TuneError(\"Unknown trainable: \" + trainable_name)\n\n @classmethod\n def generate_id(cls):\n return str(uuid.uuid1().hex)[:8]\n\n @classmethod\n def create_logdir(cls, identifier, local_dir):\n local_dir = os.path.expanduser(local_dir)\n if not os.path.exists(local_dir):\n os.makedirs(local_dir)\n return tempfile.mkdtemp(\n prefix=\"{}_{}\".format(identifier[:MAX_LEN_IDENTIFIER], date_str()),\n dir=local_dir)\n\n def init_logger(self):\n \"\"\"Init logger.\"\"\"\n\n if not self.result_logger:\n if not self.logdir:\n self.logdir = Trial.create_logdir(str(self), self.local_dir)\n elif not os.path.exists(self.logdir):\n os.makedirs(self.logdir)\n\n self.result_logger = UnifiedLogger(\n self.config,\n self.logdir,\n loggers=self.loggers,\n sync_function=self.sync_to_driver_fn)\n\n def update_resources(self, cpu, gpu, **kwargs):\n \"\"\"EXPERIMENTAL: Updates the resource requirements.\n\n Should only be called when the trial is not running.\n\n Raises:\n ValueError if trial status is running.\n \"\"\"\n if self.status is Trial.RUNNING:\n raise ValueError(\"Cannot update resources while Trial is running.\")\n self.resources = Resources(cpu, gpu, **kwargs)\n\n def sync_logger_to_new_location(self, worker_ip):\n \"\"\"Updates the logger location.\n\n Also pushes logdir to worker_ip, allowing for cross-node recovery.\n \"\"\"\n if self.result_logger:\n self.result_logger.sync_results_to_new_location(worker_ip)\n\n def close_logger(self):\n \"\"\"Close logger.\"\"\"\n\n if self.result_logger:\n self.result_logger.close()\n self.result_logger = None\n\n def 
write_error_log(self, error_msg):\n if error_msg and self.logdir:\n self.num_failures += 1 # may be moved to outer scope?\n error_file = os.path.join(self.logdir,\n \"error_{}.txt\".format(date_str()))\n with open(error_file, \"w\") as f:\n f.write(error_msg)\n self.error_file = error_file\n\n def should_stop(self, result):\n \"\"\"Whether the given result meets this trial's stopping criteria.\"\"\"\n\n if result.get(DONE):\n return True\n\n return recursive_criteria_check(result, self.stopping_criterion)\n\n def should_checkpoint(self):\n \"\"\"Whether this trial is due for checkpointing.\"\"\"\n result = self.last_result or {}\n\n if result.get(DONE) and self.checkpoint_at_end:\n return True\n\n if self.checkpoint_freq:\n return result.get(TRAINING_ITERATION,\n 0) % self.checkpoint_freq == 0\n else:\n return False\n\n def progress_string(self):\n \"\"\"Returns a progress message for printing out to the console.\"\"\"\n\n if not self.last_result:\n return self._status_string()\n\n def location_string(hostname, pid):\n if hostname == os.uname()[1]:\n return \"pid={}\".format(pid)\n else:\n return \"{} pid={}\".format(hostname, pid)\n\n pieces = [\n \"{}\".format(self._status_string()), \"[{}]\".format(\n self.resources.summary_string()), \"[{}]\".format(\n location_string(\n self.last_result.get(HOSTNAME),\n self.last_result.get(PID))), \"{} s\".format(\n int(self.last_result.get(TIME_TOTAL_S)))\n ]\n\n if self.last_result.get(TRAINING_ITERATION) is not None:\n pieces.append(\"{} iter\".format(\n self.last_result[TRAINING_ITERATION]))\n\n if self.last_result.get(TIMESTEPS_TOTAL) is not None:\n pieces.append(\"{} ts\".format(self.last_result[TIMESTEPS_TOTAL]))\n\n if self.last_result.get(EPISODE_REWARD_MEAN) is not None:\n pieces.append(\"{} rew\".format(\n format(self.last_result[EPISODE_REWARD_MEAN], \".3g\")))\n\n if self.last_result.get(MEAN_LOSS) is not None:\n pieces.append(\"{} loss\".format(\n format(self.last_result[MEAN_LOSS], \".3g\")))\n\n if self.last_result.get(MEAN_ACCURACY) is not None:\n pieces.append(\"{} acc\".format(\n format(self.last_result[MEAN_ACCURACY], \".3g\")))\n\n return \", \".join(pieces)\n\n def _status_string(self):\n return \"{}{}\".format(\n self.status, \", {} failures: {}\".format(self.num_failures,\n self.error_file)\n if self.error_file else \"\")\n\n def has_checkpoint(self):\n return self._checkpoint.value is not None\n\n def clear_checkpoint(self):\n self._checkpoint.value = None\n\n def should_recover(self):\n \"\"\"Returns whether the trial qualifies for restoring.\n\n This is if a checkpoint frequency is set and has not failed more than\n max_failures. This may return true even when there may not yet\n be a checkpoint.\n \"\"\"\n return (self.checkpoint_freq > 0\n and (self.num_failures < self.max_failures\n or self.max_failures < 0))\n\n def update_last_result(self, result, terminate=False):\n if terminate:\n result.update(done=True)\n if self.verbose and (terminate or time.time() - self.last_debug >\n DEBUG_PRINT_INTERVAL):\n print(\"Result for {}:\".format(self))\n print(\" {}\".format(pretty_print(result).replace(\"\\n\", \"\\n \")))\n self.last_debug = time.time()\n self.last_result = result\n self.last_update_time = time.time()\n self.result_logger.on_result(self.last_result)\n\n def compare_checkpoints(self, attr_mean):\n \"\"\"Compares two checkpoints based on the attribute attr_mean param.\n Greater than is used by default. 
If command-line parameter\n checkpoint_score_attr starts with \"min-\" less than is used.\n\n Arguments:\n attr_mean: mean of attribute value for the current checkpoint\n\n Returns:\n True: when attr_mean is greater than previous checkpoint attr_mean\n and greater than function is selected\n when attr_mean is less than previous checkpoint attr_mean and\n less than function is selected\n False: when attr_mean is not in alignment with selected cmp fn\n \"\"\"\n if self._cmp_greater and attr_mean > self.best_checkpoint_attr_value:\n return True\n elif (not self._cmp_greater\n and attr_mean < self.best_checkpoint_attr_value):\n return True\n return False\n\n def _get_trainable_cls(self):\n return ray.tune.registry._global_registry.get(\n ray.tune.registry.TRAINABLE_CLASS, self.trainable_name)\n\n def set_verbose(self, verbose):\n self.verbose = verbose\n\n def is_finished(self):\n return self.status in [Trial.TERMINATED, Trial.ERROR]\n\n @property\n def node_ip(self):\n return self.last_result.get(\"node_ip\")\n\n def __repr__(self):\n return str(self)\n\n def __str__(self):\n \"\"\"Combines ``env`` with ``trainable_name`` and ``experiment_tag``.\n\n Can be overriden with a custom string creator.\n \"\"\"\n if self.custom_trial_name:\n return self.custom_trial_name\n\n if \"env\" in self.config:\n env = self.config[\"env\"]\n if isinstance(env, type):\n env = env.__name__\n identifier = \"{}_{}\".format(self.trainable_name, env)\n else:\n identifier = self.trainable_name\n if self.experiment_tag:\n identifier += \"_\" + self.experiment_tag\n return identifier.replace(\"/\", \"_\")\n\n def __getstate__(self):\n \"\"\"Memento generator for Trial.\n\n Sets RUNNING trials to PENDING, and flushes the result logger.\n Note this can only occur if the trial holds a DISK checkpoint.\n \"\"\"\n assert self._checkpoint.storage == Checkpoint.DISK, (\n \"Checkpoint must not be in-memory.\")\n state = self.__dict__.copy()\n state[\"resources\"] = resources_to_json(self.resources)\n\n for key in self._nonjson_fields:\n state[key] = binary_to_hex(cloudpickle.dumps(state.get(key)))\n\n state[\"runner\"] = None\n state[\"result_logger\"] = None\n if self.result_logger:\n self.result_logger.flush()\n state[\"__logger_started__\"] = True\n else:\n state[\"__logger_started__\"] = False\n return copy.deepcopy(state)\n\n def __setstate__(self, state):\n logger_started = state.pop(\"__logger_started__\")\n state[\"resources\"] = json_to_resources(state[\"resources\"])\n if state[\"status\"] == Trial.RUNNING:\n state[\"status\"] = Trial.PENDING\n for key in self._nonjson_fields:\n state[key] = cloudpickle.loads(hex_to_binary(state[key]))\n\n self.__dict__.update(state)\n Trial._registration_check(self.trainable_name)\n if logger_started:\n self.init_logger()\n", "path": "python/ray/tune/trial.py" } ]
[ { "content": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom collections import namedtuple\nimport ray.cloudpickle as cloudpickle\nimport copy\nfrom datetime import datetime\nimport logging\nimport json\nimport uuid\nimport time\nimport tempfile\nimport os\nfrom numbers import Number\n\n# For compatibility under py2 to consider unicode as str\nfrom six import string_types\n\nimport ray\nfrom ray.tune import TuneError\nfrom ray.tune.logger import pretty_print, UnifiedLogger\n# NOTE(rkn): We import ray.tune.registry here instead of importing the names we\n# need because there are cyclic imports that may cause specific names to not\n# have been defined yet. See https://github.com/ray-project/ray/issues/1716.\nimport ray.tune.registry\nfrom ray.tune.result import (DEFAULT_RESULTS_DIR, DONE, HOSTNAME, PID,\n TIME_TOTAL_S, TRAINING_ITERATION, TIMESTEPS_TOTAL,\n EPISODE_REWARD_MEAN, MEAN_LOSS, MEAN_ACCURACY)\nfrom ray.utils import binary_to_hex, hex_to_binary\n\nDEBUG_PRINT_INTERVAL = 5\nMAX_LEN_IDENTIFIER = int(os.environ.get(\"MAX_LEN_IDENTIFIER\", 130))\nlogger = logging.getLogger(__name__)\n\n\ndef date_str():\n return datetime.today().strftime(\"%Y-%m-%d_%H-%M-%S\")\n\n\nclass Resources(\n namedtuple(\"Resources\", [\n \"cpu\", \"gpu\", \"extra_cpu\", \"extra_gpu\", \"custom_resources\",\n \"extra_custom_resources\"\n ])):\n \"\"\"Ray resources required to schedule a trial.\n\n Attributes:\n cpu (float): Number of CPUs to allocate to the trial.\n gpu (float): Number of GPUs to allocate to the trial.\n extra_cpu (float): Extra CPUs to reserve in case the trial needs to\n launch additional Ray actors that use CPUs.\n extra_gpu (float): Extra GPUs to reserve in case the trial needs to\n launch additional Ray actors that use GPUs.\n custom_resources (dict): Mapping of resource to quantity to allocate\n to the trial.\n extra_custom_resources (dict): Extra custom resources to reserve in\n case the trial needs to launch additional Ray actors that use\n any of these custom resources.\n\n \"\"\"\n\n __slots__ = ()\n\n def __new__(cls,\n cpu,\n gpu,\n extra_cpu=0,\n extra_gpu=0,\n custom_resources=None,\n extra_custom_resources=None):\n custom_resources = custom_resources or {}\n extra_custom_resources = extra_custom_resources or {}\n leftovers = set(custom_resources) ^ set(extra_custom_resources)\n\n for value in leftovers:\n custom_resources.setdefault(value, 0)\n extra_custom_resources.setdefault(value, 0)\n\n all_values = [cpu, gpu, extra_cpu, extra_gpu]\n all_values += list(custom_resources.values())\n all_values += list(extra_custom_resources.values())\n assert len(custom_resources) == len(extra_custom_resources)\n for entry in all_values:\n assert isinstance(entry, Number), \"Improper resource value.\"\n return super(Resources,\n cls).__new__(cls, cpu, gpu, extra_cpu, extra_gpu,\n custom_resources, extra_custom_resources)\n\n def summary_string(self):\n summary = \"{} CPUs, {} GPUs\".format(self.cpu + self.extra_cpu,\n self.gpu + self.extra_gpu)\n custom_summary = \", \".join([\n \"{} {}\".format(self.get_res_total(res), res)\n for res in self.custom_resources\n ])\n if custom_summary:\n summary += \" ({})\".format(custom_summary)\n return summary\n\n def cpu_total(self):\n return self.cpu + self.extra_cpu\n\n def gpu_total(self):\n return self.gpu + self.extra_gpu\n\n def get_res_total(self, key):\n return self.custom_resources.get(\n key, 0) + self.extra_custom_resources.get(key, 0)\n\n def get(self, key):\n return 
self.custom_resources.get(key, 0)\n\n def is_nonnegative(self):\n all_values = [self.cpu, self.gpu, self.extra_cpu, self.extra_gpu]\n all_values += list(self.custom_resources.values())\n all_values += list(self.extra_custom_resources.values())\n return all(v >= 0 for v in all_values)\n\n @classmethod\n def subtract(cls, original, to_remove):\n cpu = original.cpu - to_remove.cpu\n gpu = original.gpu - to_remove.gpu\n extra_cpu = original.extra_cpu - to_remove.extra_cpu\n extra_gpu = original.extra_gpu - to_remove.extra_gpu\n all_resources = set(original.custom_resources).union(\n set(to_remove.custom_resources))\n new_custom_res = {\n k: original.custom_resources.get(k, 0) -\n to_remove.custom_resources.get(k, 0)\n for k in all_resources\n }\n extra_custom_res = {\n k: original.extra_custom_resources.get(k, 0) -\n to_remove.extra_custom_resources.get(k, 0)\n for k in all_resources\n }\n return Resources(cpu, gpu, extra_cpu, extra_gpu, new_custom_res,\n extra_custom_res)\n\n def to_json(self):\n return resources_to_json(self)\n\n\ndef json_to_resources(data):\n if data is None or data == \"null\":\n return None\n if isinstance(data, string_types):\n data = json.loads(data)\n for k in data:\n if k in [\"driver_cpu_limit\", \"driver_gpu_limit\"]:\n raise TuneError(\n \"The field `{}` is no longer supported. Use `extra_cpu` \"\n \"or `extra_gpu` instead.\".format(k))\n if k not in Resources._fields:\n raise ValueError(\n \"Unknown resource field {}, must be one of {}\".format(\n k, Resources._fields))\n return Resources(\n data.get(\"cpu\", 1), data.get(\"gpu\", 0), data.get(\"extra_cpu\", 0),\n data.get(\"extra_gpu\", 0), data.get(\"custom_resources\"),\n data.get(\"extra_custom_resources\"))\n\n\ndef resources_to_json(resources):\n if resources is None:\n return None\n return {\n \"cpu\": resources.cpu,\n \"gpu\": resources.gpu,\n \"extra_cpu\": resources.extra_cpu,\n \"extra_gpu\": resources.extra_gpu,\n \"custom_resources\": resources.custom_resources.copy(),\n \"extra_custom_resources\": resources.extra_custom_resources.copy()\n }\n\n\ndef has_trainable(trainable_name):\n return ray.tune.registry._global_registry.contains(\n ray.tune.registry.TRAINABLE_CLASS, trainable_name)\n\n\ndef recursive_criteria_check(result, criteria):\n for criteria, stop_value in criteria.items():\n if criteria not in result:\n raise TuneError(\n \"Stopping criteria {} not provided in result {}.\".format(\n criteria, result))\n elif isinstance(result[criteria], dict) and isinstance(\n stop_value, dict):\n if recursive_criteria_check(result[criteria], stop_value):\n return True\n elif result[criteria] >= stop_value:\n return True\n return False\n\n\nclass Checkpoint(object):\n \"\"\"Describes a checkpoint of trial state.\n\n Checkpoint may be saved in different storage.\n\n Attributes:\n storage (str): Storage type.\n value (str): If storage==MEMORY,value is a Python object.\n If storage==DISK,value is a path points to the checkpoint in disk.\n \"\"\"\n\n MEMORY = \"memory\"\n DISK = \"disk\"\n\n def __init__(self, storage, value, last_result=None):\n self.storage = storage\n self.value = value\n self.last_result = last_result or {}\n\n @staticmethod\n def from_object(value=None):\n \"\"\"Creates a checkpoint from a Python object.\"\"\"\n return Checkpoint(Checkpoint.MEMORY, value)\n\n\nclass ExportFormat(object):\n \"\"\"Describes the format to export the trial Trainable.\n\n This may correspond to different file formats based on the\n Trainable implementation.\n \"\"\"\n CHECKPOINT = \"checkpoint\"\n MODEL = 
\"model\"\n\n @staticmethod\n def validate(export_formats):\n \"\"\"Validates export_formats.\n\n Raises:\n ValueError if the format is unknown.\n \"\"\"\n for i in range(len(export_formats)):\n export_formats[i] = export_formats[i].strip().lower()\n if export_formats[i] not in [\n ExportFormat.CHECKPOINT, ExportFormat.MODEL\n ]:\n raise TuneError(\"Unsupported export format: \" +\n export_formats[i])\n\n\nclass Trial(object):\n \"\"\"A trial object holds the state for one model training run.\n\n Trials are themselves managed by the TrialRunner class, which implements\n the event loop for submitting trial runs to a Ray cluster.\n\n Trials start in the PENDING state, and transition to RUNNING once started.\n On error it transitions to ERROR, otherwise TERMINATED on success.\n \"\"\"\n\n PENDING = \"PENDING\"\n RUNNING = \"RUNNING\"\n PAUSED = \"PAUSED\"\n TERMINATED = \"TERMINATED\"\n ERROR = \"ERROR\"\n\n def __init__(self,\n trainable_name,\n config=None,\n trial_id=None,\n local_dir=DEFAULT_RESULTS_DIR,\n experiment_tag=\"\",\n resources=None,\n stopping_criterion=None,\n checkpoint_freq=0,\n checkpoint_at_end=False,\n keep_checkpoints_num=None,\n checkpoint_score_attr=\"\",\n export_formats=None,\n restore_path=None,\n trial_name_creator=None,\n loggers=None,\n sync_to_driver_fn=None,\n max_failures=0):\n \"\"\"Initialize a new trial.\n\n The args here take the same meaning as the command line flags defined\n in ray.tune.config_parser.\n \"\"\"\n\n Trial._registration_check(trainable_name)\n # Trial config\n self.trainable_name = trainable_name\n self.config = config or {}\n self.local_dir = local_dir # This remains unexpanded for syncing.\n self.experiment_tag = experiment_tag\n trainable_cls = self._get_trainable_cls()\n if trainable_cls and hasattr(trainable_cls,\n \"default_resource_request\"):\n default_resources = trainable_cls.default_resource_request(\n self.config)\n if default_resources:\n if resources:\n raise ValueError(\n \"Resources for {} have been automatically set to {} \"\n \"by its `default_resource_request()` method. 
Please \"\n \"clear the `resources_per_trial` option.\".format(\n trainable_cls, default_resources))\n resources = default_resources\n self.resources = resources or Resources(cpu=1, gpu=0)\n self.stopping_criterion = stopping_criterion or {}\n self.loggers = loggers\n self.sync_to_driver_fn = sync_to_driver_fn\n self.verbose = True\n self.max_failures = max_failures\n\n # Local trial state that is updated during the run\n self.last_result = {}\n self.last_update_time = -float(\"inf\")\n self.checkpoint_freq = checkpoint_freq\n self.checkpoint_at_end = checkpoint_at_end\n\n self.history = []\n self.keep_checkpoints_num = keep_checkpoints_num\n self._cmp_greater = not checkpoint_score_attr.startswith(\"min-\")\n self.best_checkpoint_attr_value = -float(\"inf\") \\\n if self._cmp_greater else float(\"inf\")\n # Strip off \"min-\" from checkpoint attribute\n self.checkpoint_score_attr = checkpoint_score_attr \\\n if self._cmp_greater else checkpoint_score_attr[4:]\n\n self._checkpoint = Checkpoint(\n storage=Checkpoint.DISK, value=restore_path)\n self.export_formats = export_formats\n self.status = Trial.PENDING\n self.logdir = None\n self.runner = None\n self.result_logger = None\n self.last_debug = 0\n self.trial_id = Trial.generate_id() if trial_id is None else trial_id\n self.error_file = None\n self.num_failures = 0\n self.custom_trial_name = None\n\n # AutoML fields\n self.results = None\n self.best_result = None\n self.param_config = None\n self.extra_arg = None\n\n self._nonjson_fields = [\n \"_checkpoint\",\n \"loggers\",\n \"sync_to_driver_fn\",\n \"results\",\n \"best_result\",\n \"param_config\",\n \"extra_arg\",\n ]\n if trial_name_creator:\n self.custom_trial_name = trial_name_creator(self)\n\n @classmethod\n def _registration_check(cls, trainable_name):\n if not has_trainable(trainable_name):\n # Make sure rllib agents are registered\n from ray import rllib # noqa: F401\n if not has_trainable(trainable_name):\n raise TuneError(\"Unknown trainable: \" + trainable_name)\n\n @classmethod\n def generate_id(cls):\n return str(uuid.uuid1().hex)[:8]\n\n @classmethod\n def create_logdir(cls, identifier, local_dir):\n local_dir = os.path.expanduser(local_dir)\n if not os.path.exists(local_dir):\n os.makedirs(local_dir)\n return tempfile.mkdtemp(\n prefix=\"{}_{}\".format(identifier[:MAX_LEN_IDENTIFIER], date_str()),\n dir=local_dir)\n\n def init_logger(self):\n \"\"\"Init logger.\"\"\"\n\n if not self.result_logger:\n if not self.logdir:\n self.logdir = Trial.create_logdir(str(self), self.local_dir)\n elif not os.path.exists(self.logdir):\n os.makedirs(self.logdir)\n\n self.result_logger = UnifiedLogger(\n self.config,\n self.logdir,\n loggers=self.loggers,\n sync_function=self.sync_to_driver_fn)\n\n def update_resources(self, cpu, gpu, **kwargs):\n \"\"\"EXPERIMENTAL: Updates the resource requirements.\n\n Should only be called when the trial is not running.\n\n Raises:\n ValueError if trial status is running.\n \"\"\"\n if self.status is Trial.RUNNING:\n raise ValueError(\"Cannot update resources while Trial is running.\")\n self.resources = Resources(cpu, gpu, **kwargs)\n\n def sync_logger_to_new_location(self, worker_ip):\n \"\"\"Updates the logger location.\n\n Also pushes logdir to worker_ip, allowing for cross-node recovery.\n \"\"\"\n if self.result_logger:\n self.result_logger.sync_results_to_new_location(worker_ip)\n\n def close_logger(self):\n \"\"\"Close logger.\"\"\"\n\n if self.result_logger:\n self.result_logger.close()\n self.result_logger = None\n\n def 
write_error_log(self, error_msg):\n if error_msg and self.logdir:\n self.num_failures += 1 # may be moved to outer scope?\n error_file = os.path.join(self.logdir,\n \"error_{}.txt\".format(date_str()))\n with open(error_file, \"w\") as f:\n f.write(error_msg)\n self.error_file = error_file\n\n def should_stop(self, result):\n \"\"\"Whether the given result meets this trial's stopping criteria.\"\"\"\n\n if result.get(DONE):\n return True\n\n return recursive_criteria_check(result, self.stopping_criterion)\n\n def should_checkpoint(self):\n \"\"\"Whether this trial is due for checkpointing.\"\"\"\n result = self.last_result or {}\n\n if result.get(DONE) and self.checkpoint_at_end:\n return True\n\n if self.checkpoint_freq:\n return result.get(TRAINING_ITERATION,\n 0) % self.checkpoint_freq == 0\n else:\n return False\n\n def progress_string(self):\n \"\"\"Returns a progress message for printing out to the console.\"\"\"\n\n if not self.last_result:\n return self._status_string()\n\n def location_string(hostname, pid):\n if hostname == os.uname()[1]:\n return \"pid={}\".format(pid)\n else:\n return \"{} pid={}\".format(hostname, pid)\n\n pieces = [\n \"{}\".format(self._status_string()), \"[{}]\".format(\n self.resources.summary_string()), \"[{}]\".format(\n location_string(\n self.last_result.get(HOSTNAME),\n self.last_result.get(PID))), \"{} s\".format(\n int(self.last_result.get(TIME_TOTAL_S)))\n ]\n\n if self.last_result.get(TRAINING_ITERATION) is not None:\n pieces.append(\"{} iter\".format(\n self.last_result[TRAINING_ITERATION]))\n\n if self.last_result.get(TIMESTEPS_TOTAL) is not None:\n pieces.append(\"{} ts\".format(self.last_result[TIMESTEPS_TOTAL]))\n\n if self.last_result.get(EPISODE_REWARD_MEAN) is not None:\n pieces.append(\"{} rew\".format(\n format(self.last_result[EPISODE_REWARD_MEAN], \".3g\")))\n\n if self.last_result.get(MEAN_LOSS) is not None:\n pieces.append(\"{} loss\".format(\n format(self.last_result[MEAN_LOSS], \".3g\")))\n\n if self.last_result.get(MEAN_ACCURACY) is not None:\n pieces.append(\"{} acc\".format(\n format(self.last_result[MEAN_ACCURACY], \".3g\")))\n\n return \", \".join(pieces)\n\n def _status_string(self):\n return \"{}{}\".format(\n self.status, \", {} failures: {}\".format(self.num_failures,\n self.error_file)\n if self.error_file else \"\")\n\n def has_checkpoint(self):\n return self._checkpoint.value is not None\n\n def clear_checkpoint(self):\n self._checkpoint.value = None\n\n def should_recover(self):\n \"\"\"Returns whether the trial qualifies for restoring.\n\n This is if a checkpoint frequency is set and has not failed more than\n max_failures. This may return true even when there may not yet\n be a checkpoint.\n \"\"\"\n return (self.checkpoint_freq > 0\n and (self.num_failures < self.max_failures\n or self.max_failures < 0))\n\n def update_last_result(self, result, terminate=False):\n if terminate:\n result.update(done=True)\n if self.verbose and (terminate or time.time() - self.last_debug >\n DEBUG_PRINT_INTERVAL):\n print(\"Result for {}:\".format(self))\n print(\" {}\".format(pretty_print(result).replace(\"\\n\", \"\\n \")))\n self.last_debug = time.time()\n self.last_result = result\n self.last_update_time = time.time()\n self.result_logger.on_result(self.last_result)\n\n def compare_checkpoints(self, attr_mean):\n \"\"\"Compares two checkpoints based on the attribute attr_mean param.\n Greater than is used by default. 
If command-line parameter\n checkpoint_score_attr starts with \"min-\" less than is used.\n\n Arguments:\n attr_mean: mean of attribute value for the current checkpoint\n\n Returns:\n True: when attr_mean is greater than previous checkpoint attr_mean\n and greater than function is selected\n when attr_mean is less than previous checkpoint attr_mean and\n less than function is selected\n False: when attr_mean is not in alignment with selected cmp fn\n \"\"\"\n if self._cmp_greater and attr_mean > self.best_checkpoint_attr_value:\n return True\n elif (not self._cmp_greater\n and attr_mean < self.best_checkpoint_attr_value):\n return True\n return False\n\n def _get_trainable_cls(self):\n return ray.tune.registry._global_registry.get(\n ray.tune.registry.TRAINABLE_CLASS, self.trainable_name)\n\n def set_verbose(self, verbose):\n self.verbose = verbose\n\n def is_finished(self):\n return self.status in [Trial.TERMINATED, Trial.ERROR]\n\n @property\n def node_ip(self):\n return self.last_result.get(\"node_ip\")\n\n def __repr__(self):\n return str(self)\n\n def __str__(self):\n \"\"\"Combines ``env`` with ``trainable_name`` and ``experiment_tag``.\n\n Can be overriden with a custom string creator.\n \"\"\"\n if self.custom_trial_name:\n return self.custom_trial_name\n\n if \"env\" in self.config:\n env = self.config[\"env\"]\n if isinstance(env, type):\n env = env.__name__\n identifier = \"{}_{}\".format(self.trainable_name, env)\n else:\n identifier = self.trainable_name\n if self.experiment_tag:\n identifier += \"_\" + self.experiment_tag\n return identifier.replace(\"/\", \"_\")\n\n def __getstate__(self):\n \"\"\"Memento generator for Trial.\n\n Sets RUNNING trials to PENDING, and flushes the result logger.\n Note this can only occur if the trial holds a DISK checkpoint.\n \"\"\"\n assert self._checkpoint.storage == Checkpoint.DISK, (\n \"Checkpoint must not be in-memory.\")\n state = self.__dict__.copy()\n state[\"resources\"] = resources_to_json(self.resources)\n\n for key in self._nonjson_fields:\n state[key] = binary_to_hex(cloudpickle.dumps(state.get(key)))\n\n state[\"runner\"] = None\n state[\"result_logger\"] = None\n if self.result_logger:\n self.result_logger.flush()\n state[\"__logger_started__\"] = True\n else:\n state[\"__logger_started__\"] = False\n return copy.deepcopy(state)\n\n def __setstate__(self, state):\n logger_started = state.pop(\"__logger_started__\")\n state[\"resources\"] = json_to_resources(state[\"resources\"])\n if state[\"status\"] == Trial.RUNNING:\n state[\"status\"] = Trial.PENDING\n for key in self._nonjson_fields:\n state[key] = cloudpickle.loads(hex_to_binary(state[key]))\n\n self.__dict__.update(state)\n Trial._registration_check(self.trainable_name)\n if logger_started:\n self.init_logger()\n", "path": "python/ray/tune/trial.py" } ]
diff --git a/python/ray/tune/trial.py b/python/ray/tune/trial.py index a5f9cef3abd17..1221c2a534c38 100644 --- a/python/ray/tune/trial.py +++ b/python/ray/tune/trial.py @@ -30,7 +30,7 @@ from ray.utils import binary_to_hex, hex_to_binary DEBUG_PRINT_INTERVAL = 5 -MAX_LEN_IDENTIFIER = 130 +MAX_LEN_IDENTIFIER = int(os.environ.get("MAX_LEN_IDENTIFIER", 130)) logger = logging.getLogger(__name__)
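For readers skimming the diff above: the change makes a previously hard-coded limit overridable at runtime. The sketch below illustrates the pattern in isolation; the helper function and the sample string are illustrative, and only the variable name `MAX_LEN_IDENTIFIER` and the default of 130 come from the patch.

```python
import os

# Fall back to the hard-coded default when the environment variable is unset.
MAX_LEN_IDENTIFIER = int(os.environ.get("MAX_LEN_IDENTIFIER", 130))


def truncate_identifier(identifier: str) -> str:
    # Keep identifiers (e.g. prefixes of trial log directories) within the limit.
    return identifier[:MAX_LEN_IDENTIFIER]


print(len(truncate_identifier("x" * 500)))  # 130 by default
```

Launching the process with `MAX_LEN_IDENTIFIER=60` set in the environment would make the same call print 60 instead.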
python-pillow__Pillow-6874
Fatal Python error for negative radius in ImageFilter.BoxBlur()

Hi,
Python crashes without an exception when a negative radius is passed into ImageFilter.BoxBlur(). This is the error message when running under Spyder:

```
Fatal Python error: Aborted

Main thread:
Current thread 0x00007fbab679b740 (most recent call first):
  File "/home/mgouka/miniconda3/envs/bx_gui/lib/python3.9/site-packages/PIL/ImageFilter.py", line 189 in filter
  File "/home/mgouka/miniconda3/envs/bx_gui/lib/python3.9/site-packages/PIL/Image.py", line 1305 in filter
  File "/tmp/ipykernel_28079/2108891280.py", line 1 in <module>

Restarting kernel...
```

And using Python in a standard Linux terminal it says:

```
Segmentation fault (core dumped)
```

The code below reproduces the problem:

```python
import numpy as np
from PIL import Image, ImageFilter

img = Image.fromarray(np.ones([100, 100, 3], dtype='uint8'))
img = img.filter(ImageFilter.BoxBlur(-2))
```
[ { "content": "#\n# The Python Imaging Library.\n# $Id$\n#\n# standard filters\n#\n# History:\n# 1995-11-27 fl Created\n# 2002-06-08 fl Added rank and mode filters\n# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call\n#\n# Copyright (c) 1997-2003 by Secret Labs AB.\n# Copyright (c) 1995-2002 by Fredrik Lundh.\n#\n# See the README file for information on usage and redistribution.\n#\nimport functools\n\n\nclass Filter:\n pass\n\n\nclass MultibandFilter(Filter):\n pass\n\n\nclass BuiltinFilter(MultibandFilter):\n def filter(self, image):\n if image.mode == \"P\":\n msg = \"cannot filter palette images\"\n raise ValueError(msg)\n return image.filter(*self.filterargs)\n\n\nclass Kernel(BuiltinFilter):\n \"\"\"\n Create a convolution kernel. The current version only\n supports 3x3 and 5x5 integer and floating point kernels.\n\n In the current version, kernels can only be applied to\n \"L\" and \"RGB\" images.\n\n :param size: Kernel size, given as (width, height). In the current\n version, this must be (3,3) or (5,5).\n :param kernel: A sequence containing kernel weights.\n :param scale: Scale factor. If given, the result for each pixel is\n divided by this value. The default is the sum of the\n kernel weights.\n :param offset: Offset. If given, this value is added to the result,\n after it has been divided by the scale factor.\n \"\"\"\n\n name = \"Kernel\"\n\n def __init__(self, size, kernel, scale=None, offset=0):\n if scale is None:\n # default scale is sum of kernel\n scale = functools.reduce(lambda a, b: a + b, kernel)\n if size[0] * size[1] != len(kernel):\n msg = \"not enough coefficients in kernel\"\n raise ValueError(msg)\n self.filterargs = size, scale, offset, kernel\n\n\nclass RankFilter(Filter):\n \"\"\"\n Create a rank filter. The rank filter sorts all pixels in\n a window of the given size, and returns the ``rank``'th value.\n\n :param size: The kernel size, in pixels.\n :param rank: What pixel value to pick. Use 0 for a min filter,\n ``size * size / 2`` for a median filter, ``size * size - 1``\n for a max filter, etc.\n \"\"\"\n\n name = \"Rank\"\n\n def __init__(self, size, rank):\n self.size = size\n self.rank = rank\n\n def filter(self, image):\n if image.mode == \"P\":\n msg = \"cannot filter palette images\"\n raise ValueError(msg)\n image = image.expand(self.size // 2, self.size // 2)\n return image.rankfilter(self.size, self.rank)\n\n\nclass MedianFilter(RankFilter):\n \"\"\"\n Create a median filter. Picks the median pixel value in a window with the\n given size.\n\n :param size: The kernel size, in pixels.\n \"\"\"\n\n name = \"Median\"\n\n def __init__(self, size=3):\n self.size = size\n self.rank = size * size // 2\n\n\nclass MinFilter(RankFilter):\n \"\"\"\n Create a min filter. Picks the lowest pixel value in a window with the\n given size.\n\n :param size: The kernel size, in pixels.\n \"\"\"\n\n name = \"Min\"\n\n def __init__(self, size=3):\n self.size = size\n self.rank = 0\n\n\nclass MaxFilter(RankFilter):\n \"\"\"\n Create a max filter. Picks the largest pixel value in a window with the\n given size.\n\n :param size: The kernel size, in pixels.\n \"\"\"\n\n name = \"Max\"\n\n def __init__(self, size=3):\n self.size = size\n self.rank = size * size - 1\n\n\nclass ModeFilter(Filter):\n \"\"\"\n Create a mode filter. Picks the most frequent pixel value in a box with the\n given size. 
Pixel values that occur only once or twice are ignored; if no\n pixel value occurs more than twice, the original pixel value is preserved.\n\n :param size: The kernel size, in pixels.\n \"\"\"\n\n name = \"Mode\"\n\n def __init__(self, size=3):\n self.size = size\n\n def filter(self, image):\n return image.modefilter(self.size)\n\n\nclass GaussianBlur(MultibandFilter):\n \"\"\"Blurs the image with a sequence of extended box filters, which\n approximates a Gaussian kernel. For details on accuracy see\n <https://www.mia.uni-saarland.de/Publications/gwosdek-ssvm11.pdf>\n\n :param radius: Standard deviation of the Gaussian kernel.\n \"\"\"\n\n name = \"GaussianBlur\"\n\n def __init__(self, radius=2):\n self.radius = radius\n\n def filter(self, image):\n return image.gaussian_blur(self.radius)\n\n\nclass BoxBlur(MultibandFilter):\n \"\"\"Blurs the image by setting each pixel to the average value of the pixels\n in a square box extending radius pixels in each direction.\n Supports float radius of arbitrary size. Uses an optimized implementation\n which runs in linear time relative to the size of the image\n for any radius value.\n\n :param radius: Size of the box in one direction. Radius 0 does not blur,\n returns an identical image. Radius 1 takes 1 pixel\n in each direction, i.e. 9 pixels in total.\n \"\"\"\n\n name = \"BoxBlur\"\n\n def __init__(self, radius):\n self.radius = radius\n\n def filter(self, image):\n return image.box_blur(self.radius)\n\n\nclass UnsharpMask(MultibandFilter):\n \"\"\"Unsharp mask filter.\n\n See Wikipedia's entry on `digital unsharp masking`_ for an explanation of\n the parameters.\n\n :param radius: Blur Radius\n :param percent: Unsharp strength, in percent\n :param threshold: Threshold controls the minimum brightness change that\n will be sharpened\n\n .. 
_digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking\n\n \"\"\" # noqa: E501\n\n name = \"UnsharpMask\"\n\n def __init__(self, radius=2, percent=150, threshold=3):\n self.radius = radius\n self.percent = percent\n self.threshold = threshold\n\n def filter(self, image):\n return image.unsharp_mask(self.radius, self.percent, self.threshold)\n\n\nclass BLUR(BuiltinFilter):\n name = \"Blur\"\n # fmt: off\n filterargs = (5, 5), 16, 0, (\n 1, 1, 1, 1, 1,\n 1, 0, 0, 0, 1,\n 1, 0, 0, 0, 1,\n 1, 0, 0, 0, 1,\n 1, 1, 1, 1, 1,\n )\n # fmt: on\n\n\nclass CONTOUR(BuiltinFilter):\n name = \"Contour\"\n # fmt: off\n filterargs = (3, 3), 1, 255, (\n -1, -1, -1,\n -1, 8, -1,\n -1, -1, -1,\n )\n # fmt: on\n\n\nclass DETAIL(BuiltinFilter):\n name = \"Detail\"\n # fmt: off\n filterargs = (3, 3), 6, 0, (\n 0, -1, 0,\n -1, 10, -1,\n 0, -1, 0,\n )\n # fmt: on\n\n\nclass EDGE_ENHANCE(BuiltinFilter):\n name = \"Edge-enhance\"\n # fmt: off\n filterargs = (3, 3), 2, 0, (\n -1, -1, -1,\n -1, 10, -1,\n -1, -1, -1,\n )\n # fmt: on\n\n\nclass EDGE_ENHANCE_MORE(BuiltinFilter):\n name = \"Edge-enhance More\"\n # fmt: off\n filterargs = (3, 3), 1, 0, (\n -1, -1, -1,\n -1, 9, -1,\n -1, -1, -1,\n )\n # fmt: on\n\n\nclass EMBOSS(BuiltinFilter):\n name = \"Emboss\"\n # fmt: off\n filterargs = (3, 3), 1, 128, (\n -1, 0, 0,\n 0, 1, 0,\n 0, 0, 0,\n )\n # fmt: on\n\n\nclass FIND_EDGES(BuiltinFilter):\n name = \"Find Edges\"\n # fmt: off\n filterargs = (3, 3), 1, 0, (\n -1, -1, -1,\n -1, 8, -1,\n -1, -1, -1,\n )\n # fmt: on\n\n\nclass SHARPEN(BuiltinFilter):\n name = \"Sharpen\"\n # fmt: off\n filterargs = (3, 3), 16, 0, (\n -2, -2, -2,\n -2, 32, -2,\n -2, -2, -2,\n )\n # fmt: on\n\n\nclass SMOOTH(BuiltinFilter):\n name = \"Smooth\"\n # fmt: off\n filterargs = (3, 3), 13, 0, (\n 1, 1, 1,\n 1, 5, 1,\n 1, 1, 1,\n )\n # fmt: on\n\n\nclass SMOOTH_MORE(BuiltinFilter):\n name = \"Smooth More\"\n # fmt: off\n filterargs = (5, 5), 100, 0, (\n 1, 1, 1, 1, 1,\n 1, 5, 5, 5, 1,\n 1, 5, 44, 5, 1,\n 1, 5, 5, 5, 1,\n 1, 1, 1, 1, 1,\n )\n # fmt: on\n\n\nclass Color3DLUT(MultibandFilter):\n \"\"\"Three-dimensional color lookup table.\n\n Transforms 3-channel pixels using the values of the channels as coordinates\n in the 3D lookup table and interpolating the nearest elements.\n\n This method allows you to apply almost any color transformation\n in constant time by using pre-calculated decimated tables.\n\n .. versionadded:: 5.2.0\n\n :param size: Size of the table. One int or tuple of (int, int, int).\n Minimal size in any dimension is 2, maximum is 65.\n :param table: Flat lookup table. A list of ``channels * size**3``\n float elements or a list of ``size**3`` channels-sized\n tuples with floats. Channels are changed first,\n then first dimension, then second, then third.\n Value 0.0 corresponds lowest value of output, 1.0 highest.\n :param channels: Number of channels in the table. Could be 3 or 4.\n Default is 3.\n :param target_mode: A mode for the result image. Should have not less\n than ``channels`` channels. 
Default is ``None``,\n which means that mode wouldn't be changed.\n \"\"\"\n\n name = \"Color 3D LUT\"\n\n def __init__(self, size, table, channels=3, target_mode=None, **kwargs):\n if channels not in (3, 4):\n msg = \"Only 3 or 4 output channels are supported\"\n raise ValueError(msg)\n self.size = size = self._check_size(size)\n self.channels = channels\n self.mode = target_mode\n\n # Hidden flag `_copy_table=False` could be used to avoid extra copying\n # of the table if the table is specially made for the constructor.\n copy_table = kwargs.get(\"_copy_table\", True)\n items = size[0] * size[1] * size[2]\n wrong_size = False\n\n numpy = None\n if hasattr(table, \"shape\"):\n try:\n import numpy\n except ImportError: # pragma: no cover\n pass\n\n if numpy and isinstance(table, numpy.ndarray):\n if copy_table:\n table = table.copy()\n\n if table.shape in [\n (items * channels,),\n (items, channels),\n (size[2], size[1], size[0], channels),\n ]:\n table = table.reshape(items * channels)\n else:\n wrong_size = True\n\n else:\n if copy_table:\n table = list(table)\n\n # Convert to a flat list\n if table and isinstance(table[0], (list, tuple)):\n table, raw_table = [], table\n for pixel in raw_table:\n if len(pixel) != channels:\n msg = (\n \"The elements of the table should \"\n f\"have a length of {channels}.\"\n )\n raise ValueError(msg)\n table.extend(pixel)\n\n if wrong_size or len(table) != items * channels:\n msg = (\n \"The table should have either channels * size**3 float items \"\n \"or size**3 items of channels-sized tuples with floats. \"\n f\"Table should be: {channels}x{size[0]}x{size[1]}x{size[2]}. \"\n f\"Actual length: {len(table)}\"\n )\n raise ValueError(msg)\n self.table = table\n\n @staticmethod\n def _check_size(size):\n try:\n _, _, _ = size\n except ValueError as e:\n msg = \"Size should be either an integer or a tuple of three integers.\"\n raise ValueError(msg) from e\n except TypeError:\n size = (size, size, size)\n size = [int(x) for x in size]\n for size_1d in size:\n if not 2 <= size_1d <= 65:\n msg = \"Size should be in [2, 65] range.\"\n raise ValueError(msg)\n return size\n\n @classmethod\n def generate(cls, size, callback, channels=3, target_mode=None):\n \"\"\"Generates new LUT using provided callback.\n\n :param size: Size of the table. Passed to the constructor.\n :param callback: Function with three parameters which correspond\n three color channels. 
Will be called ``size**3``\n times with values from 0.0 to 1.0 and should return\n a tuple with ``channels`` elements.\n :param channels: The number of channels which should return callback.\n :param target_mode: Passed to the constructor of the resulting\n lookup table.\n \"\"\"\n size_1d, size_2d, size_3d = cls._check_size(size)\n if channels not in (3, 4):\n msg = \"Only 3 or 4 output channels are supported\"\n raise ValueError(msg)\n\n table = [0] * (size_1d * size_2d * size_3d * channels)\n idx_out = 0\n for b in range(size_3d):\n for g in range(size_2d):\n for r in range(size_1d):\n table[idx_out : idx_out + channels] = callback(\n r / (size_1d - 1), g / (size_2d - 1), b / (size_3d - 1)\n )\n idx_out += channels\n\n return cls(\n (size_1d, size_2d, size_3d),\n table,\n channels=channels,\n target_mode=target_mode,\n _copy_table=False,\n )\n\n def transform(self, callback, with_normals=False, channels=None, target_mode=None):\n \"\"\"Transforms the table values using provided callback and returns\n a new LUT with altered values.\n\n :param callback: A function which takes old lookup table values\n and returns a new set of values. The number\n of arguments which function should take is\n ``self.channels`` or ``3 + self.channels``\n if ``with_normals`` flag is set.\n Should return a tuple of ``self.channels`` or\n ``channels`` elements if it is set.\n :param with_normals: If true, ``callback`` will be called with\n coordinates in the color cube as the first\n three arguments. Otherwise, ``callback``\n will be called only with actual color values.\n :param channels: The number of channels in the resulting lookup table.\n :param target_mode: Passed to the constructor of the resulting\n lookup table.\n \"\"\"\n if channels not in (None, 3, 4):\n msg = \"Only 3 or 4 output channels are supported\"\n raise ValueError(msg)\n ch_in = self.channels\n ch_out = channels or ch_in\n size_1d, size_2d, size_3d = self.size\n\n table = [0] * (size_1d * size_2d * size_3d * ch_out)\n idx_in = 0\n idx_out = 0\n for b in range(size_3d):\n for g in range(size_2d):\n for r in range(size_1d):\n values = self.table[idx_in : idx_in + ch_in]\n if with_normals:\n values = callback(\n r / (size_1d - 1),\n g / (size_2d - 1),\n b / (size_3d - 1),\n *values,\n )\n else:\n values = callback(*values)\n table[idx_out : idx_out + ch_out] = values\n idx_in += ch_in\n idx_out += ch_out\n\n return type(self)(\n self.size,\n table,\n channels=ch_out,\n target_mode=target_mode or self.mode,\n _copy_table=False,\n )\n\n def __repr__(self):\n r = [\n f\"{self.__class__.__name__} from {self.table.__class__.__name__}\",\n \"size={:d}x{:d}x{:d}\".format(*self.size),\n f\"channels={self.channels:d}\",\n ]\n if self.mode:\n r.append(f\"target_mode={self.mode}\")\n return \"<{}>\".format(\" \".join(r))\n\n def filter(self, image):\n from . import Image\n\n return image.color_lut_3d(\n self.mode or image.mode,\n Image.Resampling.BILINEAR,\n self.channels,\n self.size[0],\n self.size[1],\n self.size[2],\n self.table,\n )\n", "path": "src/PIL/ImageFilter.py" } ]
[ { "content": "#\n# The Python Imaging Library.\n# $Id$\n#\n# standard filters\n#\n# History:\n# 1995-11-27 fl Created\n# 2002-06-08 fl Added rank and mode filters\n# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call\n#\n# Copyright (c) 1997-2003 by Secret Labs AB.\n# Copyright (c) 1995-2002 by Fredrik Lundh.\n#\n# See the README file for information on usage and redistribution.\n#\nimport functools\n\n\nclass Filter:\n pass\n\n\nclass MultibandFilter(Filter):\n pass\n\n\nclass BuiltinFilter(MultibandFilter):\n def filter(self, image):\n if image.mode == \"P\":\n msg = \"cannot filter palette images\"\n raise ValueError(msg)\n return image.filter(*self.filterargs)\n\n\nclass Kernel(BuiltinFilter):\n \"\"\"\n Create a convolution kernel. The current version only\n supports 3x3 and 5x5 integer and floating point kernels.\n\n In the current version, kernels can only be applied to\n \"L\" and \"RGB\" images.\n\n :param size: Kernel size, given as (width, height). In the current\n version, this must be (3,3) or (5,5).\n :param kernel: A sequence containing kernel weights.\n :param scale: Scale factor. If given, the result for each pixel is\n divided by this value. The default is the sum of the\n kernel weights.\n :param offset: Offset. If given, this value is added to the result,\n after it has been divided by the scale factor.\n \"\"\"\n\n name = \"Kernel\"\n\n def __init__(self, size, kernel, scale=None, offset=0):\n if scale is None:\n # default scale is sum of kernel\n scale = functools.reduce(lambda a, b: a + b, kernel)\n if size[0] * size[1] != len(kernel):\n msg = \"not enough coefficients in kernel\"\n raise ValueError(msg)\n self.filterargs = size, scale, offset, kernel\n\n\nclass RankFilter(Filter):\n \"\"\"\n Create a rank filter. The rank filter sorts all pixels in\n a window of the given size, and returns the ``rank``'th value.\n\n :param size: The kernel size, in pixels.\n :param rank: What pixel value to pick. Use 0 for a min filter,\n ``size * size / 2`` for a median filter, ``size * size - 1``\n for a max filter, etc.\n \"\"\"\n\n name = \"Rank\"\n\n def __init__(self, size, rank):\n self.size = size\n self.rank = rank\n\n def filter(self, image):\n if image.mode == \"P\":\n msg = \"cannot filter palette images\"\n raise ValueError(msg)\n image = image.expand(self.size // 2, self.size // 2)\n return image.rankfilter(self.size, self.rank)\n\n\nclass MedianFilter(RankFilter):\n \"\"\"\n Create a median filter. Picks the median pixel value in a window with the\n given size.\n\n :param size: The kernel size, in pixels.\n \"\"\"\n\n name = \"Median\"\n\n def __init__(self, size=3):\n self.size = size\n self.rank = size * size // 2\n\n\nclass MinFilter(RankFilter):\n \"\"\"\n Create a min filter. Picks the lowest pixel value in a window with the\n given size.\n\n :param size: The kernel size, in pixels.\n \"\"\"\n\n name = \"Min\"\n\n def __init__(self, size=3):\n self.size = size\n self.rank = 0\n\n\nclass MaxFilter(RankFilter):\n \"\"\"\n Create a max filter. Picks the largest pixel value in a window with the\n given size.\n\n :param size: The kernel size, in pixels.\n \"\"\"\n\n name = \"Max\"\n\n def __init__(self, size=3):\n self.size = size\n self.rank = size * size - 1\n\n\nclass ModeFilter(Filter):\n \"\"\"\n Create a mode filter. Picks the most frequent pixel value in a box with the\n given size. 
Pixel values that occur only once or twice are ignored; if no\n pixel value occurs more than twice, the original pixel value is preserved.\n\n :param size: The kernel size, in pixels.\n \"\"\"\n\n name = \"Mode\"\n\n def __init__(self, size=3):\n self.size = size\n\n def filter(self, image):\n return image.modefilter(self.size)\n\n\nclass GaussianBlur(MultibandFilter):\n \"\"\"Blurs the image with a sequence of extended box filters, which\n approximates a Gaussian kernel. For details on accuracy see\n <https://www.mia.uni-saarland.de/Publications/gwosdek-ssvm11.pdf>\n\n :param radius: Standard deviation of the Gaussian kernel.\n \"\"\"\n\n name = \"GaussianBlur\"\n\n def __init__(self, radius=2):\n self.radius = radius\n\n def filter(self, image):\n return image.gaussian_blur(self.radius)\n\n\nclass BoxBlur(MultibandFilter):\n \"\"\"Blurs the image by setting each pixel to the average value of the pixels\n in a square box extending radius pixels in each direction.\n Supports float radius of arbitrary size. Uses an optimized implementation\n which runs in linear time relative to the size of the image\n for any radius value.\n\n :param radius: Size of the box in one direction. Radius 0 does not blur,\n returns an identical image. Radius 1 takes 1 pixel\n in each direction, i.e. 9 pixels in total.\n \"\"\"\n\n name = \"BoxBlur\"\n\n def __init__(self, radius):\n if radius < 0:\n msg = \"radius must be >= 0\"\n raise ValueError(msg)\n self.radius = radius\n\n def filter(self, image):\n return image.box_blur(self.radius)\n\n\nclass UnsharpMask(MultibandFilter):\n \"\"\"Unsharp mask filter.\n\n See Wikipedia's entry on `digital unsharp masking`_ for an explanation of\n the parameters.\n\n :param radius: Blur Radius\n :param percent: Unsharp strength, in percent\n :param threshold: Threshold controls the minimum brightness change that\n will be sharpened\n\n .. 
_digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking\n\n \"\"\" # noqa: E501\n\n name = \"UnsharpMask\"\n\n def __init__(self, radius=2, percent=150, threshold=3):\n self.radius = radius\n self.percent = percent\n self.threshold = threshold\n\n def filter(self, image):\n return image.unsharp_mask(self.radius, self.percent, self.threshold)\n\n\nclass BLUR(BuiltinFilter):\n name = \"Blur\"\n # fmt: off\n filterargs = (5, 5), 16, 0, (\n 1, 1, 1, 1, 1,\n 1, 0, 0, 0, 1,\n 1, 0, 0, 0, 1,\n 1, 0, 0, 0, 1,\n 1, 1, 1, 1, 1,\n )\n # fmt: on\n\n\nclass CONTOUR(BuiltinFilter):\n name = \"Contour\"\n # fmt: off\n filterargs = (3, 3), 1, 255, (\n -1, -1, -1,\n -1, 8, -1,\n -1, -1, -1,\n )\n # fmt: on\n\n\nclass DETAIL(BuiltinFilter):\n name = \"Detail\"\n # fmt: off\n filterargs = (3, 3), 6, 0, (\n 0, -1, 0,\n -1, 10, -1,\n 0, -1, 0,\n )\n # fmt: on\n\n\nclass EDGE_ENHANCE(BuiltinFilter):\n name = \"Edge-enhance\"\n # fmt: off\n filterargs = (3, 3), 2, 0, (\n -1, -1, -1,\n -1, 10, -1,\n -1, -1, -1,\n )\n # fmt: on\n\n\nclass EDGE_ENHANCE_MORE(BuiltinFilter):\n name = \"Edge-enhance More\"\n # fmt: off\n filterargs = (3, 3), 1, 0, (\n -1, -1, -1,\n -1, 9, -1,\n -1, -1, -1,\n )\n # fmt: on\n\n\nclass EMBOSS(BuiltinFilter):\n name = \"Emboss\"\n # fmt: off\n filterargs = (3, 3), 1, 128, (\n -1, 0, 0,\n 0, 1, 0,\n 0, 0, 0,\n )\n # fmt: on\n\n\nclass FIND_EDGES(BuiltinFilter):\n name = \"Find Edges\"\n # fmt: off\n filterargs = (3, 3), 1, 0, (\n -1, -1, -1,\n -1, 8, -1,\n -1, -1, -1,\n )\n # fmt: on\n\n\nclass SHARPEN(BuiltinFilter):\n name = \"Sharpen\"\n # fmt: off\n filterargs = (3, 3), 16, 0, (\n -2, -2, -2,\n -2, 32, -2,\n -2, -2, -2,\n )\n # fmt: on\n\n\nclass SMOOTH(BuiltinFilter):\n name = \"Smooth\"\n # fmt: off\n filterargs = (3, 3), 13, 0, (\n 1, 1, 1,\n 1, 5, 1,\n 1, 1, 1,\n )\n # fmt: on\n\n\nclass SMOOTH_MORE(BuiltinFilter):\n name = \"Smooth More\"\n # fmt: off\n filterargs = (5, 5), 100, 0, (\n 1, 1, 1, 1, 1,\n 1, 5, 5, 5, 1,\n 1, 5, 44, 5, 1,\n 1, 5, 5, 5, 1,\n 1, 1, 1, 1, 1,\n )\n # fmt: on\n\n\nclass Color3DLUT(MultibandFilter):\n \"\"\"Three-dimensional color lookup table.\n\n Transforms 3-channel pixels using the values of the channels as coordinates\n in the 3D lookup table and interpolating the nearest elements.\n\n This method allows you to apply almost any color transformation\n in constant time by using pre-calculated decimated tables.\n\n .. versionadded:: 5.2.0\n\n :param size: Size of the table. One int or tuple of (int, int, int).\n Minimal size in any dimension is 2, maximum is 65.\n :param table: Flat lookup table. A list of ``channels * size**3``\n float elements or a list of ``size**3`` channels-sized\n tuples with floats. Channels are changed first,\n then first dimension, then second, then third.\n Value 0.0 corresponds lowest value of output, 1.0 highest.\n :param channels: Number of channels in the table. Could be 3 or 4.\n Default is 3.\n :param target_mode: A mode for the result image. Should have not less\n than ``channels`` channels. 
Default is ``None``,\n which means that mode wouldn't be changed.\n \"\"\"\n\n name = \"Color 3D LUT\"\n\n def __init__(self, size, table, channels=3, target_mode=None, **kwargs):\n if channels not in (3, 4):\n msg = \"Only 3 or 4 output channels are supported\"\n raise ValueError(msg)\n self.size = size = self._check_size(size)\n self.channels = channels\n self.mode = target_mode\n\n # Hidden flag `_copy_table=False` could be used to avoid extra copying\n # of the table if the table is specially made for the constructor.\n copy_table = kwargs.get(\"_copy_table\", True)\n items = size[0] * size[1] * size[2]\n wrong_size = False\n\n numpy = None\n if hasattr(table, \"shape\"):\n try:\n import numpy\n except ImportError: # pragma: no cover\n pass\n\n if numpy and isinstance(table, numpy.ndarray):\n if copy_table:\n table = table.copy()\n\n if table.shape in [\n (items * channels,),\n (items, channels),\n (size[2], size[1], size[0], channels),\n ]:\n table = table.reshape(items * channels)\n else:\n wrong_size = True\n\n else:\n if copy_table:\n table = list(table)\n\n # Convert to a flat list\n if table and isinstance(table[0], (list, tuple)):\n table, raw_table = [], table\n for pixel in raw_table:\n if len(pixel) != channels:\n msg = (\n \"The elements of the table should \"\n f\"have a length of {channels}.\"\n )\n raise ValueError(msg)\n table.extend(pixel)\n\n if wrong_size or len(table) != items * channels:\n msg = (\n \"The table should have either channels * size**3 float items \"\n \"or size**3 items of channels-sized tuples with floats. \"\n f\"Table should be: {channels}x{size[0]}x{size[1]}x{size[2]}. \"\n f\"Actual length: {len(table)}\"\n )\n raise ValueError(msg)\n self.table = table\n\n @staticmethod\n def _check_size(size):\n try:\n _, _, _ = size\n except ValueError as e:\n msg = \"Size should be either an integer or a tuple of three integers.\"\n raise ValueError(msg) from e\n except TypeError:\n size = (size, size, size)\n size = [int(x) for x in size]\n for size_1d in size:\n if not 2 <= size_1d <= 65:\n msg = \"Size should be in [2, 65] range.\"\n raise ValueError(msg)\n return size\n\n @classmethod\n def generate(cls, size, callback, channels=3, target_mode=None):\n \"\"\"Generates new LUT using provided callback.\n\n :param size: Size of the table. Passed to the constructor.\n :param callback: Function with three parameters which correspond\n three color channels. 
Will be called ``size**3``\n times with values from 0.0 to 1.0 and should return\n a tuple with ``channels`` elements.\n :param channels: The number of channels which should return callback.\n :param target_mode: Passed to the constructor of the resulting\n lookup table.\n \"\"\"\n size_1d, size_2d, size_3d = cls._check_size(size)\n if channels not in (3, 4):\n msg = \"Only 3 or 4 output channels are supported\"\n raise ValueError(msg)\n\n table = [0] * (size_1d * size_2d * size_3d * channels)\n idx_out = 0\n for b in range(size_3d):\n for g in range(size_2d):\n for r in range(size_1d):\n table[idx_out : idx_out + channels] = callback(\n r / (size_1d - 1), g / (size_2d - 1), b / (size_3d - 1)\n )\n idx_out += channels\n\n return cls(\n (size_1d, size_2d, size_3d),\n table,\n channels=channels,\n target_mode=target_mode,\n _copy_table=False,\n )\n\n def transform(self, callback, with_normals=False, channels=None, target_mode=None):\n \"\"\"Transforms the table values using provided callback and returns\n a new LUT with altered values.\n\n :param callback: A function which takes old lookup table values\n and returns a new set of values. The number\n of arguments which function should take is\n ``self.channels`` or ``3 + self.channels``\n if ``with_normals`` flag is set.\n Should return a tuple of ``self.channels`` or\n ``channels`` elements if it is set.\n :param with_normals: If true, ``callback`` will be called with\n coordinates in the color cube as the first\n three arguments. Otherwise, ``callback``\n will be called only with actual color values.\n :param channels: The number of channels in the resulting lookup table.\n :param target_mode: Passed to the constructor of the resulting\n lookup table.\n \"\"\"\n if channels not in (None, 3, 4):\n msg = \"Only 3 or 4 output channels are supported\"\n raise ValueError(msg)\n ch_in = self.channels\n ch_out = channels or ch_in\n size_1d, size_2d, size_3d = self.size\n\n table = [0] * (size_1d * size_2d * size_3d * ch_out)\n idx_in = 0\n idx_out = 0\n for b in range(size_3d):\n for g in range(size_2d):\n for r in range(size_1d):\n values = self.table[idx_in : idx_in + ch_in]\n if with_normals:\n values = callback(\n r / (size_1d - 1),\n g / (size_2d - 1),\n b / (size_3d - 1),\n *values,\n )\n else:\n values = callback(*values)\n table[idx_out : idx_out + ch_out] = values\n idx_in += ch_in\n idx_out += ch_out\n\n return type(self)(\n self.size,\n table,\n channels=ch_out,\n target_mode=target_mode or self.mode,\n _copy_table=False,\n )\n\n def __repr__(self):\n r = [\n f\"{self.__class__.__name__} from {self.table.__class__.__name__}\",\n \"size={:d}x{:d}x{:d}\".format(*self.size),\n f\"channels={self.channels:d}\",\n ]\n if self.mode:\n r.append(f\"target_mode={self.mode}\")\n return \"<{}>\".format(\" \".join(r))\n\n def filter(self, image):\n from . import Image\n\n return image.color_lut_3d(\n self.mode or image.mode,\n Image.Resampling.BILINEAR,\n self.channels,\n self.size[0],\n self.size[1],\n self.size[2],\n self.table,\n )\n", "path": "src/PIL/ImageFilter.py" } ]
diff --git a/Tests/test_image_filter.py b/Tests/test_image_filter.py index cfe46b65898..a2ef2280b72 100644 --- a/Tests/test_image_filter.py +++ b/Tests/test_image_filter.py @@ -24,6 +24,7 @@ ImageFilter.ModeFilter, ImageFilter.GaussianBlur, ImageFilter.GaussianBlur(5), + ImageFilter.BoxBlur(0), ImageFilter.BoxBlur(5), ImageFilter.UnsharpMask, ImageFilter.UnsharpMask(10), @@ -173,3 +174,14 @@ def test_consistency_5x5(mode): Image.merge(mode, source[: len(mode)]).filter(kernel), Image.merge(mode, reference[: len(mode)]), ) + + +def test_invalid_box_blur_filter(): + with pytest.raises(ValueError): + ImageFilter.BoxBlur(-2) + + im = hopper() + box_blur_filter = ImageFilter.BoxBlur(2) + box_blur_filter.radius = -2 + with pytest.raises(ValueError): + im.filter(box_blur_filter) diff --git a/src/PIL/ImageFilter.py b/src/PIL/ImageFilter.py index 59e2c18b9ac..63d6dcf5cec 100644 --- a/src/PIL/ImageFilter.py +++ b/src/PIL/ImageFilter.py @@ -183,6 +183,9 @@ class BoxBlur(MultibandFilter): name = "BoxBlur" def __init__(self, radius): + if radius < 0: + msg = "radius must be >= 0" + raise ValueError(msg) self.radius = radius def filter(self, image): diff --git a/src/libImaging/BoxBlur.c b/src/libImaging/BoxBlur.c index 2e45a33587c..5afe7cf5043 100644 --- a/src/libImaging/BoxBlur.c +++ b/src/libImaging/BoxBlur.c @@ -237,6 +237,9 @@ ImagingBoxBlur(Imaging imOut, Imaging imIn, float radius, int n) { if (n < 1) { return ImagingError_ValueError("number of passes must be greater than zero"); } + if (radius < 0) { + return ImagingError_ValueError("radius must be >= 0"); + } if (strcmp(imIn->mode, imOut->mode) || imIn->type != imOut->type || imIn->bands != imOut->bands || imIn->xsize != imOut->xsize ||
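With the guard added in the patch above, the reproducer from the issue should now fail fast with a `ValueError` instead of aborting the interpreter. A minimal sketch of the post-fix behavior, assuming a Pillow build that includes this change:

```python
from PIL import ImageFilter

try:
    ImageFilter.BoxBlur(-2)  # rejected at construction time
except ValueError as err:
    print(err)  # "radius must be >= 0"

identity = ImageFilter.BoxBlur(0)  # radius 0 is still valid and leaves the image unchanged
```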
encode__httpx-1799
Update h2 pin?

### Discussed in https://github.com/encode/httpx/discussions/1485

Originally posted by **HarrySky**, February 24, 2021:

Hi, some time ago the `h2` pin was updated in `httpcore`: https://github.com/encode/httpcore/pull/208

But it is still pinned to `3.*` in this package's `http2` extra: https://github.com/encode/httpx/blob/0f280af8b170ed5cc48c12a894f71a8b5762f748/setup.py#L65

This is not an issue, as I can just add `h2>=3,<5` to my setup.py instead of using `httpx[http2]`, but maybe you want the dependencies to be in sync with `httpcore`.

EDIT: Using git blame we can see that before the `http2` extra existed, `httpcore[http2]` was used instead of depending on `h2` directly.
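The workaround mentioned above — declaring the range in a downstream project instead of relying on the `httpx[http2]` extra — would look roughly like the sketch below; the package name is hypothetical, and only the `h2>=3,<5` specifier comes from the issue and from the fix recorded later in this entry.

```python
# setup.py of a hypothetical downstream project (illustrative, not from the httpx repo)
from setuptools import setup

setup(
    name="my-downstream-service",  # hypothetical name
    install_requires=[
        "httpx",      # the http2 extra is unnecessary once h2 is pinned here
        "h2>=3,<5",   # same range the patch below adopts for the extra
    ],
)
```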
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport re\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef get_version(package):\n \"\"\"\n Return package version as listed in `__version__` in `init.py`.\n \"\"\"\n version = Path(package, \"__version__.py\").read_text()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", version).group(1)\n\n\ndef get_long_description():\n \"\"\"\n Return the README.\n \"\"\"\n long_description = \"\"\n with open(\"README.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n long_description += \"\\n\\n\"\n with open(\"CHANGELOG.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n return long_description\n\n\ndef get_packages(package):\n \"\"\"\n Return root package and all sub-packages.\n \"\"\"\n return [str(path.parent) for path in Path(package).glob(\"**/__init__.py\")]\n\n\nsetup(\n name=\"httpx\",\n python_requires=\">=3.6\",\n version=get_version(\"httpx\"),\n url=\"https://github.com/encode/httpx\",\n project_urls={\n \"Changelog\": \"https://github.com/encode/httpx/blob/master/CHANGELOG.md\",\n \"Documentation\": \"https://www.python-httpx.org\",\n \"Source\": \"https://github.com/encode/httpx\",\n },\n license=\"BSD\",\n description=\"The next generation HTTP client.\",\n long_description=get_long_description(),\n long_description_content_type=\"text/markdown\",\n author=\"Tom Christie\",\n author_email=\"[email protected]\",\n package_data={\"httpx\": [\"py.typed\"]},\n packages=get_packages(\"httpx\"),\n include_package_data=True,\n zip_safe=False,\n install_requires=[\n \"certifi\",\n \"sniffio\",\n \"rfc3986[idna2008]>=1.3,<2\",\n \"httpcore>=0.13.3,<0.14.0\",\n \"async_generator; python_version < '3.7'\"\n ],\n extras_require={\n \"http2\": \"h2==3.*\",\n \"brotli\": \"brotlicffi==1.*\",\n },\n classifiers=[\n \"Development Status :: 4 - Beta\",\n \"Environment :: Web Environment\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Framework :: AsyncIO\",\n \"Framework :: Trio\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3 :: Only\",\n ],\n)\n", "path": "setup.py" } ]
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport re\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef get_version(package):\n \"\"\"\n Return package version as listed in `__version__` in `init.py`.\n \"\"\"\n version = Path(package, \"__version__.py\").read_text()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", version).group(1)\n\n\ndef get_long_description():\n \"\"\"\n Return the README.\n \"\"\"\n long_description = \"\"\n with open(\"README.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n long_description += \"\\n\\n\"\n with open(\"CHANGELOG.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n return long_description\n\n\ndef get_packages(package):\n \"\"\"\n Return root package and all sub-packages.\n \"\"\"\n return [str(path.parent) for path in Path(package).glob(\"**/__init__.py\")]\n\n\nsetup(\n name=\"httpx\",\n python_requires=\">=3.6\",\n version=get_version(\"httpx\"),\n url=\"https://github.com/encode/httpx\",\n project_urls={\n \"Changelog\": \"https://github.com/encode/httpx/blob/master/CHANGELOG.md\",\n \"Documentation\": \"https://www.python-httpx.org\",\n \"Source\": \"https://github.com/encode/httpx\",\n },\n license=\"BSD\",\n description=\"The next generation HTTP client.\",\n long_description=get_long_description(),\n long_description_content_type=\"text/markdown\",\n author=\"Tom Christie\",\n author_email=\"[email protected]\",\n package_data={\"httpx\": [\"py.typed\"]},\n packages=get_packages(\"httpx\"),\n include_package_data=True,\n zip_safe=False,\n install_requires=[\n \"certifi\",\n \"sniffio\",\n \"rfc3986[idna2008]>=1.3,<2\",\n \"httpcore>=0.13.3,<0.14.0\",\n \"async_generator; python_version < '3.7'\"\n ],\n extras_require={\n \"http2\": \"h2>=3,<5\",\n \"brotli\": \"brotlicffi==1.*\",\n },\n classifiers=[\n \"Development Status :: 4 - Beta\",\n \"Environment :: Web Environment\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Framework :: AsyncIO\",\n \"Framework :: Trio\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3 :: Only\",\n ],\n)\n", "path": "setup.py" } ]
diff --git a/setup.py b/setup.py index 212aedf865..8854039e21 100644 --- a/setup.py +++ b/setup.py @@ -63,7 +63,7 @@ def get_packages(package): "async_generator; python_version < '3.7'" ], extras_require={ - "http2": "h2==3.*", + "http2": "h2>=3,<5", "brotli": "brotlicffi==1.*", }, classifiers=[
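After the pin is widened as in the diff above, a quick way to confirm which `h2` release actually got installed alongside `httpx[http2]` is to query package metadata. A small sketch (assumes Python 3.8+ for `importlib.metadata` and an environment where the extra is installed):

```python
from importlib.metadata import version

h2_version = version("h2")  # raises PackageNotFoundError if h2 is absent
major = int(h2_version.split(".")[0])
assert 3 <= major < 5, f"h2 {h2_version} falls outside the >=3,<5 range"
print(f"h2 {h2_version} satisfies the widened constraint")
```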
scikit-image__scikit-image-5206
Small typo in utils.py

## Description

There is a small typo in the docs: the `deprecate_kwarg` class docstring names the argument `arg_mapping`:
https://github.com/scikit-image/scikit-image/blob/87a8806cca7fb5366b6e5ddbe5e46364b44f90fe/skimage/_shared/utils.py#L119

However, the actual `__init__` method takes the argument under the name `kwarg_mapping`:
https://github.com/scikit-image/scikit-image/blob/87a8806cca7fb5366b6e5ddbe5e46364b44f90fe/skimage/_shared/utils.py#L131
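To make the mismatch concrete, here is a small usage sketch that passes the keyword the `__init__` actually accepts (`kwarg_mapping`); the decorated function, the argument names, and the version string are purely illustrative, and the import assumes the private module path shown in the file below.

```python
from skimage._shared.utils import deprecate_kwarg

@deprecate_kwarg(kwarg_mapping={"old_name": "new_name"},
                 removed_version="0.20")  # illustrative version string
def demo(image=None, new_name=1):
    # The decorator forwards values passed under the old keyword to the new one.
    return new_name

print(demo(old_name=3))  # emits a FutureWarning and prints 3
```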
[ { "content": "import inspect\nimport functools\nimport numbers\nimport sys\nimport warnings\n\nimport numpy as np\nfrom numpy.lib import NumpyVersion\nimport scipy\n\nfrom ..util import img_as_float\nfrom ._warnings import all_warnings, warn\n\n__all__ = ['deprecated', 'get_bound_method_class', 'all_warnings',\n 'safe_as_int', 'check_nD', 'check_shape_equality', 'warn']\n\n\nclass skimage_deprecation(Warning):\n \"\"\"Create our own deprecation class, since Python >= 2.7\n silences deprecations by default.\n\n \"\"\"\n pass\n\n\nclass change_default_value:\n \"\"\"Decorator for changing the default value of an argument.\n\n Parameters\n ----------\n arg_name: str\n The name of the argument to be updated.\n new_value: any\n The argument new value.\n changed_version : str\n The package version in which the change will be introduced.\n warning_msg: str\n Optional warning message. If None, a generic warning message\n is used.\n\n \"\"\"\n\n def __init__(self, arg_name, *, new_value, changed_version,\n warning_msg=None):\n self.arg_name = arg_name\n self.new_value = new_value\n self.warning_msg = warning_msg\n self.changed_version = changed_version\n\n def __call__(self, func):\n parameters = inspect.signature(func).parameters\n arg_idx = list(parameters.keys()).index(self.arg_name)\n old_value = parameters[self.arg_name].default\n\n if self.warning_msg is None:\n self.warning_msg = (\n f\"The new recommended value for {self.arg_name} is \"\n f\"{self.new_value}. Until version {self.changed_version}, \"\n f\"the default {self.arg_name} value is {old_value}. \"\n f\"From version {self.changed_version}, the {self.arg_name} \"\n f\"default value will be {self.new_value}. To avoid \"\n f\"this warning, please explicitly set {self.arg_name} value.\")\n\n @functools.wraps(func)\n def fixed_func(*args, **kwargs):\n if len(args) < arg_idx + 1 and self.arg_name not in kwargs.keys():\n # warn that arg_name default value changed:\n warnings.warn(self.warning_msg, FutureWarning, stacklevel=2)\n return func(*args, **kwargs)\n\n return fixed_func\n\n\nclass remove_arg:\n \"\"\"Decorator to remove an argument from function's signature.\n\n Parameters\n ----------\n arg_name: str\n The name of the argument to be removed.\n changed_version : str\n The package version in which the warning will be replaced by\n an error.\n help_msg: str\n Optional message appended to the generic warning message.\n\n \"\"\"\n\n def __init__(self, arg_name, *, changed_version, help_msg=None):\n self.arg_name = arg_name\n self.help_msg = help_msg\n self.changed_version = changed_version\n\n def __call__(self, func):\n parameters = inspect.signature(func).parameters\n arg_idx = list(parameters.keys()).index(self.arg_name)\n warning_msg = (\n f\"{self.arg_name} argument is deprecated and will be removed \"\n f\"in version {self.changed_version}. To avoid this warning, \"\n f\"please do not use the {self.arg_name} argument. 
Please \"\n f\"see {func.__name__} documentation for more details.\")\n\n if self.help_msg is not None:\n warning_msg += f\" {self.help_msg}\"\n\n @functools.wraps(func)\n def fixed_func(*args, **kwargs):\n if len(args) > arg_idx or self.arg_name in kwargs.keys():\n # warn that arg_name is deprecated\n warnings.warn(warning_msg, FutureWarning, stacklevel=2)\n return func(*args, **kwargs)\n\n return fixed_func\n\n\nclass deprecate_kwarg:\n \"\"\"Decorator ensuring backward compatibility when argument names are\n modified in a function definition.\n\n Parameters\n ----------\n arg_mapping: dict\n Mapping between the function's old argument names and the new\n ones.\n warning_msg: str\n Optional warning message. If None, a generic warning message\n is used.\n removed_version : str\n The package version in which the deprecated argument will be\n removed.\n\n \"\"\"\n\n def __init__(self, kwarg_mapping, warning_msg=None, removed_version=None):\n self.kwarg_mapping = kwarg_mapping\n if warning_msg is None:\n self.warning_msg = (\"'{old_arg}' is a deprecated argument name \"\n \"for `{func_name}`. \")\n if removed_version is not None:\n self.warning_msg += (\"It will be removed in version {}. \"\n .format(removed_version))\n self.warning_msg += \"Please use '{new_arg}' instead.\"\n else:\n self.warning_msg = warning_msg\n\n def __call__(self, func):\n @functools.wraps(func)\n def fixed_func(*args, **kwargs):\n for old_arg, new_arg in self.kwarg_mapping.items():\n if old_arg in kwargs:\n # warn that the function interface has changed:\n warnings.warn(self.warning_msg.format(\n old_arg=old_arg, func_name=func.__name__,\n new_arg=new_arg), FutureWarning, stacklevel=2)\n # Substitute new_arg to old_arg\n kwargs[new_arg] = kwargs.pop(old_arg)\n\n # Call the function with the fixed arguments\n return func(*args, **kwargs)\n return fixed_func\n\n\nclass deprecated(object):\n \"\"\"Decorator to mark deprecated functions with warning.\n\n Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.\n\n Parameters\n ----------\n alt_func : str\n If given, tell user what function to use instead.\n behavior : {'warn', 'raise'}\n Behavior during call to deprecated function: 'warn' = warn user that\n function is deprecated; 'raise' = raise error.\n removed_version : str\n The package version in which the deprecated function will be removed.\n \"\"\"\n\n def __init__(self, alt_func=None, behavior='warn', removed_version=None):\n self.alt_func = alt_func\n self.behavior = behavior\n self.removed_version = removed_version\n\n def __call__(self, func):\n\n alt_msg = ''\n if self.alt_func is not None:\n alt_msg = ' Use ``%s`` instead.' % self.alt_func\n rmv_msg = ''\n if self.removed_version is not None:\n rmv_msg = (' and will be removed in version %s' %\n self.removed_version)\n\n msg = ('Function ``%s`` is deprecated' % func.__name__ +\n rmv_msg + '.' + alt_msg)\n\n @functools.wraps(func)\n def wrapped(*args, **kwargs):\n if self.behavior == 'warn':\n func_code = func.__code__\n warnings.simplefilter('always', skimage_deprecation)\n warnings.warn_explicit(msg,\n category=skimage_deprecation,\n filename=func_code.co_filename,\n lineno=func_code.co_firstlineno + 1)\n elif self.behavior == 'raise':\n raise skimage_deprecation(msg)\n return func(*args, **kwargs)\n\n # modify doc string to display deprecation warning\n doc = '**Deprecated function**.' 
+ alt_msg\n if wrapped.__doc__ is None:\n wrapped.__doc__ = doc\n else:\n wrapped.__doc__ = doc + '\\n\\n ' + wrapped.__doc__\n\n return wrapped\n\n\ndef get_bound_method_class(m):\n \"\"\"Return the class for a bound method.\n\n \"\"\"\n return m.im_class if sys.version < '3' else m.__self__.__class__\n\n\ndef safe_as_int(val, atol=1e-3):\n \"\"\"\n Attempt to safely cast values to integer format.\n\n Parameters\n ----------\n val : scalar or iterable of scalars\n Number or container of numbers which are intended to be interpreted as\n integers, e.g., for indexing purposes, but which may not carry integer\n type.\n atol : float\n Absolute tolerance away from nearest integer to consider values in\n ``val`` functionally integers.\n\n Returns\n -------\n val_int : NumPy scalar or ndarray of dtype `np.int64`\n Returns the input value(s) coerced to dtype `np.int64` assuming all\n were within ``atol`` of the nearest integer.\n\n Notes\n -----\n This operation calculates ``val`` modulo 1, which returns the mantissa of\n all values. Then all mantissas greater than 0.5 are subtracted from one.\n Finally, the absolute tolerance from zero is calculated. If it is less\n than ``atol`` for all value(s) in ``val``, they are rounded and returned\n in an integer array. Or, if ``val`` was a scalar, a NumPy scalar type is\n returned.\n\n If any value(s) are outside the specified tolerance, an informative error\n is raised.\n\n Examples\n --------\n >>> safe_as_int(7.0)\n 7\n\n >>> safe_as_int([9, 4, 2.9999999999])\n array([9, 4, 3])\n\n >>> safe_as_int(53.1)\n Traceback (most recent call last):\n ...\n ValueError: Integer argument required but received 53.1, check inputs.\n\n >>> safe_as_int(53.01, atol=0.01)\n 53\n\n \"\"\"\n mod = np.asarray(val) % 1 # Extract mantissa\n\n # Check for and subtract any mod values > 0.5 from 1\n if mod.ndim == 0: # Scalar input, cannot be indexed\n if mod > 0.5:\n mod = 1 - mod\n else: # Iterable input, now ndarray\n mod[mod > 0.5] = 1 - mod[mod > 0.5] # Test on each side of nearest int\n\n try:\n np.testing.assert_allclose(mod, 0, atol=atol)\n except AssertionError:\n raise ValueError(\"Integer argument required but received \"\n \"{0}, check inputs.\".format(val))\n\n return np.round(val).astype(np.int64)\n\n\ndef check_shape_equality(im1, im2):\n \"\"\"Raise an error if the shape do not match.\"\"\"\n if not im1.shape == im2.shape:\n raise ValueError('Input images must have the same dimensions.')\n return\n\n\ndef check_nD(array, ndim, arg_name='image'):\n \"\"\"\n Verify an array meets the desired ndims and array isn't empty.\n\n Parameters\n ----------\n array : array-like\n Input array to be validated\n ndim : int or iterable of ints\n Allowable ndim or ndims for the array.\n arg_name : str, optional\n The name of the array in the original function.\n\n \"\"\"\n array = np.asanyarray(array)\n msg_incorrect_dim = \"The parameter `%s` must be a %s-dimensional array\"\n msg_empty_array = \"The parameter `%s` cannot be an empty array\"\n if isinstance(ndim, int):\n ndim = [ndim]\n if array.size == 0:\n raise ValueError(msg_empty_array % (arg_name))\n if not array.ndim in ndim:\n raise ValueError(msg_incorrect_dim % (arg_name, '-or-'.join([str(n) for n in ndim])))\n\n\ndef check_random_state(seed):\n \"\"\"Turn seed into a `np.random.RandomState` instance.\n\n Parameters\n ----------\n seed : None, int or np.random.RandomState\n If `seed` is None, return the RandomState singleton used by `np.random`.\n If `seed` is an int, return a new RandomState instance seeded with 
`seed`.\n If `seed` is already a RandomState instance, return it.\n\n Raises\n ------\n ValueError\n If `seed` is of the wrong type.\n\n \"\"\"\n # Function originally from scikit-learn's module sklearn.utils.validation\n if seed is None or seed is np.random:\n return np.random.mtrand._rand\n if isinstance(seed, (numbers.Integral, np.integer)):\n return np.random.RandomState(seed)\n if isinstance(seed, np.random.RandomState):\n return seed\n raise ValueError('%r cannot be used to seed a numpy.random.RandomState'\n ' instance' % seed)\n\n\ndef convert_to_float(image, preserve_range):\n \"\"\"Convert input image to float image with the appropriate range.\n\n Parameters\n ----------\n image : ndarray\n Input image.\n preserve_range : bool\n Determines if the range of the image should be kept or transformed\n using img_as_float. Also see\n https://scikit-image.org/docs/dev/user_guide/data_types.html\n\n Notes:\n ------\n * Input images with `float32` data type are not upcast.\n\n Returns\n -------\n image : ndarray\n Transformed version of the input.\n\n \"\"\"\n if preserve_range:\n # Convert image to double only if it is not single or double\n # precision float\n if image.dtype.char not in 'df':\n image = image.astype(float)\n else:\n image = img_as_float(image)\n return image\n\n\ndef _validate_interpolation_order(image_dtype, order):\n \"\"\"Validate and return spline interpolation's order.\n\n Parameters\n ----------\n image_dtype : dtype\n Image dtype.\n order : int, optional\n The order of the spline interpolation. The order has to be in\n the range 0-5. See `skimage.transform.warp` for detail.\n\n Returns\n -------\n order : int\n if input order is None, returns 0 if image_dtype is bool and 1\n otherwise. Otherwise, image_dtype is checked and input order\n is validated accordingly (order > 0 is not supported for bool\n image dtype)\n\n \"\"\"\n\n if order is None:\n return 0 if image_dtype == bool else 1\n\n if order < 0 or order > 5:\n raise ValueError(\"Spline interpolation order has to be in the \"\n \"range 0-5.\")\n\n if image_dtype == bool and order != 0:\n warn(\"Input image dtype is bool. Interpolation is not defined \"\n \"with bool data type. Please set order to 0 or explicitely \"\n \"cast input image to another data type. Starting from version \"\n \"0.19 a ValueError will be raised instead of this warning.\",\n FutureWarning, stacklevel=2)\n\n return order\n\n\ndef _to_np_mode(mode):\n \"\"\"Convert padding modes from `ndi.correlate` to `np.pad`.\"\"\"\n mode_translation_dict = dict(nearest='edge', reflect='symmetric',\n mirror='reflect')\n if mode in mode_translation_dict:\n mode = mode_translation_dict[mode]\n return mode\n\n\ndef _to_ndimage_mode(mode):\n \"\"\"Convert from `numpy.pad` mode name to the corresponding ndimage mode.\"\"\"\n mode_translation_dict = dict(constant='constant', edge='nearest',\n symmetric='reflect', reflect='mirror',\n wrap='wrap')\n if mode not in mode_translation_dict:\n raise ValueError(\n (\"Unknown mode: '{}', or cannot translate mode. The \"\n \"mode should be one of 'constant', 'edge', 'symmetric', \"\n \"'reflect', or 'wrap'. See the documentation of numpy.pad for\"\n \"more info.\").format(mode))\n return _fix_ndimage_mode(mode_translation_dict[mode])\n\n\ndef _fix_ndimage_mode(mode):\n # SciPy 1.6.0 introduced grid variants of constant and wrap which\n # have less surprising behavior for images. 
Use these when available\n grid_modes = {'constant': 'grid-constant', 'wrap': 'grid-wrap'}\n if NumpyVersion(scipy.__version__) >= '1.6.0':\n mode = grid_modes.get(mode, mode)\n return mode\n", "path": "skimage/_shared/utils.py" } ]
[ { "content": "import inspect\nimport functools\nimport numbers\nimport sys\nimport warnings\n\nimport numpy as np\nfrom numpy.lib import NumpyVersion\nimport scipy\n\nfrom ..util import img_as_float\nfrom ._warnings import all_warnings, warn\n\n__all__ = ['deprecated', 'get_bound_method_class', 'all_warnings',\n 'safe_as_int', 'check_nD', 'check_shape_equality', 'warn']\n\n\nclass skimage_deprecation(Warning):\n \"\"\"Create our own deprecation class, since Python >= 2.7\n silences deprecations by default.\n\n \"\"\"\n pass\n\n\nclass change_default_value:\n \"\"\"Decorator for changing the default value of an argument.\n\n Parameters\n ----------\n arg_name: str\n The name of the argument to be updated.\n new_value: any\n The argument new value.\n changed_version : str\n The package version in which the change will be introduced.\n warning_msg: str\n Optional warning message. If None, a generic warning message\n is used.\n\n \"\"\"\n\n def __init__(self, arg_name, *, new_value, changed_version,\n warning_msg=None):\n self.arg_name = arg_name\n self.new_value = new_value\n self.warning_msg = warning_msg\n self.changed_version = changed_version\n\n def __call__(self, func):\n parameters = inspect.signature(func).parameters\n arg_idx = list(parameters.keys()).index(self.arg_name)\n old_value = parameters[self.arg_name].default\n\n if self.warning_msg is None:\n self.warning_msg = (\n f\"The new recommended value for {self.arg_name} is \"\n f\"{self.new_value}. Until version {self.changed_version}, \"\n f\"the default {self.arg_name} value is {old_value}. \"\n f\"From version {self.changed_version}, the {self.arg_name} \"\n f\"default value will be {self.new_value}. To avoid \"\n f\"this warning, please explicitly set {self.arg_name} value.\")\n\n @functools.wraps(func)\n def fixed_func(*args, **kwargs):\n if len(args) < arg_idx + 1 and self.arg_name not in kwargs.keys():\n # warn that arg_name default value changed:\n warnings.warn(self.warning_msg, FutureWarning, stacklevel=2)\n return func(*args, **kwargs)\n\n return fixed_func\n\n\nclass remove_arg:\n \"\"\"Decorator to remove an argument from function's signature.\n\n Parameters\n ----------\n arg_name: str\n The name of the argument to be removed.\n changed_version : str\n The package version in which the warning will be replaced by\n an error.\n help_msg: str\n Optional message appended to the generic warning message.\n\n \"\"\"\n\n def __init__(self, arg_name, *, changed_version, help_msg=None):\n self.arg_name = arg_name\n self.help_msg = help_msg\n self.changed_version = changed_version\n\n def __call__(self, func):\n parameters = inspect.signature(func).parameters\n arg_idx = list(parameters.keys()).index(self.arg_name)\n warning_msg = (\n f\"{self.arg_name} argument is deprecated and will be removed \"\n f\"in version {self.changed_version}. To avoid this warning, \"\n f\"please do not use the {self.arg_name} argument. 
Please \"\n f\"see {func.__name__} documentation for more details.\")\n\n if self.help_msg is not None:\n warning_msg += f\" {self.help_msg}\"\n\n @functools.wraps(func)\n def fixed_func(*args, **kwargs):\n if len(args) > arg_idx or self.arg_name in kwargs.keys():\n # warn that arg_name is deprecated\n warnings.warn(warning_msg, FutureWarning, stacklevel=2)\n return func(*args, **kwargs)\n\n return fixed_func\n\n\nclass deprecate_kwarg:\n \"\"\"Decorator ensuring backward compatibility when argument names are\n modified in a function definition.\n\n Parameters\n ----------\n kwarg_mapping: dict\n Mapping between the function's old argument names and the new\n ones.\n warning_msg: str\n Optional warning message. If None, a generic warning message\n is used.\n removed_version : str\n The package version in which the deprecated argument will be\n removed.\n\n \"\"\"\n\n def __init__(self, kwarg_mapping, warning_msg=None, removed_version=None):\n self.kwarg_mapping = kwarg_mapping\n if warning_msg is None:\n self.warning_msg = (\"'{old_arg}' is a deprecated argument name \"\n \"for `{func_name}`. \")\n if removed_version is not None:\n self.warning_msg += (\"It will be removed in version {}. \"\n .format(removed_version))\n self.warning_msg += \"Please use '{new_arg}' instead.\"\n else:\n self.warning_msg = warning_msg\n\n def __call__(self, func):\n @functools.wraps(func)\n def fixed_func(*args, **kwargs):\n for old_arg, new_arg in self.kwarg_mapping.items():\n if old_arg in kwargs:\n # warn that the function interface has changed:\n warnings.warn(self.warning_msg.format(\n old_arg=old_arg, func_name=func.__name__,\n new_arg=new_arg), FutureWarning, stacklevel=2)\n # Substitute new_arg to old_arg\n kwargs[new_arg] = kwargs.pop(old_arg)\n\n # Call the function with the fixed arguments\n return func(*args, **kwargs)\n return fixed_func\n\n\nclass deprecated(object):\n \"\"\"Decorator to mark deprecated functions with warning.\n\n Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.\n\n Parameters\n ----------\n alt_func : str\n If given, tell user what function to use instead.\n behavior : {'warn', 'raise'}\n Behavior during call to deprecated function: 'warn' = warn user that\n function is deprecated; 'raise' = raise error.\n removed_version : str\n The package version in which the deprecated function will be removed.\n \"\"\"\n\n def __init__(self, alt_func=None, behavior='warn', removed_version=None):\n self.alt_func = alt_func\n self.behavior = behavior\n self.removed_version = removed_version\n\n def __call__(self, func):\n\n alt_msg = ''\n if self.alt_func is not None:\n alt_msg = ' Use ``%s`` instead.' % self.alt_func\n rmv_msg = ''\n if self.removed_version is not None:\n rmv_msg = (' and will be removed in version %s' %\n self.removed_version)\n\n msg = ('Function ``%s`` is deprecated' % func.__name__ +\n rmv_msg + '.' + alt_msg)\n\n @functools.wraps(func)\n def wrapped(*args, **kwargs):\n if self.behavior == 'warn':\n func_code = func.__code__\n warnings.simplefilter('always', skimage_deprecation)\n warnings.warn_explicit(msg,\n category=skimage_deprecation,\n filename=func_code.co_filename,\n lineno=func_code.co_firstlineno + 1)\n elif self.behavior == 'raise':\n raise skimage_deprecation(msg)\n return func(*args, **kwargs)\n\n # modify doc string to display deprecation warning\n doc = '**Deprecated function**.' 
+ alt_msg\n if wrapped.__doc__ is None:\n wrapped.__doc__ = doc\n else:\n wrapped.__doc__ = doc + '\\n\\n ' + wrapped.__doc__\n\n return wrapped\n\n\ndef get_bound_method_class(m):\n \"\"\"Return the class for a bound method.\n\n \"\"\"\n return m.im_class if sys.version < '3' else m.__self__.__class__\n\n\ndef safe_as_int(val, atol=1e-3):\n \"\"\"\n Attempt to safely cast values to integer format.\n\n Parameters\n ----------\n val : scalar or iterable of scalars\n Number or container of numbers which are intended to be interpreted as\n integers, e.g., for indexing purposes, but which may not carry integer\n type.\n atol : float\n Absolute tolerance away from nearest integer to consider values in\n ``val`` functionally integers.\n\n Returns\n -------\n val_int : NumPy scalar or ndarray of dtype `np.int64`\n Returns the input value(s) coerced to dtype `np.int64` assuming all\n were within ``atol`` of the nearest integer.\n\n Notes\n -----\n This operation calculates ``val`` modulo 1, which returns the mantissa of\n all values. Then all mantissas greater than 0.5 are subtracted from one.\n Finally, the absolute tolerance from zero is calculated. If it is less\n than ``atol`` for all value(s) in ``val``, they are rounded and returned\n in an integer array. Or, if ``val`` was a scalar, a NumPy scalar type is\n returned.\n\n If any value(s) are outside the specified tolerance, an informative error\n is raised.\n\n Examples\n --------\n >>> safe_as_int(7.0)\n 7\n\n >>> safe_as_int([9, 4, 2.9999999999])\n array([9, 4, 3])\n\n >>> safe_as_int(53.1)\n Traceback (most recent call last):\n ...\n ValueError: Integer argument required but received 53.1, check inputs.\n\n >>> safe_as_int(53.01, atol=0.01)\n 53\n\n \"\"\"\n mod = np.asarray(val) % 1 # Extract mantissa\n\n # Check for and subtract any mod values > 0.5 from 1\n if mod.ndim == 0: # Scalar input, cannot be indexed\n if mod > 0.5:\n mod = 1 - mod\n else: # Iterable input, now ndarray\n mod[mod > 0.5] = 1 - mod[mod > 0.5] # Test on each side of nearest int\n\n try:\n np.testing.assert_allclose(mod, 0, atol=atol)\n except AssertionError:\n raise ValueError(\"Integer argument required but received \"\n \"{0}, check inputs.\".format(val))\n\n return np.round(val).astype(np.int64)\n\n\ndef check_shape_equality(im1, im2):\n \"\"\"Raise an error if the shape do not match.\"\"\"\n if not im1.shape == im2.shape:\n raise ValueError('Input images must have the same dimensions.')\n return\n\n\ndef check_nD(array, ndim, arg_name='image'):\n \"\"\"\n Verify an array meets the desired ndims and array isn't empty.\n\n Parameters\n ----------\n array : array-like\n Input array to be validated\n ndim : int or iterable of ints\n Allowable ndim or ndims for the array.\n arg_name : str, optional\n The name of the array in the original function.\n\n \"\"\"\n array = np.asanyarray(array)\n msg_incorrect_dim = \"The parameter `%s` must be a %s-dimensional array\"\n msg_empty_array = \"The parameter `%s` cannot be an empty array\"\n if isinstance(ndim, int):\n ndim = [ndim]\n if array.size == 0:\n raise ValueError(msg_empty_array % (arg_name))\n if not array.ndim in ndim:\n raise ValueError(msg_incorrect_dim % (arg_name, '-or-'.join([str(n) for n in ndim])))\n\n\ndef check_random_state(seed):\n \"\"\"Turn seed into a `np.random.RandomState` instance.\n\n Parameters\n ----------\n seed : None, int or np.random.RandomState\n If `seed` is None, return the RandomState singleton used by `np.random`.\n If `seed` is an int, return a new RandomState instance seeded with 
`seed`.\n If `seed` is already a RandomState instance, return it.\n\n Raises\n ------\n ValueError\n If `seed` is of the wrong type.\n\n \"\"\"\n # Function originally from scikit-learn's module sklearn.utils.validation\n if seed is None or seed is np.random:\n return np.random.mtrand._rand\n if isinstance(seed, (numbers.Integral, np.integer)):\n return np.random.RandomState(seed)\n if isinstance(seed, np.random.RandomState):\n return seed\n raise ValueError('%r cannot be used to seed a numpy.random.RandomState'\n ' instance' % seed)\n\n\ndef convert_to_float(image, preserve_range):\n \"\"\"Convert input image to float image with the appropriate range.\n\n Parameters\n ----------\n image : ndarray\n Input image.\n preserve_range : bool\n Determines if the range of the image should be kept or transformed\n using img_as_float. Also see\n https://scikit-image.org/docs/dev/user_guide/data_types.html\n\n Notes:\n ------\n * Input images with `float32` data type are not upcast.\n\n Returns\n -------\n image : ndarray\n Transformed version of the input.\n\n \"\"\"\n if preserve_range:\n # Convert image to double only if it is not single or double\n # precision float\n if image.dtype.char not in 'df':\n image = image.astype(float)\n else:\n image = img_as_float(image)\n return image\n\n\ndef _validate_interpolation_order(image_dtype, order):\n \"\"\"Validate and return spline interpolation's order.\n\n Parameters\n ----------\n image_dtype : dtype\n Image dtype.\n order : int, optional\n The order of the spline interpolation. The order has to be in\n the range 0-5. See `skimage.transform.warp` for detail.\n\n Returns\n -------\n order : int\n if input order is None, returns 0 if image_dtype is bool and 1\n otherwise. Otherwise, image_dtype is checked and input order\n is validated accordingly (order > 0 is not supported for bool\n image dtype)\n\n \"\"\"\n\n if order is None:\n return 0 if image_dtype == bool else 1\n\n if order < 0 or order > 5:\n raise ValueError(\"Spline interpolation order has to be in the \"\n \"range 0-5.\")\n\n if image_dtype == bool and order != 0:\n warn(\"Input image dtype is bool. Interpolation is not defined \"\n \"with bool data type. Please set order to 0 or explicitely \"\n \"cast input image to another data type. Starting from version \"\n \"0.19 a ValueError will be raised instead of this warning.\",\n FutureWarning, stacklevel=2)\n\n return order\n\n\ndef _to_np_mode(mode):\n \"\"\"Convert padding modes from `ndi.correlate` to `np.pad`.\"\"\"\n mode_translation_dict = dict(nearest='edge', reflect='symmetric',\n mirror='reflect')\n if mode in mode_translation_dict:\n mode = mode_translation_dict[mode]\n return mode\n\n\ndef _to_ndimage_mode(mode):\n \"\"\"Convert from `numpy.pad` mode name to the corresponding ndimage mode.\"\"\"\n mode_translation_dict = dict(constant='constant', edge='nearest',\n symmetric='reflect', reflect='mirror',\n wrap='wrap')\n if mode not in mode_translation_dict:\n raise ValueError(\n (\"Unknown mode: '{}', or cannot translate mode. The \"\n \"mode should be one of 'constant', 'edge', 'symmetric', \"\n \"'reflect', or 'wrap'. See the documentation of numpy.pad for\"\n \"more info.\").format(mode))\n return _fix_ndimage_mode(mode_translation_dict[mode])\n\n\ndef _fix_ndimage_mode(mode):\n # SciPy 1.6.0 introduced grid variants of constant and wrap which\n # have less surprising behavior for images. 
Use these when available\n grid_modes = {'constant': 'grid-constant', 'wrap': 'grid-wrap'}\n if NumpyVersion(scipy.__version__) >= '1.6.0':\n mode = grid_modes.get(mode, mode)\n return mode\n", "path": "skimage/_shared/utils.py" } ]
diff --git a/skimage/_shared/utils.py b/skimage/_shared/utils.py index c0cf954e44a..144145dfdfe 100644 --- a/skimage/_shared/utils.py +++ b/skimage/_shared/utils.py @@ -119,7 +119,7 @@ class deprecate_kwarg: Parameters ---------- - arg_mapping: dict + kwarg_mapping: dict Mapping between the function's old argument names and the new ones. warning_msg: str
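The one-line fix in the diff above only renames the documented parameter (`arg_mapping` to `kwarg_mapping`) to match the real signature of the `deprecate_kwarg` decorator shown in the file contents. As a minimal usage sketch of that decorator, assuming a hypothetical function and keyword names (only the import path and the decorator's signature come from the record above):

```python
from skimage._shared.utils import deprecate_kwarg

# Hypothetical function for illustration: 'old_size' is the deprecated
# keyword name, 'output_shape' the new one. Calling
# resize(img, old_size=(4, 4)) emits a FutureWarning mentioning version
# 0.20 and forwards the value to the function as output_shape.
@deprecate_kwarg({'old_size': 'output_shape'}, removed_version='0.20')
def resize(image, output_shape=None):
    return image, output_shape
```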
cupy__cupy-4734
`pip install` completely ignores existing source builds and installed dependencies  I suspect this has to do with #4619. I am on the latest master, and now every time I call `pip install -v -e .` two things happen: 1. These packages keep being reinstalled even though I already have them in my env: setuptools, wheel, Cython, fastrlock 2. All modules are re-cythonized and recompiled from scratch, even though they've been built and nothing has changed  I will try to build in a fresh env to see if something is wrong with my current env. But it's better to have this confirmed independently.  cc: @kmaehashi @emcastillo
[ { "content": "#!/usr/bin/env python\n\nimport glob\nimport os\nfrom setuptools import setup, find_packages\nimport sys\n\nimport cupy_setup_build\n\n\nfor submodule in ('cupy/core/include/cupy/cub/',\n 'cupy/core/include/cupy/jitify'):\n if len(os.listdir(submodule)) == 0:\n msg = '''\n The folder %s is a git submodule but is\n currently empty. Please use the command\n\n git submodule update --init\n\n to populate the folder before building from source.\n ''' % submodule\n print(msg, file=sys.stderr)\n sys.exit(1)\n\n\nrequirements = {\n # setup_requires remains here for pip v18 or earlier.\n # Keep in sync with pyproject.yaml.\n 'setup': [\n 'Cython>=0.28.0',\n 'fastrlock>=0.5',\n ],\n\n 'install': [\n 'numpy>=1.17',\n 'fastrlock>=0.5',\n ],\n 'all': [\n 'scipy>=1.4',\n 'optuna>=2.0',\n ],\n\n 'stylecheck': [\n 'autopep8==1.4.4',\n 'flake8==3.7.9',\n 'pbr==4.0.4',\n 'pycodestyle==2.5.0',\n ],\n 'test': [\n # 4.2 <= pytest < 6.2 is slow collecting tests and times out on CI.\n 'pytest>=6.2',\n ],\n 'appveyor': [\n '-r test',\n ],\n 'jenkins': [\n '-r test',\n 'pytest-timeout',\n 'pytest-cov',\n 'coveralls',\n 'codecov',\n 'coverage<5', # Otherwise, Python must be built with sqlite\n ],\n}\n\n\ndef reduce_requirements(key):\n # Resolve recursive requirements notation (-r)\n reqs = requirements[key]\n resolved_reqs = []\n for req in reqs:\n if req.startswith('-r'):\n depend_key = req[2:].lstrip()\n reduce_requirements(depend_key)\n resolved_reqs += requirements[depend_key]\n else:\n resolved_reqs.append(req)\n requirements[key] = resolved_reqs\n\n\nfor k in requirements.keys():\n reduce_requirements(k)\n\n\nextras_require = {k: v for k, v in requirements.items() if k != 'install'}\n\n\nsetup_requires = requirements['setup']\ninstall_requires = requirements['install']\ntests_require = requirements['test']\n\n# List of files that needs to be in the distribution (sdist/wheel).\n# Notes:\n# - Files only needed in sdist should be added to `MANIFEST.in`.\n# - The following glob (`**`) ignores items starting with `.`.\ncupy_package_data = [\n 'cupy/cuda/cupy_thrust.cu',\n 'cupy/cuda/cupy_cub.cu',\n 'cupy/cuda/cupy_cufftXt.cu', # for cuFFT callback\n 'cupy/cuda/cupy_cufftXt.h', # for cuFFT callback\n 'cupy/cuda/cupy_cufft.h', # for cuFFT callback\n 'cupy/cuda/cufft.pxd', # for cuFFT callback\n 'cupy/cuda/cufft.pyx', # for cuFFT callback\n 'cupy/random/cupy_distributions.cu',\n 'cupy/random/cupy_distributions.cuh',\n] + [\n x for x in glob.glob('cupy/core/include/cupy/**', recursive=True)\n if os.path.isfile(x)\n]\n\npackage_data = {\n 'cupy': [\n os.path.relpath(x, 'cupy') for x in cupy_package_data\n ],\n}\n\npackage_data['cupy'] += cupy_setup_build.prepare_wheel_libs()\n\npackage_name = cupy_setup_build.get_package_name()\nlong_description = cupy_setup_build.get_long_description()\next_modules = cupy_setup_build.get_ext_modules()\nbuild_ext = cupy_setup_build.custom_build_ext\n\nhere = os.path.abspath(os.path.dirname(__file__))\n# Get __version__ variable\nwith open(os.path.join(here, 'cupy', '_version.py')) as f:\n exec(f.read())\n\nCLASSIFIERS = \"\"\"\\\nDevelopment Status :: 5 - Production/Stable\nIntended Audience :: Science/Research\nIntended Audience :: Developers\nLicense :: OSI Approved :: MIT License\nProgramming Language :: Python\nProgramming Language :: Python :: 3\nProgramming Language :: Python :: 3.6\nProgramming Language :: Python :: 3.7\nProgramming Language :: Python :: 3.8\nProgramming Language :: Python :: 3.9\nProgramming Language :: Python :: 3 :: Only\nProgramming Language 
:: Cython\nTopic :: Software Development\nTopic :: Scientific/Engineering\nOperating System :: POSIX\nOperating System :: Microsoft :: Windows\n\"\"\"\n\n\nsetup(\n name=package_name,\n version=__version__, # NOQA\n description='CuPy: A NumPy-compatible array library accelerated by CUDA',\n long_description=long_description,\n author='Seiya Tokui',\n author_email='[email protected]',\n url='https://cupy.dev/',\n license='MIT License',\n project_urls={\n \"Bug Tracker\": \"https://github.com/cupy/cupy/issues\",\n \"Documentation\": \"https://docs.cupy.dev/\",\n \"Source Code\": \"https://github.com/cupy/cupy\",\n },\n classifiers=[_f for _f in CLASSIFIERS.split('\\n') if _f],\n packages=find_packages(exclude=['install', 'tests']),\n package_data=package_data,\n zip_safe=False,\n python_requires='>=3.6.0',\n setup_requires=setup_requires,\n install_requires=install_requires,\n tests_require=tests_require,\n extras_require=extras_require,\n ext_modules=ext_modules,\n cmdclass={'build_ext': build_ext},\n)\n", "path": "setup.py" } ]
[ { "content": "#!/usr/bin/env python\n\nimport glob\nimport os\nfrom setuptools import setup, find_packages\nimport sys\n\nimport cupy_setup_build\n\n\nfor submodule in ('cupy/core/include/cupy/cub/',\n 'cupy/core/include/cupy/jitify'):\n if len(os.listdir(submodule)) == 0:\n msg = '''\n The folder %s is a git submodule but is\n currently empty. Please use the command\n\n git submodule update --init\n\n to populate the folder before building from source.\n ''' % submodule\n print(msg, file=sys.stderr)\n sys.exit(1)\n\n\nrequirements = {\n # TODO(kmaehashi): migrate to pyproject.toml (see #4727, #4619)\n 'setup': [\n 'Cython>=0.28.0',\n 'fastrlock>=0.5',\n ],\n\n 'install': [\n 'numpy>=1.17',\n 'fastrlock>=0.5',\n ],\n 'all': [\n 'scipy>=1.4',\n 'optuna>=2.0',\n ],\n\n 'stylecheck': [\n 'autopep8==1.4.4',\n 'flake8==3.7.9',\n 'pbr==4.0.4',\n 'pycodestyle==2.5.0',\n ],\n 'test': [\n # 4.2 <= pytest < 6.2 is slow collecting tests and times out on CI.\n 'pytest>=6.2',\n ],\n 'appveyor': [\n '-r test',\n ],\n 'jenkins': [\n '-r test',\n 'pytest-timeout',\n 'pytest-cov',\n 'coveralls',\n 'codecov',\n 'coverage<5', # Otherwise, Python must be built with sqlite\n ],\n}\n\n\ndef reduce_requirements(key):\n # Resolve recursive requirements notation (-r)\n reqs = requirements[key]\n resolved_reqs = []\n for req in reqs:\n if req.startswith('-r'):\n depend_key = req[2:].lstrip()\n reduce_requirements(depend_key)\n resolved_reqs += requirements[depend_key]\n else:\n resolved_reqs.append(req)\n requirements[key] = resolved_reqs\n\n\nfor k in requirements.keys():\n reduce_requirements(k)\n\n\nextras_require = {k: v for k, v in requirements.items() if k != 'install'}\n\n\nsetup_requires = requirements['setup']\ninstall_requires = requirements['install']\ntests_require = requirements['test']\n\n# List of files that needs to be in the distribution (sdist/wheel).\n# Notes:\n# - Files only needed in sdist should be added to `MANIFEST.in`.\n# - The following glob (`**`) ignores items starting with `.`.\ncupy_package_data = [\n 'cupy/cuda/cupy_thrust.cu',\n 'cupy/cuda/cupy_cub.cu',\n 'cupy/cuda/cupy_cufftXt.cu', # for cuFFT callback\n 'cupy/cuda/cupy_cufftXt.h', # for cuFFT callback\n 'cupy/cuda/cupy_cufft.h', # for cuFFT callback\n 'cupy/cuda/cufft.pxd', # for cuFFT callback\n 'cupy/cuda/cufft.pyx', # for cuFFT callback\n 'cupy/random/cupy_distributions.cu',\n 'cupy/random/cupy_distributions.cuh',\n] + [\n x for x in glob.glob('cupy/core/include/cupy/**', recursive=True)\n if os.path.isfile(x)\n]\n\npackage_data = {\n 'cupy': [\n os.path.relpath(x, 'cupy') for x in cupy_package_data\n ],\n}\n\npackage_data['cupy'] += cupy_setup_build.prepare_wheel_libs()\n\npackage_name = cupy_setup_build.get_package_name()\nlong_description = cupy_setup_build.get_long_description()\next_modules = cupy_setup_build.get_ext_modules()\nbuild_ext = cupy_setup_build.custom_build_ext\n\nhere = os.path.abspath(os.path.dirname(__file__))\n# Get __version__ variable\nwith open(os.path.join(here, 'cupy', '_version.py')) as f:\n exec(f.read())\n\nCLASSIFIERS = \"\"\"\\\nDevelopment Status :: 5 - Production/Stable\nIntended Audience :: Science/Research\nIntended Audience :: Developers\nLicense :: OSI Approved :: MIT License\nProgramming Language :: Python\nProgramming Language :: Python :: 3\nProgramming Language :: Python :: 3.6\nProgramming Language :: Python :: 3.7\nProgramming Language :: Python :: 3.8\nProgramming Language :: Python :: 3.9\nProgramming Language :: Python :: 3 :: Only\nProgramming Language :: Cython\nTopic :: 
Software Development\nTopic :: Scientific/Engineering\nOperating System :: POSIX\nOperating System :: Microsoft :: Windows\n\"\"\"\n\n\nsetup(\n name=package_name,\n version=__version__, # NOQA\n description='CuPy: A NumPy-compatible array library accelerated by CUDA',\n long_description=long_description,\n author='Seiya Tokui',\n author_email='[email protected]',\n url='https://cupy.dev/',\n license='MIT License',\n project_urls={\n \"Bug Tracker\": \"https://github.com/cupy/cupy/issues\",\n \"Documentation\": \"https://docs.cupy.dev/\",\n \"Source Code\": \"https://github.com/cupy/cupy\",\n },\n classifiers=[_f for _f in CLASSIFIERS.split('\\n') if _f],\n packages=find_packages(exclude=['install', 'tests']),\n package_data=package_data,\n zip_safe=False,\n python_requires='>=3.6.0',\n setup_requires=setup_requires,\n install_requires=install_requires,\n tests_require=tests_require,\n extras_require=extras_require,\n ext_modules=ext_modules,\n cmdclass={'build_ext': build_ext},\n)\n", "path": "setup.py" } ]
diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 64d5a0e5df7..00000000000 --- a/pyproject.toml +++ /dev/null @@ -1,2 +0,0 @@ -[build-system] -requires = ["setuptools", "wheel", "Cython>=0.28.0", "fastrlock>=0.5"] diff --git a/setup.py b/setup.py index fc0d55303d4..5350393bff5 100644 --- a/setup.py +++ b/setup.py @@ -24,8 +24,7 @@ requirements = { - # setup_requires remains here for pip v18 or earlier. - # Keep in sync with pyproject.yaml. + # TODO(kmaehashi): migrate to pyproject.toml (see #4727, #4619) 'setup': [ 'Cython>=0.28.0', 'fastrlock>=0.5',
streamlink__streamlink-4238
plugins.ustreamtv: [plugin.api.websocket][error] EOF occurred in violation of protocol (_ssl.c:1129) ### Checklist - [X] This is a bug report and not a different kind of issue - [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink) - [X] [I have checked the list of open and recently closed bug reports](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22bug%22) - [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master) ### Streamlink version Latest stable release ### Description Installed the latest stable build on Windows 11 and the command fails every time with > **[plugin.api.websocket][error] [Errno 2] No such file or directory** this command line `streamlink --loglevel debug https://video.ibm.com/nasahdtv` See the debug info for the result. Note - although the debug log says I am running windows 10 - it is most definitely windows 11 (via winver). This is a new laptop (DELL) that came pre-installed with Windows 11 I didn't see any reference to this issue - but perhaps I have missed something. Thanks for any tips. ### Debug log ```text C:\Users\liamk>streamlink --loglevel debug https://video.ibm.com/nasahdtv [cli][debug] OS: Windows 10 [cli][debug] Python: 3.9.8 [cli][debug] Streamlink: 3.0.3 [cli][debug] Requests(2.26.0), Socks(1.7.1), Websocket(1.2.1) [cli][debug] Arguments: [cli][debug] url=https://video.ibm.com/nasahdtv [cli][debug] --loglevel=debug [cli][debug] --ffmpeg-ffmpeg=C:\Program Files (x86)\Streamlink\ffmpeg\ffmpeg.exe [cli][info] Found matching plugin ustreamtv for URL https://video.ibm.com/nasahdtv [plugins.ustreamtv][debug] Connecting to UStream API: media_id=6540154, application=channel, referrer=https://video.ibm.com/nasahdtv, cluster=live [plugin.api.websocket][debug] Connecting to: wss://r2935561-1-6540154-channel-ws-live.ums.ustream.tv:1935/1/ustream [plugins.ustreamtv][debug] Waiting for stream data (for at most 15 seconds)... [plugin.api.websocket][error] [Errno 2] No such file or directory [plugin.api.websocket][debug] Closed: wss://r2935561-1-6540154-channel-ws-live.ums.ustream.tv:1935/1/ustream [plugins.ustreamtv][error] Waiting for stream data timed out. error: No playable streams found on this URL: https://video.ibm.com/nasahdtv ```
[ { "content": "import logging\nimport re\nfrom collections import deque\nfrom datetime import datetime, timedelta\nfrom random import randint\nfrom threading import Event, RLock\nfrom typing import Any, Callable, Deque, Dict, List, NamedTuple, Union\nfrom urllib.parse import urljoin, urlunparse\n\nfrom requests import Response\n\nfrom streamlink.exceptions import PluginError, StreamError\nfrom streamlink.plugin import Plugin, PluginArgument, PluginArguments, pluginmatcher\nfrom streamlink.plugin.api import useragents, validate\nfrom streamlink.plugin.api.websocket import WebsocketClient\nfrom streamlink.stream.ffmpegmux import MuxedStream\nfrom streamlink.stream.segmented import SegmentedStreamReader, SegmentedStreamWorker, SegmentedStreamWriter\nfrom streamlink.stream.stream import Stream\nfrom streamlink.utils.parse import parse_json\n\n\nlog = logging.getLogger(__name__)\n\n\n# TODO: use dataclasses for stream formats after dropping py36 to be able to subclass\nclass StreamFormatVideo(NamedTuple):\n contentType: str\n sourceStreamVersion: int\n initUrl: str\n segmentUrl: str\n bitrate: int\n height: int\n\n\nclass StreamFormatAudio(NamedTuple):\n contentType: str\n sourceStreamVersion: int\n initUrl: str\n segmentUrl: str\n bitrate: int\n language: str = \"\"\n\n\nclass Segment(NamedTuple):\n num: int\n duration: int\n available_at: datetime\n hash: str\n path: str\n\n # the segment URLs depend on the CDN and the chosen stream format and its segment template string\n def url(self, base: str, template: str) -> str:\n return urljoin(\n base,\n f\"{self.path}/{template.replace('%', str(self.num), 1).replace('%', self.hash, 1)}\"\n )\n\n\nclass UStreamTVWsClient(WebsocketClient):\n API_URL = \"wss://r{0}-1-{1}-{2}-ws-{3}.ums.ustream.tv:1935/1/ustream\"\n APP_ID = 3\n APP_VERSION = 2\n\n STREAM_OPENED_TIMEOUT = 6\n\n _schema_cmd = validate.Schema({\n \"cmd\": str,\n \"args\": [{str: object}],\n })\n _schema_stream_formats = validate.Schema({\n \"streams\": [validate.any(\n validate.all(\n {\n \"contentType\": \"video/mp4\",\n \"sourceStreamVersion\": int,\n \"initUrl\": str,\n \"segmentUrl\": str,\n \"bitrate\": int,\n \"height\": int,\n },\n validate.transform(lambda obj: StreamFormatVideo(**obj))\n ),\n validate.all(\n {\n \"contentType\": \"audio/mp4\",\n \"sourceStreamVersion\": int,\n \"initUrl\": str,\n \"segmentUrl\": str,\n \"bitrate\": int,\n validate.optional(\"language\"): str,\n },\n validate.transform(lambda obj: StreamFormatAudio(**obj))\n ),\n object\n )]\n })\n _schema_stream_segments = validate.Schema({\n \"chunkId\": int,\n \"chunkTime\": int,\n \"contentAccess\": validate.all(\n {\n \"accessList\": [{\n \"data\": {\n \"path\": str\n }\n }]\n },\n validate.get((\"accessList\", 0, \"data\", \"path\"))\n ),\n \"hashes\": {validate.transform(int): str}\n })\n\n stream_cdn: str = None\n stream_formats_video: List[StreamFormatVideo] = None\n stream_formats_audio: List[StreamFormatAudio] = None\n stream_initial_id: int = None\n\n def __init__(\n self,\n session,\n media_id,\n application,\n referrer=None,\n cluster=\"live\",\n password=None,\n app_id=APP_ID,\n app_version=APP_VERSION\n ):\n self.opened = Event()\n self.ready = Event()\n self.stream_error = None\n # a list of deques subscribed by worker threads which independently need to read segments\n self.stream_segments_subscribers: List[Deque[Segment]] = []\n self.stream_segments_initial: Deque[Segment] = deque()\n self.stream_segments_lock = RLock()\n\n self.media_id = media_id\n self.application = application\n 
self.referrer = referrer\n self.cluster = cluster\n self.password = password\n self.app_id = app_id\n self.app_version = app_version\n\n super().__init__(session, self._get_url(), origin=\"https://www.ustream.tv\")\n\n def _get_url(self):\n return self.API_URL.format(randint(0, 0xffffff), self.media_id, self.application, self.cluster)\n\n def _set_error(self, error: Any):\n self.stream_error = error\n self.ready.set()\n\n def _set_ready(self):\n if not self.ready.is_set() and self.stream_cdn and self.stream_initial_id is not None:\n self.ready.set()\n\n if self.opened.wait(self.STREAM_OPENED_TIMEOUT):\n log.debug(\"Stream opened, keeping websocket connection alive\")\n else:\n log.info(\"Closing websocket connection\")\n self.ws.close()\n\n def segments_subscribe(self) -> Deque[Segment]:\n with self.stream_segments_lock:\n # copy the initial segments deque (segments arrive early)\n new_deque = self.stream_segments_initial.copy()\n self.stream_segments_subscribers.append(new_deque)\n\n return new_deque\n\n def _segments_append(self, segment: Segment):\n # if there are no subscribers yet, add segment(s) to the initial deque\n if not self.stream_segments_subscribers:\n self.stream_segments_initial.append(segment)\n else:\n for subscriber_deque in self.stream_segments_subscribers:\n subscriber_deque.append(segment)\n\n def on_open(self, wsapp):\n args = {\n \"type\": \"viewer\",\n \"appId\": self.app_id,\n \"appVersion\": self.app_version,\n \"rsid\": f\"{randint(0, 10_000_000_000):x}:{randint(0, 10_000_000_000):x}\",\n \"rpin\": f\"_rpin.{randint(0, 1_000_000_000_000_000)}\",\n \"referrer\": self.referrer,\n \"clusterHost\": \"r%rnd%-1-%mediaId%-%mediaType%-%protocolPrefix%-%cluster%.ums.ustream.tv\",\n \"media\": self.media_id,\n \"application\": self.application\n }\n if self.password:\n args[\"password\"] = self.password\n\n self.send_json({\n \"cmd\": \"connect\",\n \"args\": [args]\n })\n\n def on_message(self, wsapp, data: str):\n try:\n parsed = parse_json(data, schema=self._schema_cmd)\n except PluginError:\n log.error(f\"Could not parse message: {data[:50]}\")\n return\n\n cmd: str = parsed[\"cmd\"]\n args: List[Dict] = parsed[\"args\"]\n log.trace(f\"Received '{cmd}' command\")\n log.trace(f\"{args!r}\")\n\n handlers = self._MESSAGE_HANDLERS.get(cmd)\n if handlers is not None:\n for arg in args:\n for name, handler in handlers.items():\n argdata = arg.get(name)\n if argdata is not None:\n log.debug(f\"Processing '{cmd}' - '{name}'\")\n handler(self, argdata)\n\n # noinspection PyMethodMayBeStatic\n def _handle_warning(self, data: Dict):\n log.warning(f\"{data['code']}: {str(data['message'])[:50]}\")\n\n # noinspection PyUnusedLocal\n def _handle_reject_nonexistent(self, *args):\n self._set_error(\"This channel does not exist\")\n\n # noinspection PyUnusedLocal\n def _handle_reject_geo_lock(self, *args):\n self._set_error(\"This content is not available in your area\")\n\n def _handle_reject_cluster(self, arg: Dict):\n self.cluster = arg[\"name\"]\n log.info(f\"Switching cluster to: {self.cluster}\")\n self.reconnect(url=self._get_url())\n\n def _handle_reject_referrer_lock(self, arg: Dict):\n self.referrer = arg[\"redirectUrl\"]\n log.info(f\"Updating referrer to: {self.referrer}\")\n self.reconnect(url=self._get_url())\n\n def _handle_module_info_cdn_config(self, data: Dict):\n self.stream_cdn = urlunparse((\n data[\"protocol\"],\n data[\"data\"][0][\"data\"][0][\"sites\"][0][\"host\"],\n data[\"data\"][0][\"data\"][0][\"sites\"][0][\"path\"],\n \"\", \"\", \"\"\n ))\n 
self._set_ready()\n\n def _handle_module_info_stream(self, data: Dict):\n if data.get(\"contentAvailable\") is False:\n return self._set_error(\"This stream is currently offline\")\n\n mp4_segmented = data.get(\"streamFormats\", {}).get(\"mp4/segmented\")\n if not mp4_segmented:\n return\n\n # parse the stream formats once\n if self.stream_initial_id is None:\n try:\n formats = self._schema_stream_formats.validate(mp4_segmented)\n formats = formats[\"streams\"]\n except PluginError as err:\n return self._set_error(err)\n self.stream_formats_video = list(filter(lambda f: type(f) is StreamFormatVideo, formats))\n self.stream_formats_audio = list(filter(lambda f: type(f) is StreamFormatAudio, formats))\n\n # parse segment duration and hashes, and queue new segments\n try:\n segmentdata: Dict = self._schema_stream_segments.validate(mp4_segmented)\n except PluginError:\n log.error(\"Failed parsing hashes\")\n return\n\n current_id: int = segmentdata[\"chunkId\"]\n duration: int = segmentdata[\"chunkTime\"]\n path: str = segmentdata[\"contentAccess\"]\n hashes: Dict[int, str] = segmentdata[\"hashes\"]\n\n sorted_ids = sorted(hashes.keys())\n count = len(sorted_ids)\n if count == 0:\n return\n\n # initial segment ID (needed by the workers to filter queued segments)\n if self.stream_initial_id is None:\n self.stream_initial_id = current_id\n\n current_time = datetime.now()\n\n # lock the stream segments deques for the worker threads\n with self.stream_segments_lock:\n # interpolate and extrapolate segments from the provided id->hash data\n diff = 10 - sorted_ids[0] % 10 # if there's only one id->hash item, extrapolate until the next decimal\n for idx, segment_id in enumerate(sorted_ids):\n idx_next = idx + 1\n if idx_next < count:\n # calculate the difference between IDs and use that to interpolate segment IDs\n # the last id->hash item will use the previous diff to extrapolate segment IDs\n diff = sorted_ids[idx_next] - segment_id\n for num in range(segment_id, segment_id + diff):\n self._segments_append(Segment(\n num=num,\n duration=duration,\n available_at=current_time + timedelta(seconds=(num - current_id - 1) * duration / 1000),\n hash=hashes[segment_id],\n path=path\n ))\n\n self._set_ready()\n\n # ----\n\n _MESSAGE_HANDLERS: Dict[str, Dict[str, Callable[[\"UStreamTVWsClient\", Any], None]]] = {\n \"warning\": {\n \"code\": _handle_warning,\n },\n \"reject\": {\n \"cluster\": _handle_reject_cluster,\n \"referrerLock\": _handle_reject_referrer_lock,\n \"nonexistent\": _handle_reject_nonexistent,\n \"geoLock\": _handle_reject_geo_lock,\n },\n \"moduleInfo\": {\n \"cdnConfig\": _handle_module_info_cdn_config,\n \"stream\": _handle_module_info_stream,\n }\n }\n\n\nclass UStreamTVStreamWriter(SegmentedStreamWriter):\n stream: \"UStreamTVStream\"\n reader: \"UStreamTVStreamReader\"\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self._has_init = False\n\n def put(self, segment):\n if self.closed: # pragma: no cover\n return\n\n if segment is None:\n self.queue(None, None)\n else:\n if not self._has_init:\n self._has_init = True\n self.queue(segment, self.executor.submit(self.fetch, segment, True))\n self.queue(segment, self.executor.submit(self.fetch, segment, False))\n\n # noinspection PyMethodOverriding\n def fetch(self, segment: Segment, is_init: bool):\n if self.closed: # pragma: no cover\n return\n\n now = datetime.now()\n if segment.available_at > now:\n time_to_wait = (segment.available_at - now).total_seconds()\n log.debug(f\"Waiting for {self.stream.kind} 
segment: {segment.num} ({time_to_wait:.01f}s)\")\n if not self.reader.worker.wait(time_to_wait):\n return\n\n try:\n return self.session.http.get(\n segment.url(\n self.stream.wsclient.stream_cdn,\n self.stream.stream_format.initUrl if is_init else self.stream.stream_format.segmentUrl\n ),\n timeout=self.timeout,\n retries=self.retries,\n exception=StreamError\n )\n except StreamError as err:\n log.error(f\"Failed to fetch {self.stream.kind} segment {segment.num}: {err}\")\n\n def write(self, segment: Segment, res: Response, *data):\n if self.closed: # pragma: no cover\n return\n try:\n for chunk in res.iter_content(8192):\n self.reader.buffer.write(chunk)\n log.debug(f\"Download of {self.stream.kind} segment {segment.num} complete\")\n except OSError as err:\n log.error(f\"Failed to read {self.stream.kind} segment {segment.num}: {err}\")\n\n\nclass UStreamTVStreamWorker(SegmentedStreamWorker):\n stream: \"UStreamTVStream\"\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.wsclient = self.stream.wsclient\n self.segment_id = self.wsclient.stream_initial_id\n self.queue = self.wsclient.segments_subscribe()\n\n def iter_segments(self):\n duration = 5000\n while not self.closed:\n try:\n with self.wsclient.stream_segments_lock:\n segment = self.queue.popleft()\n duration = segment.duration\n except IndexError:\n # wait for new segments to be queued (half the last segment's duration in seconds)\n if self.wait(duration / 1000 / 2):\n continue\n\n if self.closed:\n return\n\n if segment.num < self.segment_id:\n continue\n\n log.debug(f\"Adding {self.stream.kind} segment {segment.num} to queue\")\n yield segment\n self.segment_id = segment.num + 1\n\n\nclass UStreamTVStreamReader(SegmentedStreamReader):\n __worker__ = UStreamTVStreamWorker\n __writer__ = UStreamTVStreamWriter\n stream: \"UStreamTVStream\"\n\n def open(self):\n self.stream.wsclient.opened.set()\n super().open()\n\n def close(self):\n super().close()\n self.stream.wsclient.close()\n\n\nclass UStreamTVStream(Stream):\n __shortname__ = \"ustreamtv\"\n\n def __init__(\n self,\n session,\n kind: str,\n wsclient: UStreamTVWsClient,\n stream_format: Union[StreamFormatVideo, StreamFormatAudio]\n ):\n super().__init__(session)\n self.kind = kind\n self.wsclient = wsclient\n self.stream_format = stream_format\n\n def open(self):\n reader = UStreamTVStreamReader(self)\n reader.open()\n\n return reader\n\n\n@pluginmatcher(re.compile(r\"\"\"\n https?://(?:(www\\.)?ustream\\.tv|video\\.ibm\\.com)\n (?:\n (/embed/|/channel/id/)(?P<channel_id>\\d+)\n )?\n (?:\n (/embed)?/recorded/(?P<video_id>\\d+)\n )?\n\"\"\", re.VERBOSE))\nclass UStreamTV(Plugin):\n arguments = PluginArguments(\n PluginArgument(\n \"password\",\n argument_name=\"ustream-password\",\n sensitive=True,\n metavar=\"PASSWORD\",\n help=\"A password to access password protected UStream.tv channels.\"\n )\n )\n\n STREAM_READY_TIMEOUT = 15\n\n def _get_media_app(self):\n video_id = self.match.group(\"video_id\")\n if video_id:\n return video_id, \"recorded\"\n\n channel_id = self.match.group(\"channel_id\")\n if not channel_id:\n channel_id = self.session.http.get(\n self.url,\n headers={\"User-Agent\": useragents.CHROME},\n schema=validate.Schema(\n validate.parse_html(),\n validate.xml_xpath_string(\".//meta[@name='ustream:channel_id'][@content][1]/@content\")\n )\n )\n\n return channel_id, \"channel\"\n\n def _get_streams(self):\n if not MuxedStream.is_usable(self.session):\n return\n\n media_id, application = self._get_media_app()\n if not 
media_id:\n return\n\n wsclient = UStreamTVWsClient(\n self.session,\n media_id,\n application,\n referrer=self.url,\n cluster=\"live\",\n password=self.get_option(\"password\")\n )\n log.debug(\n f\"Connecting to UStream API:\"\n f\" media_id={media_id},\"\n f\" application={application},\"\n f\" referrer={self.url},\"\n f\" cluster=live\"\n )\n wsclient.start()\n\n log.debug(f\"Waiting for stream data (for at most {self.STREAM_READY_TIMEOUT} seconds)...\")\n if (\n not wsclient.ready.wait(self.STREAM_READY_TIMEOUT)\n or not wsclient.is_alive()\n or wsclient.stream_error\n ):\n log.error(wsclient.stream_error or \"Waiting for stream data timed out.\")\n wsclient.close()\n return\n\n if not wsclient.stream_formats_audio:\n for video in wsclient.stream_formats_video:\n yield f\"{video.height}p\", UStreamTVStream(self.session, \"video\", wsclient, video)\n else:\n for video in wsclient.stream_formats_video:\n for audio in wsclient.stream_formats_audio:\n yield f\"{video.height}p+a{audio.bitrate}k\", MuxedStream(\n self.session,\n UStreamTVStream(self.session, \"video\", wsclient, video),\n UStreamTVStream(self.session, \"audio\", wsclient, audio)\n )\n\n\n__plugin__ = UStreamTV\n", "path": "src/streamlink/plugins/ustreamtv.py" } ]
[ { "content": "import logging\nimport re\nfrom collections import deque\nfrom datetime import datetime, timedelta\nfrom random import randint\nfrom threading import Event, RLock\nfrom typing import Any, Callable, Deque, Dict, List, NamedTuple, Union\nfrom urllib.parse import urljoin, urlunparse\n\nfrom requests import Response\n\nfrom streamlink.exceptions import PluginError, StreamError\nfrom streamlink.plugin import Plugin, PluginArgument, PluginArguments, pluginmatcher\nfrom streamlink.plugin.api import useragents, validate\nfrom streamlink.plugin.api.websocket import WebsocketClient\nfrom streamlink.stream.ffmpegmux import MuxedStream\nfrom streamlink.stream.segmented import SegmentedStreamReader, SegmentedStreamWorker, SegmentedStreamWriter\nfrom streamlink.stream.stream import Stream\nfrom streamlink.utils.parse import parse_json\n\n\nlog = logging.getLogger(__name__)\n\n\n# TODO: use dataclasses for stream formats after dropping py36 to be able to subclass\nclass StreamFormatVideo(NamedTuple):\n contentType: str\n sourceStreamVersion: int\n initUrl: str\n segmentUrl: str\n bitrate: int\n height: int\n\n\nclass StreamFormatAudio(NamedTuple):\n contentType: str\n sourceStreamVersion: int\n initUrl: str\n segmentUrl: str\n bitrate: int\n language: str = \"\"\n\n\nclass Segment(NamedTuple):\n num: int\n duration: int\n available_at: datetime\n hash: str\n path: str\n\n # the segment URLs depend on the CDN and the chosen stream format and its segment template string\n def url(self, base: str, template: str) -> str:\n return urljoin(\n base,\n f\"{self.path}/{template.replace('%', str(self.num), 1).replace('%', self.hash, 1)}\"\n )\n\n\nclass UStreamTVWsClient(WebsocketClient):\n API_URL = \"wss://r{0}-1-{1}-{2}-ws-{3}.ums.services.video.ibm.com/1/ustream\"\n APP_ID = 3\n APP_VERSION = 2\n\n STREAM_OPENED_TIMEOUT = 6\n\n _schema_cmd = validate.Schema({\n \"cmd\": str,\n \"args\": [{str: object}],\n })\n _schema_stream_formats = validate.Schema({\n \"streams\": [validate.any(\n validate.all(\n {\n \"contentType\": \"video/mp4\",\n \"sourceStreamVersion\": int,\n \"initUrl\": str,\n \"segmentUrl\": str,\n \"bitrate\": int,\n \"height\": int,\n },\n validate.transform(lambda obj: StreamFormatVideo(**obj))\n ),\n validate.all(\n {\n \"contentType\": \"audio/mp4\",\n \"sourceStreamVersion\": int,\n \"initUrl\": str,\n \"segmentUrl\": str,\n \"bitrate\": int,\n validate.optional(\"language\"): str,\n },\n validate.transform(lambda obj: StreamFormatAudio(**obj))\n ),\n object\n )]\n })\n _schema_stream_segments = validate.Schema({\n \"chunkId\": int,\n \"chunkTime\": int,\n \"contentAccess\": validate.all(\n {\n \"accessList\": [{\n \"data\": {\n \"path\": str\n }\n }]\n },\n validate.get((\"accessList\", 0, \"data\", \"path\"))\n ),\n \"hashes\": {validate.transform(int): str}\n })\n\n stream_cdn: str = None\n stream_formats_video: List[StreamFormatVideo] = None\n stream_formats_audio: List[StreamFormatAudio] = None\n stream_initial_id: int = None\n\n def __init__(\n self,\n session,\n media_id,\n application,\n referrer=None,\n cluster=\"live\",\n password=None,\n app_id=APP_ID,\n app_version=APP_VERSION\n ):\n self.opened = Event()\n self.ready = Event()\n self.stream_error = None\n # a list of deques subscribed by worker threads which independently need to read segments\n self.stream_segments_subscribers: List[Deque[Segment]] = []\n self.stream_segments_initial: Deque[Segment] = deque()\n self.stream_segments_lock = RLock()\n\n self.media_id = media_id\n self.application = application\n 
self.referrer = referrer\n self.cluster = cluster\n self.password = password\n self.app_id = app_id\n self.app_version = app_version\n\n super().__init__(session, self._get_url(), origin=\"https://www.ustream.tv\")\n\n def _get_url(self):\n return self.API_URL.format(randint(0, 0xffffff), self.media_id, self.application, self.cluster)\n\n def _set_error(self, error: Any):\n self.stream_error = error\n self.ready.set()\n\n def _set_ready(self):\n if not self.ready.is_set() and self.stream_cdn and self.stream_initial_id is not None:\n self.ready.set()\n\n if self.opened.wait(self.STREAM_OPENED_TIMEOUT):\n log.debug(\"Stream opened, keeping websocket connection alive\")\n else:\n log.info(\"Closing websocket connection\")\n self.ws.close()\n\n def segments_subscribe(self) -> Deque[Segment]:\n with self.stream_segments_lock:\n # copy the initial segments deque (segments arrive early)\n new_deque = self.stream_segments_initial.copy()\n self.stream_segments_subscribers.append(new_deque)\n\n return new_deque\n\n def _segments_append(self, segment: Segment):\n # if there are no subscribers yet, add segment(s) to the initial deque\n if not self.stream_segments_subscribers:\n self.stream_segments_initial.append(segment)\n else:\n for subscriber_deque in self.stream_segments_subscribers:\n subscriber_deque.append(segment)\n\n def on_open(self, wsapp):\n args = {\n \"type\": \"viewer\",\n \"appId\": self.app_id,\n \"appVersion\": self.app_version,\n \"rsid\": f\"{randint(0, 10_000_000_000):x}:{randint(0, 10_000_000_000):x}\",\n \"rpin\": f\"_rpin.{randint(0, 1_000_000_000_000_000)}\",\n \"referrer\": self.referrer,\n \"clusterHost\": \"r%rnd%-1-%mediaId%-%mediaType%-%protocolPrefix%-%cluster%.ums.ustream.tv\",\n \"media\": self.media_id,\n \"application\": self.application\n }\n if self.password:\n args[\"password\"] = self.password\n\n self.send_json({\n \"cmd\": \"connect\",\n \"args\": [args]\n })\n\n def on_message(self, wsapp, data: str):\n try:\n parsed = parse_json(data, schema=self._schema_cmd)\n except PluginError:\n log.error(f\"Could not parse message: {data[:50]}\")\n return\n\n cmd: str = parsed[\"cmd\"]\n args: List[Dict] = parsed[\"args\"]\n log.trace(f\"Received '{cmd}' command\")\n log.trace(f\"{args!r}\")\n\n handlers = self._MESSAGE_HANDLERS.get(cmd)\n if handlers is not None:\n for arg in args:\n for name, handler in handlers.items():\n argdata = arg.get(name)\n if argdata is not None:\n log.debug(f\"Processing '{cmd}' - '{name}'\")\n handler(self, argdata)\n\n # noinspection PyMethodMayBeStatic\n def _handle_warning(self, data: Dict):\n log.warning(f\"{data['code']}: {str(data['message'])[:50]}\")\n\n # noinspection PyUnusedLocal\n def _handle_reject_nonexistent(self, *args):\n self._set_error(\"This channel does not exist\")\n\n # noinspection PyUnusedLocal\n def _handle_reject_geo_lock(self, *args):\n self._set_error(\"This content is not available in your area\")\n\n def _handle_reject_cluster(self, arg: Dict):\n self.cluster = arg[\"name\"]\n log.info(f\"Switching cluster to: {self.cluster}\")\n self.reconnect(url=self._get_url())\n\n def _handle_reject_referrer_lock(self, arg: Dict):\n self.referrer = arg[\"redirectUrl\"]\n log.info(f\"Updating referrer to: {self.referrer}\")\n self.reconnect(url=self._get_url())\n\n def _handle_module_info_cdn_config(self, data: Dict):\n self.stream_cdn = urlunparse((\n data[\"protocol\"],\n data[\"data\"][0][\"data\"][0][\"sites\"][0][\"host\"],\n data[\"data\"][0][\"data\"][0][\"sites\"][0][\"path\"],\n \"\", \"\", \"\"\n ))\n 
self._set_ready()\n\n def _handle_module_info_stream(self, data: Dict):\n if data.get(\"contentAvailable\") is False:\n return self._set_error(\"This stream is currently offline\")\n\n mp4_segmented = data.get(\"streamFormats\", {}).get(\"mp4/segmented\")\n if not mp4_segmented:\n return\n\n # parse the stream formats once\n if self.stream_initial_id is None:\n try:\n formats = self._schema_stream_formats.validate(mp4_segmented)\n formats = formats[\"streams\"]\n except PluginError as err:\n return self._set_error(err)\n self.stream_formats_video = list(filter(lambda f: type(f) is StreamFormatVideo, formats))\n self.stream_formats_audio = list(filter(lambda f: type(f) is StreamFormatAudio, formats))\n\n # parse segment duration and hashes, and queue new segments\n try:\n segmentdata: Dict = self._schema_stream_segments.validate(mp4_segmented)\n except PluginError:\n log.error(\"Failed parsing hashes\")\n return\n\n current_id: int = segmentdata[\"chunkId\"]\n duration: int = segmentdata[\"chunkTime\"]\n path: str = segmentdata[\"contentAccess\"]\n hashes: Dict[int, str] = segmentdata[\"hashes\"]\n\n sorted_ids = sorted(hashes.keys())\n count = len(sorted_ids)\n if count == 0:\n return\n\n # initial segment ID (needed by the workers to filter queued segments)\n if self.stream_initial_id is None:\n self.stream_initial_id = current_id\n\n current_time = datetime.now()\n\n # lock the stream segments deques for the worker threads\n with self.stream_segments_lock:\n # interpolate and extrapolate segments from the provided id->hash data\n diff = 10 - sorted_ids[0] % 10 # if there's only one id->hash item, extrapolate until the next decimal\n for idx, segment_id in enumerate(sorted_ids):\n idx_next = idx + 1\n if idx_next < count:\n # calculate the difference between IDs and use that to interpolate segment IDs\n # the last id->hash item will use the previous diff to extrapolate segment IDs\n diff = sorted_ids[idx_next] - segment_id\n for num in range(segment_id, segment_id + diff):\n self._segments_append(Segment(\n num=num,\n duration=duration,\n available_at=current_time + timedelta(seconds=(num - current_id - 1) * duration / 1000),\n hash=hashes[segment_id],\n path=path\n ))\n\n self._set_ready()\n\n # ----\n\n _MESSAGE_HANDLERS: Dict[str, Dict[str, Callable[[\"UStreamTVWsClient\", Any], None]]] = {\n \"warning\": {\n \"code\": _handle_warning,\n },\n \"reject\": {\n \"cluster\": _handle_reject_cluster,\n \"referrerLock\": _handle_reject_referrer_lock,\n \"nonexistent\": _handle_reject_nonexistent,\n \"geoLock\": _handle_reject_geo_lock,\n },\n \"moduleInfo\": {\n \"cdnConfig\": _handle_module_info_cdn_config,\n \"stream\": _handle_module_info_stream,\n }\n }\n\n\nclass UStreamTVStreamWriter(SegmentedStreamWriter):\n stream: \"UStreamTVStream\"\n reader: \"UStreamTVStreamReader\"\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self._has_init = False\n\n def put(self, segment):\n if self.closed: # pragma: no cover\n return\n\n if segment is None:\n self.queue(None, None)\n else:\n if not self._has_init:\n self._has_init = True\n self.queue(segment, self.executor.submit(self.fetch, segment, True))\n self.queue(segment, self.executor.submit(self.fetch, segment, False))\n\n # noinspection PyMethodOverriding\n def fetch(self, segment: Segment, is_init: bool):\n if self.closed: # pragma: no cover\n return\n\n now = datetime.now()\n if segment.available_at > now:\n time_to_wait = (segment.available_at - now).total_seconds()\n log.debug(f\"Waiting for {self.stream.kind} 
segment: {segment.num} ({time_to_wait:.01f}s)\")\n if not self.reader.worker.wait(time_to_wait):\n return\n\n try:\n return self.session.http.get(\n segment.url(\n self.stream.wsclient.stream_cdn,\n self.stream.stream_format.initUrl if is_init else self.stream.stream_format.segmentUrl\n ),\n timeout=self.timeout,\n retries=self.retries,\n exception=StreamError\n )\n except StreamError as err:\n log.error(f\"Failed to fetch {self.stream.kind} segment {segment.num}: {err}\")\n\n def write(self, segment: Segment, res: Response, *data):\n if self.closed: # pragma: no cover\n return\n try:\n for chunk in res.iter_content(8192):\n self.reader.buffer.write(chunk)\n log.debug(f\"Download of {self.stream.kind} segment {segment.num} complete\")\n except OSError as err:\n log.error(f\"Failed to read {self.stream.kind} segment {segment.num}: {err}\")\n\n\nclass UStreamTVStreamWorker(SegmentedStreamWorker):\n stream: \"UStreamTVStream\"\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.wsclient = self.stream.wsclient\n self.segment_id = self.wsclient.stream_initial_id\n self.queue = self.wsclient.segments_subscribe()\n\n def iter_segments(self):\n duration = 5000\n while not self.closed:\n try:\n with self.wsclient.stream_segments_lock:\n segment = self.queue.popleft()\n duration = segment.duration\n except IndexError:\n # wait for new segments to be queued (half the last segment's duration in seconds)\n if self.wait(duration / 1000 / 2):\n continue\n\n if self.closed:\n return\n\n if segment.num < self.segment_id:\n continue\n\n log.debug(f\"Adding {self.stream.kind} segment {segment.num} to queue\")\n yield segment\n self.segment_id = segment.num + 1\n\n\nclass UStreamTVStreamReader(SegmentedStreamReader):\n __worker__ = UStreamTVStreamWorker\n __writer__ = UStreamTVStreamWriter\n stream: \"UStreamTVStream\"\n\n def open(self):\n self.stream.wsclient.opened.set()\n super().open()\n\n def close(self):\n super().close()\n self.stream.wsclient.close()\n\n\nclass UStreamTVStream(Stream):\n __shortname__ = \"ustreamtv\"\n\n def __init__(\n self,\n session,\n kind: str,\n wsclient: UStreamTVWsClient,\n stream_format: Union[StreamFormatVideo, StreamFormatAudio]\n ):\n super().__init__(session)\n self.kind = kind\n self.wsclient = wsclient\n self.stream_format = stream_format\n\n def open(self):\n reader = UStreamTVStreamReader(self)\n reader.open()\n\n return reader\n\n\n@pluginmatcher(re.compile(r\"\"\"\n https?://(?:(www\\.)?ustream\\.tv|video\\.ibm\\.com)\n (?:\n (/embed/|/channel/id/)(?P<channel_id>\\d+)\n )?\n (?:\n (/embed)?/recorded/(?P<video_id>\\d+)\n )?\n\"\"\", re.VERBOSE))\nclass UStreamTV(Plugin):\n arguments = PluginArguments(\n PluginArgument(\n \"password\",\n argument_name=\"ustream-password\",\n sensitive=True,\n metavar=\"PASSWORD\",\n help=\"A password to access password protected UStream.tv channels.\"\n )\n )\n\n STREAM_READY_TIMEOUT = 15\n\n def _get_media_app(self):\n video_id = self.match.group(\"video_id\")\n if video_id:\n return video_id, \"recorded\"\n\n channel_id = self.match.group(\"channel_id\")\n if not channel_id:\n channel_id = self.session.http.get(\n self.url,\n headers={\"User-Agent\": useragents.CHROME},\n schema=validate.Schema(\n validate.parse_html(),\n validate.xml_xpath_string(\".//meta[@name='ustream:channel_id'][@content][1]/@content\")\n )\n )\n\n return channel_id, \"channel\"\n\n def _get_streams(self):\n if not MuxedStream.is_usable(self.session):\n return\n\n media_id, application = self._get_media_app()\n if not 
media_id:\n return\n\n wsclient = UStreamTVWsClient(\n self.session,\n media_id,\n application,\n referrer=self.url,\n cluster=\"live\",\n password=self.get_option(\"password\")\n )\n log.debug(\n f\"Connecting to UStream API:\"\n f\" media_id={media_id},\"\n f\" application={application},\"\n f\" referrer={self.url},\"\n f\" cluster=live\"\n )\n wsclient.start()\n\n log.debug(f\"Waiting for stream data (for at most {self.STREAM_READY_TIMEOUT} seconds)...\")\n if (\n not wsclient.ready.wait(self.STREAM_READY_TIMEOUT)\n or not wsclient.is_alive()\n or wsclient.stream_error\n ):\n log.error(wsclient.stream_error or \"Waiting for stream data timed out.\")\n wsclient.close()\n return\n\n if not wsclient.stream_formats_audio:\n for video in wsclient.stream_formats_video:\n yield f\"{video.height}p\", UStreamTVStream(self.session, \"video\", wsclient, video)\n else:\n for video in wsclient.stream_formats_video:\n for audio in wsclient.stream_formats_audio:\n yield f\"{video.height}p+a{audio.bitrate}k\", MuxedStream(\n self.session,\n UStreamTVStream(self.session, \"video\", wsclient, video),\n UStreamTVStream(self.session, \"audio\", wsclient, audio)\n )\n\n\n__plugin__ = UStreamTV\n", "path": "src/streamlink/plugins/ustreamtv.py" } ]
diff --git a/src/streamlink/plugins/ustreamtv.py b/src/streamlink/plugins/ustreamtv.py index ca6f628691a..b1e31730985 100644 --- a/src/streamlink/plugins/ustreamtv.py +++ b/src/streamlink/plugins/ustreamtv.py @@ -57,7 +57,7 @@ def url(self, base: str, template: str) -> str: class UStreamTVWsClient(WebsocketClient): - API_URL = "wss://r{0}-1-{1}-{2}-ws-{3}.ums.ustream.tv:1935/1/ustream" + API_URL = "wss://r{0}-1-{1}-{2}-ws-{3}.ums.services.video.ibm.com/1/ustream" APP_ID = 3 APP_VERSION = 2
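To make the segment bookkeeping in `_handle_module_info_stream` above easier to follow, here is a minimal standalone sketch of how a sparse id→hash map can be expanded into per-segment entries. The function name `interpolate_segments` and the plain-dict return value are illustrative only and are not part of the plugin.

```python
from datetime import datetime, timedelta


def interpolate_segments(hashes, current_id, duration_ms, now=None):
    """Illustrative only: expand a sparse {segment_id: hash} map into one entry
    per segment ID, reusing each hash until the next known ID (mirroring the
    idea in UStreamTVWsClient._handle_module_info_stream)."""
    now = now or datetime.now()
    sorted_ids = sorted(hashes)
    if not sorted_ids:
        return []

    segments = []
    # if there is only one id->hash item, extrapolate until the next multiple of 10
    diff = 10 - sorted_ids[0] % 10
    for idx, segment_id in enumerate(sorted_ids):
        if idx + 1 < len(sorted_ids):
            # difference between consecutive known IDs drives the interpolation
            diff = sorted_ids[idx + 1] - segment_id
        for num in range(segment_id, segment_id + diff):
            segments.append({
                "num": num,
                "hash": hashes[segment_id],
                # future segments become available one chunk duration at a time
                "available_at": now + timedelta(seconds=(num - current_id - 1) * duration_ms / 1000),
            })
    return segments


# example: IDs 120 and 125 are known, so 120-124 reuse hash "a" and 125-129 reuse "b"
print(len(interpolate_segments({120: "a", 125: "b"}, current_id=120, duration_ms=5000)))  # 10
```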
mitmproxy__mitmproxy-6796
Failed to proxy HTTPS request to unicode domains

#### Problem Description

Just like issue https://github.com/mitmproxy/mitmproxy/issues/6381.

#### Steps to reproduce the behavior:

1. start mitmproxy: `mitmproxy -p 8080`
2. browse url with proxy setup, for example: `https://tt.广西阀门.net`

and then mitmproxy throws following exception:

```
Addon error: DNSName values should be passed as an A-label string. This means unicode characters should be encoded via a library like idna.
Traceback (most recent call last):
  File "/home/pan/.local/lib/python3.10/site-packages/mitmproxy/addons/tlsconfig.py", line 526, in _ip_or_dns_name
    ip = ipaddress.ip_address(val)
  File "/usr/lib/python3.10/ipaddress.py", line 54, in ip_address
    raise ValueError(f'{address!r} does not appear to be an IPv4 or IPv6 address')
ValueError: 'tt.广西阀门.net' does not appear to be an IPv4 or IPv6 address

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/pan/.local/lib/python3.10/site-packages/cryptography/x509/general_name.py", line 85, in __init__
    value.encode("ascii")
UnicodeEncodeError: 'ascii' codec can't encode characters in position 3-6: ordinal not in range(128)

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/pan/.local/lib/python3.10/site-packages/mitmproxy/addons/tlsconfig.py", line 178, in tls_start_client
    entry = self.get_cert(tls_start.context)
  File "/home/pan/.local/lib/python3.10/site-packages/mitmproxy/addons/tlsconfig.py", line 512, in get_cert
    altnames.append(_ip_or_dns_name(conn_context.server.address[0]))
  File "/home/pan/.local/lib/python3.10/site-packages/mitmproxy/addons/tlsconfig.py", line 528, in _ip_or_dns_name
    return x509.DNSName(val)
  File "/home/pan/.local/lib/python3.10/site-packages/cryptography/x509/general_name.py", line 87, in __init__
    raise ValueError(
ValueError: DNSName values should be passed as an A-label string. This means unicode characters should be encoded via a library like idna.
[16:31:32.448][127.0.0.1:53048] No TLS context was provided, failing connection.
```

#### System Information

```sh
$ mitmproxy --version
Mitmproxy: 10.2.4
Python: 3.10.12
OpenSSL: OpenSSL 3.2.1 30 Jan 2024
Platform: Linux-6.5.0-21-generic-x86_64-with-glibc2.35
```

Browser:

```
Google Chrome 122.0.6261.94 (Official Build) (64-bit)
Revision 880dbf29479c6152d5e4f08dfd3a96b30f919e56-refs/branch-heads/6261@{#960}
OS Linux
JavaScript V8 12.2.281.19
User Agent Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36
Command Line /usr/bin/google-chrome-stable --flag-switches-begin --flag-switches-end --desktop-startup-id=gnome-shell/Google Chrome/2430-1-PC_TIME219086
```
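The traceback boils down to `cryptography` requiring A-label (punycode) input for `x509.DNSName`, while the raw unicode host is passed through. Below is a minimal illustration of the failure and of the IDNA conversion, assuming only that the `cryptography` package is installed; note that Python's built-in `idna` codec implements IDNA 2003 and can reject some edge-case labels.

```python
from cryptography import x509

host = "tt.广西阀门.net"

# x509.DNSName rejects the raw unicode (U-label) form, as in the traceback above.
try:
    x509.DNSName(host)
except ValueError as err:
    print(err)

# Encoding to an A-label first is accepted.
a_label = host.encode("idna").decode("ascii")
print(a_label)                      # the xn-- (punycode) form of the domain
print(x509.DNSName(a_label).value)  # same A-label, now usable in a SAN
```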
[ { "content": "import ipaddress\nimport logging\nimport os\nimport ssl\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import TypedDict\n\nfrom aioquic.h3.connection import H3_ALPN\nfrom aioquic.tls import CipherSuite\nfrom cryptography import x509\nfrom OpenSSL import crypto\nfrom OpenSSL import SSL\n\nfrom mitmproxy import certs\nfrom mitmproxy import connection\nfrom mitmproxy import ctx\nfrom mitmproxy import exceptions\nfrom mitmproxy import tls\nfrom mitmproxy.net import tls as net_tls\nfrom mitmproxy.options import CONF_BASENAME\nfrom mitmproxy.proxy import context\nfrom mitmproxy.proxy.layers import modes\nfrom mitmproxy.proxy.layers import quic\nfrom mitmproxy.proxy.layers import tls as proxy_tls\n\n# We manually need to specify this, otherwise OpenSSL may select a non-HTTP2 cipher by default.\n# https://ssl-config.mozilla.org/#config=old\n\nDEFAULT_CIPHERS = (\n \"ECDHE-ECDSA-AES128-GCM-SHA256\",\n \"ECDHE-RSA-AES128-GCM-SHA256\",\n \"ECDHE-ECDSA-AES256-GCM-SHA384\",\n \"ECDHE-RSA-AES256-GCM-SHA384\",\n \"ECDHE-ECDSA-CHACHA20-POLY1305\",\n \"ECDHE-RSA-CHACHA20-POLY1305\",\n \"DHE-RSA-AES128-GCM-SHA256\",\n \"DHE-RSA-AES256-GCM-SHA384\",\n \"DHE-RSA-CHACHA20-POLY1305\",\n \"ECDHE-ECDSA-AES128-SHA256\",\n \"ECDHE-RSA-AES128-SHA256\",\n \"ECDHE-ECDSA-AES128-SHA\",\n \"ECDHE-RSA-AES128-SHA\",\n \"ECDHE-ECDSA-AES256-SHA384\",\n \"ECDHE-RSA-AES256-SHA384\",\n \"ECDHE-ECDSA-AES256-SHA\",\n \"ECDHE-RSA-AES256-SHA\",\n \"DHE-RSA-AES128-SHA256\",\n \"DHE-RSA-AES256-SHA256\",\n \"AES128-GCM-SHA256\",\n \"AES256-GCM-SHA384\",\n \"AES128-SHA256\",\n \"AES256-SHA256\",\n \"AES128-SHA\",\n \"AES256-SHA\",\n \"DES-CBC3-SHA\",\n)\n\n# 2022/05: X509_CHECK_FLAG_NEVER_CHECK_SUBJECT is not available in LibreSSL, ignore gracefully as it's not critical.\nDEFAULT_HOSTFLAGS = (\n SSL._lib.X509_CHECK_FLAG_NO_PARTIAL_WILDCARDS # type: ignore\n | getattr(SSL._lib, \"X509_CHECK_FLAG_NEVER_CHECK_SUBJECT\", 0) # type: ignore\n)\n\n\nclass AppData(TypedDict):\n client_alpn: bytes | None\n server_alpn: bytes | None\n http2: bool\n\n\ndef alpn_select_callback(conn: SSL.Connection, options: list[bytes]) -> Any:\n app_data: AppData = conn.get_app_data()\n client_alpn = app_data[\"client_alpn\"]\n server_alpn = app_data[\"server_alpn\"]\n http2 = app_data[\"http2\"]\n if client_alpn is not None:\n if client_alpn in options:\n return client_alpn\n else:\n return SSL.NO_OVERLAPPING_PROTOCOLS\n if server_alpn and server_alpn in options:\n return server_alpn\n if server_alpn == b\"\":\n # We do have a server connection, but the remote server refused to negotiate a protocol:\n # We need to mirror this on the client connection.\n return SSL.NO_OVERLAPPING_PROTOCOLS\n http_alpns = proxy_tls.HTTP_ALPNS if http2 else proxy_tls.HTTP1_ALPNS\n # client sends in order of preference, so we are nice and respect that.\n for alpn in options:\n if alpn in http_alpns:\n return alpn\n else:\n return SSL.NO_OVERLAPPING_PROTOCOLS\n\n\nclass TlsConfig:\n \"\"\"\n This addon supplies the proxy core with the desired OpenSSL connection objects to negotiate TLS.\n \"\"\"\n\n certstore: certs.CertStore = None # type: ignore\n\n # TODO: We should support configuring TLS 1.3 cipher suites (https://github.com/mitmproxy/mitmproxy/issues/4260)\n # TODO: We should re-use SSL.Context options here, if only for TLS session resumption.\n # This may require patches to pyOpenSSL, as some functionality is only exposed on contexts.\n # TODO: This addon should manage the following options itself, which are current defined in 
mitmproxy/options.py:\n # - upstream_cert\n # - add_upstream_certs_to_client_chain\n # - ciphers_client\n # - ciphers_server\n # - key_size\n # - certs\n # - cert_passphrase\n # - ssl_verify_upstream_trusted_ca\n # - ssl_verify_upstream_trusted_confdir\n\n def load(self, loader):\n loader.add_option(\n name=\"tls_version_client_min\",\n typespec=str,\n default=net_tls.DEFAULT_MIN_VERSION.name,\n choices=[x.name for x in net_tls.Version],\n help=f\"Set the minimum TLS version for client connections.\",\n )\n loader.add_option(\n name=\"tls_version_client_max\",\n typespec=str,\n default=net_tls.DEFAULT_MAX_VERSION.name,\n choices=[x.name for x in net_tls.Version],\n help=f\"Set the maximum TLS version for client connections.\",\n )\n loader.add_option(\n name=\"tls_version_server_min\",\n typespec=str,\n default=net_tls.DEFAULT_MIN_VERSION.name,\n choices=[x.name for x in net_tls.Version],\n help=f\"Set the minimum TLS version for server connections.\",\n )\n loader.add_option(\n name=\"tls_version_server_max\",\n typespec=str,\n default=net_tls.DEFAULT_MAX_VERSION.name,\n choices=[x.name for x in net_tls.Version],\n help=f\"Set the maximum TLS version for server connections.\",\n )\n loader.add_option(\n name=\"tls_ecdh_curve_client\",\n typespec=str | None,\n default=None,\n help=\"Use a specific elliptic curve for ECDHE key exchange on client connections. \"\n 'OpenSSL syntax, for example \"prime256v1\" (see `openssl ecparam -list_curves`).',\n )\n loader.add_option(\n name=\"tls_ecdh_curve_server\",\n typespec=str | None,\n default=None,\n help=\"Use a specific elliptic curve for ECDHE key exchange on server connections. \"\n 'OpenSSL syntax, for example \"prime256v1\" (see `openssl ecparam -list_curves`).',\n )\n\n def tls_clienthello(self, tls_clienthello: tls.ClientHelloData):\n conn_context = tls_clienthello.context\n tls_clienthello.establish_server_tls_first = (\n conn_context.server.tls and ctx.options.connection_strategy == \"eager\"\n )\n\n def tls_start_client(self, tls_start: tls.TlsData) -> None:\n \"\"\"Establish TLS or DTLS between client and proxy.\"\"\"\n if tls_start.ssl_conn is not None:\n return # a user addon has already provided the pyOpenSSL context.\n\n assert isinstance(tls_start.conn, connection.Client)\n\n client: connection.Client = tls_start.conn\n server: connection.Server = tls_start.context.server\n\n entry = self.get_cert(tls_start.context)\n\n if not client.cipher_list and ctx.options.ciphers_client:\n client.cipher_list = ctx.options.ciphers_client.split(\":\")\n # don't assign to client.cipher_list, doesn't need to be stored.\n cipher_list = client.cipher_list or DEFAULT_CIPHERS\n\n if ctx.options.add_upstream_certs_to_client_chain: # pragma: no cover\n # exempted from coverage until https://bugs.python.org/issue18233 is fixed.\n extra_chain_certs = server.certificate_list\n else:\n extra_chain_certs = []\n\n ssl_ctx = net_tls.create_client_proxy_context(\n method=net_tls.Method.DTLS_SERVER_METHOD\n if tls_start.is_dtls\n else net_tls.Method.TLS_SERVER_METHOD,\n min_version=net_tls.Version[ctx.options.tls_version_client_min],\n max_version=net_tls.Version[ctx.options.tls_version_client_max],\n cipher_list=tuple(cipher_list),\n ecdh_curve=ctx.options.tls_ecdh_curve_client,\n chain_file=entry.chain_file,\n request_client_cert=False,\n alpn_select_callback=alpn_select_callback,\n extra_chain_certs=tuple(extra_chain_certs),\n dhparams=self.certstore.dhparams,\n )\n tls_start.ssl_conn = SSL.Connection(ssl_ctx)\n\n 
tls_start.ssl_conn.use_certificate(entry.cert.to_pyopenssl())\n tls_start.ssl_conn.use_privatekey(\n crypto.PKey.from_cryptography_key(entry.privatekey)\n )\n\n # Force HTTP/1 for secure web proxies, we currently don't support CONNECT over HTTP/2.\n # There is a proof-of-concept branch at https://github.com/mhils/mitmproxy/tree/http2-proxy,\n # but the complexity outweighs the benefits for now.\n if len(tls_start.context.layers) == 2 and isinstance(\n tls_start.context.layers[0], modes.HttpProxy\n ):\n client_alpn: bytes | None = b\"http/1.1\"\n else:\n client_alpn = client.alpn\n\n tls_start.ssl_conn.set_app_data(\n AppData(\n client_alpn=client_alpn,\n server_alpn=server.alpn,\n http2=ctx.options.http2,\n )\n )\n tls_start.ssl_conn.set_accept_state()\n\n def tls_start_server(self, tls_start: tls.TlsData) -> None:\n \"\"\"Establish TLS or DTLS between proxy and server.\"\"\"\n if tls_start.ssl_conn is not None:\n return # a user addon has already provided the pyOpenSSL context.\n\n assert isinstance(tls_start.conn, connection.Server)\n\n client: connection.Client = tls_start.context.client\n # tls_start.conn may be different from tls_start.context.server, e.g. an upstream HTTPS proxy.\n server: connection.Server = tls_start.conn\n assert server.address\n\n if ctx.options.ssl_insecure:\n verify = net_tls.Verify.VERIFY_NONE\n else:\n verify = net_tls.Verify.VERIFY_PEER\n\n if server.sni is None:\n server.sni = client.sni or server.address[0]\n\n if not server.alpn_offers:\n if client.alpn_offers:\n if ctx.options.http2:\n # We would perfectly support HTTP/1 -> HTTP/2, but we want to keep things on the same protocol\n # version. There are some edge cases where we want to mirror the regular server's behavior\n # accurately, for example header capitalization.\n server.alpn_offers = tuple(client.alpn_offers)\n else:\n server.alpn_offers = tuple(\n x for x in client.alpn_offers if x != b\"h2\"\n )\n else:\n # We either have no client TLS or a client without ALPN.\n # - If the client does use TLS but did not send an ALPN extension, we want to mirror that upstream.\n # - If the client does not use TLS, there's no clear-cut answer. 
As a pragmatic approach, we also do\n # not send any ALPN extension in this case, which defaults to whatever protocol we are speaking\n # or falls back to HTTP.\n server.alpn_offers = []\n\n if not server.cipher_list and ctx.options.ciphers_server:\n server.cipher_list = ctx.options.ciphers_server.split(\":\")\n # don't assign to client.cipher_list, doesn't need to be stored.\n cipher_list = server.cipher_list or DEFAULT_CIPHERS\n\n client_cert: str | None = None\n if ctx.options.client_certs:\n client_certs = os.path.expanduser(ctx.options.client_certs)\n if os.path.isfile(client_certs):\n client_cert = client_certs\n else:\n server_name: str = server.sni or server.address[0]\n p = os.path.join(client_certs, f\"{server_name}.pem\")\n if os.path.isfile(p):\n client_cert = p\n\n ssl_ctx = net_tls.create_proxy_server_context(\n method=net_tls.Method.DTLS_CLIENT_METHOD\n if tls_start.is_dtls\n else net_tls.Method.TLS_CLIENT_METHOD,\n min_version=net_tls.Version[ctx.options.tls_version_server_min],\n max_version=net_tls.Version[ctx.options.tls_version_server_max],\n cipher_list=tuple(cipher_list),\n ecdh_curve=ctx.options.tls_ecdh_curve_server,\n verify=verify,\n ca_path=ctx.options.ssl_verify_upstream_trusted_confdir,\n ca_pemfile=ctx.options.ssl_verify_upstream_trusted_ca,\n client_cert=client_cert,\n legacy_server_connect=ctx.options.ssl_insecure,\n )\n\n tls_start.ssl_conn = SSL.Connection(ssl_ctx)\n if server.sni:\n # We need to set SNI + enable hostname verification.\n assert isinstance(server.sni, str)\n # Manually enable hostname verification on the context object.\n # https://wiki.openssl.org/index.php/Hostname_validation\n param = SSL._lib.SSL_get0_param(tls_start.ssl_conn._ssl) # type: ignore\n # Matching on the CN is disabled in both Chrome and Firefox, so we disable it, too.\n # https://www.chromestatus.com/feature/4981025180483584\n\n SSL._lib.X509_VERIFY_PARAM_set_hostflags(param, DEFAULT_HOSTFLAGS) # type: ignore\n\n try:\n ip: bytes = ipaddress.ip_address(server.sni).packed\n except ValueError:\n host_name = server.sni.encode(\"idna\")\n tls_start.ssl_conn.set_tlsext_host_name(host_name)\n ok = SSL._lib.X509_VERIFY_PARAM_set1_host( # type: ignore\n param, host_name, len(host_name)\n ) # type: ignore\n SSL._openssl_assert(ok == 1) # type: ignore\n else:\n # RFC 6066: Literal IPv4 and IPv6 addresses are not permitted in \"HostName\",\n # so we don't call set_tlsext_host_name.\n ok = SSL._lib.X509_VERIFY_PARAM_set1_ip(param, ip, len(ip)) # type: ignore\n SSL._openssl_assert(ok == 1) # type: ignore\n elif verify is not net_tls.Verify.VERIFY_NONE:\n raise ValueError(\"Cannot validate certificate hostname without SNI\")\n\n if server.alpn_offers:\n tls_start.ssl_conn.set_alpn_protos(server.alpn_offers)\n\n tls_start.ssl_conn.set_connect_state()\n\n def quic_start_client(self, tls_start: quic.QuicTlsData) -> None:\n \"\"\"Establish QUIC between client and proxy.\"\"\"\n if tls_start.settings is not None:\n return # a user addon has already provided the settings.\n tls_start.settings = quic.QuicTlsSettings()\n\n # keep the following part in sync with `tls_start_client`\n assert isinstance(tls_start.conn, connection.Client)\n\n client: connection.Client = tls_start.conn\n server: connection.Server = tls_start.context.server\n\n entry = self.get_cert(tls_start.context)\n\n if not client.cipher_list and ctx.options.ciphers_client:\n client.cipher_list = ctx.options.ciphers_client.split(\":\")\n\n if ctx.options.add_upstream_certs_to_client_chain: # pragma: no cover\n extra_chain_certs = 
server.certificate_list\n else:\n extra_chain_certs = []\n\n # set context parameters\n if client.cipher_list:\n tls_start.settings.cipher_suites = [\n CipherSuite[cipher] for cipher in client.cipher_list\n ]\n # if we don't have upstream ALPN, we allow all offered by the client\n tls_start.settings.alpn_protocols = [\n alpn.decode(\"ascii\")\n for alpn in [alpn for alpn in (client.alpn, server.alpn) if alpn]\n or client.alpn_offers\n ]\n\n # set the certificates\n tls_start.settings.certificate = entry.cert._cert\n tls_start.settings.certificate_private_key = entry.privatekey\n tls_start.settings.certificate_chain = [\n cert._cert for cert in (*entry.chain_certs, *extra_chain_certs)\n ]\n\n def quic_start_server(self, tls_start: quic.QuicTlsData) -> None:\n \"\"\"Establish QUIC between proxy and server.\"\"\"\n if tls_start.settings is not None:\n return # a user addon has already provided the settings.\n tls_start.settings = quic.QuicTlsSettings()\n\n # keep the following part in sync with `tls_start_server`\n assert isinstance(tls_start.conn, connection.Server)\n\n client: connection.Client = tls_start.context.client\n server: connection.Server = tls_start.conn\n assert server.address\n\n if ctx.options.ssl_insecure:\n tls_start.settings.verify_mode = ssl.CERT_NONE\n else:\n tls_start.settings.verify_mode = ssl.CERT_REQUIRED\n\n if server.sni is None:\n server.sni = client.sni or server.address[0]\n\n if not server.alpn_offers:\n if client.alpn_offers:\n server.alpn_offers = tuple(client.alpn_offers)\n else:\n # aioquic fails if no ALPN is offered, so use H3\n server.alpn_offers = tuple(alpn.encode(\"ascii\") for alpn in H3_ALPN)\n\n if not server.cipher_list and ctx.options.ciphers_server:\n server.cipher_list = ctx.options.ciphers_server.split(\":\")\n\n # set context parameters\n if server.cipher_list:\n tls_start.settings.cipher_suites = [\n CipherSuite[cipher] for cipher in server.cipher_list\n ]\n if server.alpn_offers:\n tls_start.settings.alpn_protocols = [\n alpn.decode(\"ascii\") for alpn in server.alpn_offers\n ]\n\n # set the certificates\n # NOTE client certificates are not supported\n tls_start.settings.ca_path = ctx.options.ssl_verify_upstream_trusted_confdir\n tls_start.settings.ca_file = ctx.options.ssl_verify_upstream_trusted_ca\n\n def running(self):\n # FIXME: We have a weird bug where the contract for configure is not followed and it is never called with\n # confdir or command_history as updated.\n self.configure(\"confdir\") # pragma: no cover\n\n def configure(self, updated):\n if (\n \"certs\" in updated\n or \"confdir\" in updated\n or \"key_size\" in updated\n or \"cert_passphrase\" in updated\n ):\n certstore_path = os.path.expanduser(ctx.options.confdir)\n self.certstore = certs.CertStore.from_store(\n path=certstore_path,\n basename=CONF_BASENAME,\n key_size=ctx.options.key_size,\n passphrase=ctx.options.cert_passphrase.encode(\"utf8\")\n if ctx.options.cert_passphrase\n else None,\n )\n if self.certstore.default_ca.has_expired():\n logging.warning(\n \"The mitmproxy certificate authority has expired!\\n\"\n \"Please delete all CA-related files in your ~/.mitmproxy folder.\\n\"\n \"The CA will be regenerated automatically after restarting mitmproxy.\\n\"\n \"See https://docs.mitmproxy.org/stable/concepts-certificates/ for additional help.\",\n )\n\n for certspec in ctx.options.certs:\n parts = certspec.split(\"=\", 1)\n if len(parts) == 1:\n parts = [\"*\", parts[0]]\n\n cert = Path(parts[1]).expanduser()\n if not cert.exists():\n raise 
exceptions.OptionsError(\n f\"Certificate file does not exist: {cert}\"\n )\n try:\n self.certstore.add_cert_file(\n parts[0],\n cert,\n passphrase=ctx.options.cert_passphrase.encode(\"utf8\")\n if ctx.options.cert_passphrase\n else None,\n )\n except ValueError as e:\n raise exceptions.OptionsError(\n f\"Invalid certificate format for {cert}: {e}\"\n ) from e\n\n if \"tls_ecdh_curve_client\" in updated or \"tls_ecdh_curve_server\" in updated:\n for ecdh_curve in [\n ctx.options.tls_ecdh_curve_client,\n ctx.options.tls_ecdh_curve_server,\n ]:\n if ecdh_curve is not None:\n try:\n crypto.get_elliptic_curve(ecdh_curve)\n except Exception as e:\n raise exceptions.OptionsError(\n f\"Invalid ECDH curve: {ecdh_curve!r}\"\n ) from e\n\n def get_cert(self, conn_context: context.Context) -> certs.CertStoreEntry:\n \"\"\"\n This function determines the Common Name (CN), Subject Alternative Names (SANs) and Organization Name\n our certificate should have and then fetches a matching cert from the certstore.\n \"\"\"\n altnames: list[x509.GeneralName] = []\n organization: str | None = None\n\n # Use upstream certificate if available.\n if ctx.options.upstream_cert and conn_context.server.certificate_list:\n upstream_cert = conn_context.server.certificate_list[0]\n if upstream_cert.cn:\n altnames.append(_ip_or_dns_name(upstream_cert.cn))\n altnames.extend(upstream_cert.altnames)\n if upstream_cert.organization:\n organization = upstream_cert.organization\n\n # Add SNI or our local IP address.\n if conn_context.client.sni:\n altnames.append(_ip_or_dns_name(conn_context.client.sni))\n else:\n altnames.append(_ip_or_dns_name(conn_context.client.sockname[0]))\n\n # If we already know of a server address, include that in the SANs as well.\n if conn_context.server.address:\n altnames.append(_ip_or_dns_name(conn_context.server.address[0]))\n\n # only keep first occurrence of each hostname\n altnames = list(dict.fromkeys(altnames))\n\n # RFC 2818: If a subjectAltName extension of type dNSName is present, that MUST be used as the identity.\n # In other words, the Common Name is irrelevant then.\n cn = next((str(x.value) for x in altnames), None)\n return self.certstore.get_cert(cn, altnames, organization)\n\n\ndef _ip_or_dns_name(val: str) -> x509.GeneralName:\n \"\"\"Convert a string into either an x509.IPAddress or x509.DNSName object.\"\"\"\n try:\n ip = ipaddress.ip_address(val)\n except ValueError:\n return x509.DNSName(val)\n else:\n return x509.IPAddress(ip)\n", "path": "mitmproxy/addons/tlsconfig.py" } ]
[ { "content": "import ipaddress\nimport logging\nimport os\nimport ssl\nfrom pathlib import Path\nfrom typing import Any\nfrom typing import TypedDict\n\nfrom aioquic.h3.connection import H3_ALPN\nfrom aioquic.tls import CipherSuite\nfrom cryptography import x509\nfrom OpenSSL import crypto\nfrom OpenSSL import SSL\n\nfrom mitmproxy import certs\nfrom mitmproxy import connection\nfrom mitmproxy import ctx\nfrom mitmproxy import exceptions\nfrom mitmproxy import tls\nfrom mitmproxy.net import tls as net_tls\nfrom mitmproxy.options import CONF_BASENAME\nfrom mitmproxy.proxy import context\nfrom mitmproxy.proxy.layers import modes\nfrom mitmproxy.proxy.layers import quic\nfrom mitmproxy.proxy.layers import tls as proxy_tls\n\n# We manually need to specify this, otherwise OpenSSL may select a non-HTTP2 cipher by default.\n# https://ssl-config.mozilla.org/#config=old\n\nDEFAULT_CIPHERS = (\n \"ECDHE-ECDSA-AES128-GCM-SHA256\",\n \"ECDHE-RSA-AES128-GCM-SHA256\",\n \"ECDHE-ECDSA-AES256-GCM-SHA384\",\n \"ECDHE-RSA-AES256-GCM-SHA384\",\n \"ECDHE-ECDSA-CHACHA20-POLY1305\",\n \"ECDHE-RSA-CHACHA20-POLY1305\",\n \"DHE-RSA-AES128-GCM-SHA256\",\n \"DHE-RSA-AES256-GCM-SHA384\",\n \"DHE-RSA-CHACHA20-POLY1305\",\n \"ECDHE-ECDSA-AES128-SHA256\",\n \"ECDHE-RSA-AES128-SHA256\",\n \"ECDHE-ECDSA-AES128-SHA\",\n \"ECDHE-RSA-AES128-SHA\",\n \"ECDHE-ECDSA-AES256-SHA384\",\n \"ECDHE-RSA-AES256-SHA384\",\n \"ECDHE-ECDSA-AES256-SHA\",\n \"ECDHE-RSA-AES256-SHA\",\n \"DHE-RSA-AES128-SHA256\",\n \"DHE-RSA-AES256-SHA256\",\n \"AES128-GCM-SHA256\",\n \"AES256-GCM-SHA384\",\n \"AES128-SHA256\",\n \"AES256-SHA256\",\n \"AES128-SHA\",\n \"AES256-SHA\",\n \"DES-CBC3-SHA\",\n)\n\n# 2022/05: X509_CHECK_FLAG_NEVER_CHECK_SUBJECT is not available in LibreSSL, ignore gracefully as it's not critical.\nDEFAULT_HOSTFLAGS = (\n SSL._lib.X509_CHECK_FLAG_NO_PARTIAL_WILDCARDS # type: ignore\n | getattr(SSL._lib, \"X509_CHECK_FLAG_NEVER_CHECK_SUBJECT\", 0) # type: ignore\n)\n\n\nclass AppData(TypedDict):\n client_alpn: bytes | None\n server_alpn: bytes | None\n http2: bool\n\n\ndef alpn_select_callback(conn: SSL.Connection, options: list[bytes]) -> Any:\n app_data: AppData = conn.get_app_data()\n client_alpn = app_data[\"client_alpn\"]\n server_alpn = app_data[\"server_alpn\"]\n http2 = app_data[\"http2\"]\n if client_alpn is not None:\n if client_alpn in options:\n return client_alpn\n else:\n return SSL.NO_OVERLAPPING_PROTOCOLS\n if server_alpn and server_alpn in options:\n return server_alpn\n if server_alpn == b\"\":\n # We do have a server connection, but the remote server refused to negotiate a protocol:\n # We need to mirror this on the client connection.\n return SSL.NO_OVERLAPPING_PROTOCOLS\n http_alpns = proxy_tls.HTTP_ALPNS if http2 else proxy_tls.HTTP1_ALPNS\n # client sends in order of preference, so we are nice and respect that.\n for alpn in options:\n if alpn in http_alpns:\n return alpn\n else:\n return SSL.NO_OVERLAPPING_PROTOCOLS\n\n\nclass TlsConfig:\n \"\"\"\n This addon supplies the proxy core with the desired OpenSSL connection objects to negotiate TLS.\n \"\"\"\n\n certstore: certs.CertStore = None # type: ignore\n\n # TODO: We should support configuring TLS 1.3 cipher suites (https://github.com/mitmproxy/mitmproxy/issues/4260)\n # TODO: We should re-use SSL.Context options here, if only for TLS session resumption.\n # This may require patches to pyOpenSSL, as some functionality is only exposed on contexts.\n # TODO: This addon should manage the following options itself, which are current defined in 
mitmproxy/options.py:\n # - upstream_cert\n # - add_upstream_certs_to_client_chain\n # - ciphers_client\n # - ciphers_server\n # - key_size\n # - certs\n # - cert_passphrase\n # - ssl_verify_upstream_trusted_ca\n # - ssl_verify_upstream_trusted_confdir\n\n def load(self, loader):\n loader.add_option(\n name=\"tls_version_client_min\",\n typespec=str,\n default=net_tls.DEFAULT_MIN_VERSION.name,\n choices=[x.name for x in net_tls.Version],\n help=f\"Set the minimum TLS version for client connections.\",\n )\n loader.add_option(\n name=\"tls_version_client_max\",\n typespec=str,\n default=net_tls.DEFAULT_MAX_VERSION.name,\n choices=[x.name for x in net_tls.Version],\n help=f\"Set the maximum TLS version for client connections.\",\n )\n loader.add_option(\n name=\"tls_version_server_min\",\n typespec=str,\n default=net_tls.DEFAULT_MIN_VERSION.name,\n choices=[x.name for x in net_tls.Version],\n help=f\"Set the minimum TLS version for server connections.\",\n )\n loader.add_option(\n name=\"tls_version_server_max\",\n typespec=str,\n default=net_tls.DEFAULT_MAX_VERSION.name,\n choices=[x.name for x in net_tls.Version],\n help=f\"Set the maximum TLS version for server connections.\",\n )\n loader.add_option(\n name=\"tls_ecdh_curve_client\",\n typespec=str | None,\n default=None,\n help=\"Use a specific elliptic curve for ECDHE key exchange on client connections. \"\n 'OpenSSL syntax, for example \"prime256v1\" (see `openssl ecparam -list_curves`).',\n )\n loader.add_option(\n name=\"tls_ecdh_curve_server\",\n typespec=str | None,\n default=None,\n help=\"Use a specific elliptic curve for ECDHE key exchange on server connections. \"\n 'OpenSSL syntax, for example \"prime256v1\" (see `openssl ecparam -list_curves`).',\n )\n\n def tls_clienthello(self, tls_clienthello: tls.ClientHelloData):\n conn_context = tls_clienthello.context\n tls_clienthello.establish_server_tls_first = (\n conn_context.server.tls and ctx.options.connection_strategy == \"eager\"\n )\n\n def tls_start_client(self, tls_start: tls.TlsData) -> None:\n \"\"\"Establish TLS or DTLS between client and proxy.\"\"\"\n if tls_start.ssl_conn is not None:\n return # a user addon has already provided the pyOpenSSL context.\n\n assert isinstance(tls_start.conn, connection.Client)\n\n client: connection.Client = tls_start.conn\n server: connection.Server = tls_start.context.server\n\n entry = self.get_cert(tls_start.context)\n\n if not client.cipher_list and ctx.options.ciphers_client:\n client.cipher_list = ctx.options.ciphers_client.split(\":\")\n # don't assign to client.cipher_list, doesn't need to be stored.\n cipher_list = client.cipher_list or DEFAULT_CIPHERS\n\n if ctx.options.add_upstream_certs_to_client_chain: # pragma: no cover\n # exempted from coverage until https://bugs.python.org/issue18233 is fixed.\n extra_chain_certs = server.certificate_list\n else:\n extra_chain_certs = []\n\n ssl_ctx = net_tls.create_client_proxy_context(\n method=net_tls.Method.DTLS_SERVER_METHOD\n if tls_start.is_dtls\n else net_tls.Method.TLS_SERVER_METHOD,\n min_version=net_tls.Version[ctx.options.tls_version_client_min],\n max_version=net_tls.Version[ctx.options.tls_version_client_max],\n cipher_list=tuple(cipher_list),\n ecdh_curve=ctx.options.tls_ecdh_curve_client,\n chain_file=entry.chain_file,\n request_client_cert=False,\n alpn_select_callback=alpn_select_callback,\n extra_chain_certs=tuple(extra_chain_certs),\n dhparams=self.certstore.dhparams,\n )\n tls_start.ssl_conn = SSL.Connection(ssl_ctx)\n\n 
tls_start.ssl_conn.use_certificate(entry.cert.to_pyopenssl())\n tls_start.ssl_conn.use_privatekey(\n crypto.PKey.from_cryptography_key(entry.privatekey)\n )\n\n # Force HTTP/1 for secure web proxies, we currently don't support CONNECT over HTTP/2.\n # There is a proof-of-concept branch at https://github.com/mhils/mitmproxy/tree/http2-proxy,\n # but the complexity outweighs the benefits for now.\n if len(tls_start.context.layers) == 2 and isinstance(\n tls_start.context.layers[0], modes.HttpProxy\n ):\n client_alpn: bytes | None = b\"http/1.1\"\n else:\n client_alpn = client.alpn\n\n tls_start.ssl_conn.set_app_data(\n AppData(\n client_alpn=client_alpn,\n server_alpn=server.alpn,\n http2=ctx.options.http2,\n )\n )\n tls_start.ssl_conn.set_accept_state()\n\n def tls_start_server(self, tls_start: tls.TlsData) -> None:\n \"\"\"Establish TLS or DTLS between proxy and server.\"\"\"\n if tls_start.ssl_conn is not None:\n return # a user addon has already provided the pyOpenSSL context.\n\n assert isinstance(tls_start.conn, connection.Server)\n\n client: connection.Client = tls_start.context.client\n # tls_start.conn may be different from tls_start.context.server, e.g. an upstream HTTPS proxy.\n server: connection.Server = tls_start.conn\n assert server.address\n\n if ctx.options.ssl_insecure:\n verify = net_tls.Verify.VERIFY_NONE\n else:\n verify = net_tls.Verify.VERIFY_PEER\n\n if server.sni is None:\n server.sni = client.sni or server.address[0]\n\n if not server.alpn_offers:\n if client.alpn_offers:\n if ctx.options.http2:\n # We would perfectly support HTTP/1 -> HTTP/2, but we want to keep things on the same protocol\n # version. There are some edge cases where we want to mirror the regular server's behavior\n # accurately, for example header capitalization.\n server.alpn_offers = tuple(client.alpn_offers)\n else:\n server.alpn_offers = tuple(\n x for x in client.alpn_offers if x != b\"h2\"\n )\n else:\n # We either have no client TLS or a client without ALPN.\n # - If the client does use TLS but did not send an ALPN extension, we want to mirror that upstream.\n # - If the client does not use TLS, there's no clear-cut answer. 
As a pragmatic approach, we also do\n # not send any ALPN extension in this case, which defaults to whatever protocol we are speaking\n # or falls back to HTTP.\n server.alpn_offers = []\n\n if not server.cipher_list and ctx.options.ciphers_server:\n server.cipher_list = ctx.options.ciphers_server.split(\":\")\n # don't assign to client.cipher_list, doesn't need to be stored.\n cipher_list = server.cipher_list or DEFAULT_CIPHERS\n\n client_cert: str | None = None\n if ctx.options.client_certs:\n client_certs = os.path.expanduser(ctx.options.client_certs)\n if os.path.isfile(client_certs):\n client_cert = client_certs\n else:\n server_name: str = server.sni or server.address[0]\n p = os.path.join(client_certs, f\"{server_name}.pem\")\n if os.path.isfile(p):\n client_cert = p\n\n ssl_ctx = net_tls.create_proxy_server_context(\n method=net_tls.Method.DTLS_CLIENT_METHOD\n if tls_start.is_dtls\n else net_tls.Method.TLS_CLIENT_METHOD,\n min_version=net_tls.Version[ctx.options.tls_version_server_min],\n max_version=net_tls.Version[ctx.options.tls_version_server_max],\n cipher_list=tuple(cipher_list),\n ecdh_curve=ctx.options.tls_ecdh_curve_server,\n verify=verify,\n ca_path=ctx.options.ssl_verify_upstream_trusted_confdir,\n ca_pemfile=ctx.options.ssl_verify_upstream_trusted_ca,\n client_cert=client_cert,\n legacy_server_connect=ctx.options.ssl_insecure,\n )\n\n tls_start.ssl_conn = SSL.Connection(ssl_ctx)\n if server.sni:\n # We need to set SNI + enable hostname verification.\n assert isinstance(server.sni, str)\n # Manually enable hostname verification on the context object.\n # https://wiki.openssl.org/index.php/Hostname_validation\n param = SSL._lib.SSL_get0_param(tls_start.ssl_conn._ssl) # type: ignore\n # Matching on the CN is disabled in both Chrome and Firefox, so we disable it, too.\n # https://www.chromestatus.com/feature/4981025180483584\n\n SSL._lib.X509_VERIFY_PARAM_set_hostflags(param, DEFAULT_HOSTFLAGS) # type: ignore\n\n try:\n ip: bytes = ipaddress.ip_address(server.sni).packed\n except ValueError:\n host_name = server.sni.encode(\"idna\")\n tls_start.ssl_conn.set_tlsext_host_name(host_name)\n ok = SSL._lib.X509_VERIFY_PARAM_set1_host( # type: ignore\n param, host_name, len(host_name)\n ) # type: ignore\n SSL._openssl_assert(ok == 1) # type: ignore\n else:\n # RFC 6066: Literal IPv4 and IPv6 addresses are not permitted in \"HostName\",\n # so we don't call set_tlsext_host_name.\n ok = SSL._lib.X509_VERIFY_PARAM_set1_ip(param, ip, len(ip)) # type: ignore\n SSL._openssl_assert(ok == 1) # type: ignore\n elif verify is not net_tls.Verify.VERIFY_NONE:\n raise ValueError(\"Cannot validate certificate hostname without SNI\")\n\n if server.alpn_offers:\n tls_start.ssl_conn.set_alpn_protos(server.alpn_offers)\n\n tls_start.ssl_conn.set_connect_state()\n\n def quic_start_client(self, tls_start: quic.QuicTlsData) -> None:\n \"\"\"Establish QUIC between client and proxy.\"\"\"\n if tls_start.settings is not None:\n return # a user addon has already provided the settings.\n tls_start.settings = quic.QuicTlsSettings()\n\n # keep the following part in sync with `tls_start_client`\n assert isinstance(tls_start.conn, connection.Client)\n\n client: connection.Client = tls_start.conn\n server: connection.Server = tls_start.context.server\n\n entry = self.get_cert(tls_start.context)\n\n if not client.cipher_list and ctx.options.ciphers_client:\n client.cipher_list = ctx.options.ciphers_client.split(\":\")\n\n if ctx.options.add_upstream_certs_to_client_chain: # pragma: no cover\n extra_chain_certs = 
server.certificate_list\n else:\n extra_chain_certs = []\n\n # set context parameters\n if client.cipher_list:\n tls_start.settings.cipher_suites = [\n CipherSuite[cipher] for cipher in client.cipher_list\n ]\n # if we don't have upstream ALPN, we allow all offered by the client\n tls_start.settings.alpn_protocols = [\n alpn.decode(\"ascii\")\n for alpn in [alpn for alpn in (client.alpn, server.alpn) if alpn]\n or client.alpn_offers\n ]\n\n # set the certificates\n tls_start.settings.certificate = entry.cert._cert\n tls_start.settings.certificate_private_key = entry.privatekey\n tls_start.settings.certificate_chain = [\n cert._cert for cert in (*entry.chain_certs, *extra_chain_certs)\n ]\n\n def quic_start_server(self, tls_start: quic.QuicTlsData) -> None:\n \"\"\"Establish QUIC between proxy and server.\"\"\"\n if tls_start.settings is not None:\n return # a user addon has already provided the settings.\n tls_start.settings = quic.QuicTlsSettings()\n\n # keep the following part in sync with `tls_start_server`\n assert isinstance(tls_start.conn, connection.Server)\n\n client: connection.Client = tls_start.context.client\n server: connection.Server = tls_start.conn\n assert server.address\n\n if ctx.options.ssl_insecure:\n tls_start.settings.verify_mode = ssl.CERT_NONE\n else:\n tls_start.settings.verify_mode = ssl.CERT_REQUIRED\n\n if server.sni is None:\n server.sni = client.sni or server.address[0]\n\n if not server.alpn_offers:\n if client.alpn_offers:\n server.alpn_offers = tuple(client.alpn_offers)\n else:\n # aioquic fails if no ALPN is offered, so use H3\n server.alpn_offers = tuple(alpn.encode(\"ascii\") for alpn in H3_ALPN)\n\n if not server.cipher_list and ctx.options.ciphers_server:\n server.cipher_list = ctx.options.ciphers_server.split(\":\")\n\n # set context parameters\n if server.cipher_list:\n tls_start.settings.cipher_suites = [\n CipherSuite[cipher] for cipher in server.cipher_list\n ]\n if server.alpn_offers:\n tls_start.settings.alpn_protocols = [\n alpn.decode(\"ascii\") for alpn in server.alpn_offers\n ]\n\n # set the certificates\n # NOTE client certificates are not supported\n tls_start.settings.ca_path = ctx.options.ssl_verify_upstream_trusted_confdir\n tls_start.settings.ca_file = ctx.options.ssl_verify_upstream_trusted_ca\n\n def running(self):\n # FIXME: We have a weird bug where the contract for configure is not followed and it is never called with\n # confdir or command_history as updated.\n self.configure(\"confdir\") # pragma: no cover\n\n def configure(self, updated):\n if (\n \"certs\" in updated\n or \"confdir\" in updated\n or \"key_size\" in updated\n or \"cert_passphrase\" in updated\n ):\n certstore_path = os.path.expanduser(ctx.options.confdir)\n self.certstore = certs.CertStore.from_store(\n path=certstore_path,\n basename=CONF_BASENAME,\n key_size=ctx.options.key_size,\n passphrase=ctx.options.cert_passphrase.encode(\"utf8\")\n if ctx.options.cert_passphrase\n else None,\n )\n if self.certstore.default_ca.has_expired():\n logging.warning(\n \"The mitmproxy certificate authority has expired!\\n\"\n \"Please delete all CA-related files in your ~/.mitmproxy folder.\\n\"\n \"The CA will be regenerated automatically after restarting mitmproxy.\\n\"\n \"See https://docs.mitmproxy.org/stable/concepts-certificates/ for additional help.\",\n )\n\n for certspec in ctx.options.certs:\n parts = certspec.split(\"=\", 1)\n if len(parts) == 1:\n parts = [\"*\", parts[0]]\n\n cert = Path(parts[1]).expanduser()\n if not cert.exists():\n raise 
exceptions.OptionsError(\n f\"Certificate file does not exist: {cert}\"\n )\n try:\n self.certstore.add_cert_file(\n parts[0],\n cert,\n passphrase=ctx.options.cert_passphrase.encode(\"utf8\")\n if ctx.options.cert_passphrase\n else None,\n )\n except ValueError as e:\n raise exceptions.OptionsError(\n f\"Invalid certificate format for {cert}: {e}\"\n ) from e\n\n if \"tls_ecdh_curve_client\" in updated or \"tls_ecdh_curve_server\" in updated:\n for ecdh_curve in [\n ctx.options.tls_ecdh_curve_client,\n ctx.options.tls_ecdh_curve_server,\n ]:\n if ecdh_curve is not None:\n try:\n crypto.get_elliptic_curve(ecdh_curve)\n except Exception as e:\n raise exceptions.OptionsError(\n f\"Invalid ECDH curve: {ecdh_curve!r}\"\n ) from e\n\n def get_cert(self, conn_context: context.Context) -> certs.CertStoreEntry:\n \"\"\"\n This function determines the Common Name (CN), Subject Alternative Names (SANs) and Organization Name\n our certificate should have and then fetches a matching cert from the certstore.\n \"\"\"\n altnames: list[x509.GeneralName] = []\n organization: str | None = None\n\n # Use upstream certificate if available.\n if ctx.options.upstream_cert and conn_context.server.certificate_list:\n upstream_cert = conn_context.server.certificate_list[0]\n if upstream_cert.cn:\n altnames.append(_ip_or_dns_name(upstream_cert.cn))\n altnames.extend(upstream_cert.altnames)\n if upstream_cert.organization:\n organization = upstream_cert.organization\n\n # Add SNI or our local IP address.\n if conn_context.client.sni:\n altnames.append(_ip_or_dns_name(conn_context.client.sni))\n else:\n altnames.append(_ip_or_dns_name(conn_context.client.sockname[0]))\n\n # If we already know of a server address, include that in the SANs as well.\n if conn_context.server.address:\n altnames.append(_ip_or_dns_name(conn_context.server.address[0]))\n\n # only keep first occurrence of each hostname\n altnames = list(dict.fromkeys(altnames))\n\n # RFC 2818: If a subjectAltName extension of type dNSName is present, that MUST be used as the identity.\n # In other words, the Common Name is irrelevant then.\n cn = next((str(x.value) for x in altnames), None)\n return self.certstore.get_cert(cn, altnames, organization)\n\n\ndef _ip_or_dns_name(val: str) -> x509.GeneralName:\n \"\"\"Convert a string into either an x509.IPAddress or x509.DNSName object.\"\"\"\n try:\n ip = ipaddress.ip_address(val)\n except ValueError:\n return x509.DNSName(val.encode(\"idna\").decode())\n else:\n return x509.IPAddress(ip)\n", "path": "mitmproxy/addons/tlsconfig.py" } ]
diff --git a/CHANGELOG.md b/CHANGELOG.md index a0a31038c6..1df19bb268 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,8 @@ ([#6767](https://github.com/mitmproxy/mitmproxy/pull/6767), @txrp0x9) * Fix compatibility with older cryptography versions and silence a DeprecationWarning on Python <3.11. ([#6790](https://github.com/mitmproxy/mitmproxy/pull/6790), @mhils) +* Fix a bug when proxying unicode domains. + ([#6796](https://github.com/mitmproxy/mitmproxy/pull/6796), @mhils) ## 07 March 2024: mitmproxy 10.2.4 diff --git a/mitmproxy/addons/tlsconfig.py b/mitmproxy/addons/tlsconfig.py index 63f876a0da..20d6aa8e1b 100644 --- a/mitmproxy/addons/tlsconfig.py +++ b/mitmproxy/addons/tlsconfig.py @@ -525,6 +525,6 @@ def _ip_or_dns_name(val: str) -> x509.GeneralName: try: ip = ipaddress.ip_address(val) except ValueError: - return x509.DNSName(val) + return x509.DNSName(val.encode("idna").decode()) else: return x509.IPAddress(ip) diff --git a/test/mitmproxy/addons/test_tlsconfig.py b/test/mitmproxy/addons/test_tlsconfig.py index d209fd91eb..f4a7bf9ea9 100644 --- a/test/mitmproxy/addons/test_tlsconfig.py +++ b/test/mitmproxy/addons/test_tlsconfig.py @@ -138,12 +138,12 @@ def test_get_cert(self, tdata): ) # And now we also incorporate SNI. - ctx.client.sni = "sni.example" + ctx.client.sni = "🌈.sni.example" entry = ta.get_cert(ctx) assert entry.cert.altnames == x509.GeneralNames( [ x509.DNSName("example.mitmproxy.org"), - x509.DNSName("sni.example"), + x509.DNSName("xn--og8h.sni.example"), x509.DNSName("server-address.example"), ] )
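As a quick sanity check of the patch, the sketch below copies the fixed `_ip_or_dns_name` helper into a standalone script (it does not import mitmproxy); the printed reprs are indicative only.

```python
import ipaddress

from cryptography import x509


def _ip_or_dns_name(val: str) -> x509.GeneralName:
    """Standalone copy of the patched helper, for illustration only."""
    try:
        ip = ipaddress.ip_address(val)
    except ValueError:
        # not an IP literal: treat it as a hostname and convert to an A-label
        return x509.DNSName(val.encode("idna").decode())
    else:
        return x509.IPAddress(ip)


print(_ip_or_dns_name("127.0.0.1"))       # an x509.IPAddress
print(_ip_or_dns_name("example.com"))     # an x509.DNSName, name unchanged
print(_ip_or_dns_name("tt.广西阀门.net"))  # an x509.DNSName carrying the xn-- A-label
```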
pytorch__pytorch-4384
Profiler for Python 2.7

Running `prof = torch.autograd.profiler.load_nvprof('trace_name.prof')` on Python 2.7 causes an error:
`NameError: global name 'FileNotFoundError' is not defined`
and according to [here](https://github.com/philkr/lpo/issues/5#issuecomment-100596671), FileNotFoundError apparently doesn't exist in python 2.7.

A simple solution that worked for me is to add to the beginning of torch/autograd/profiler.py file the following lines:

```
try:
    FileNotFoundError
except NameError:  #py2
    FileNotFoundError = IOError
```

Which I got from [here](https://stackoverflow.com/a/21368622/850760).

PS: I can create a PR if you agree with the solution.

Cheers,
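The suggested shim only has to run once at module import time. Here is a minimal, self-contained sketch of the aliasing pattern; the `read_trace` helper is illustrative and not part of torch.

```python
# Python 3 defines FileNotFoundError (a subclass of OSError); Python 2 does not,
# so merely referencing the name there raises NameError. Aliasing it to IOError
# keeps `except FileNotFoundError:` working under both interpreters.
try:
    FileNotFoundError
except NameError:  # Python 2
    FileNotFoundError = IOError


def read_trace(path):
    # illustrative stand-in for load_nvprof-style file access
    try:
        with open(path) as f:
            return f.read()
    except FileNotFoundError:
        return None


print(read_trace("does_not_exist.prof"))  # prints None on Python 2 and 3 alike
```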
[ { "content": "import torch\nimport subprocess\nimport os\nimport sys\nimport copy\nimport tempfile\nimport itertools\nfrom collections import defaultdict, namedtuple\n\n\nclass range(object):\n def __init__(self, name):\n self.name = name\n\n def __enter__(self):\n torch.autograd._push_range(self.name)\n\n def __exit__(self, *args):\n torch.autograd._pop_range()\n return False\n\n\nclass EventList(list):\n \"\"\"A list of Events (for pretty printing)\"\"\"\n def __init__(self, *args, **kwargs):\n super(EventList, self).__init__(*args, **kwargs)\n\n def __str__(self):\n return self.table()\n\n def table(self, sort_by=None):\n \"\"\"Prints an EventList as a nicely formatted table.\n\n Arguments:\n sort_by (str, optional): Attribute used to sort entries. By default\n they are printed in the same order as they were registered.\n Valid keys include: ``cpu_time``, ``cuda_time``, ``cpu_time_total``,\n ``cuda_time_total``, ``count``.\n\n Returns:\n A string containing the table.\n \"\"\"\n return build_table(self, sort_by)\n\n def export_chrome_trace(self, path):\n \"\"\"Exports an EventList as a Chrome tracing tools file.\n\n The checkpoint can be later loaded and inspected under ``chrome://tracing`` URL.\n\n Arguments:\n path (str): Path where the trace will be written.\n \"\"\"\n import json\n with open(path, 'w') as f:\n chrome_events = []\n next_id = 0\n for evt in self:\n chrome_events.append(dict(\n name=evt.name,\n ph='X',\n ts=evt.cpu_interval.start,\n dur=evt.cpu_interval.elapsed_us(),\n tid=evt.thread,\n pid='CPU functions',\n args={},\n ))\n for k in evt.kernels:\n # 's' and 'f' draw Flow arrows from\n # the CPU launch to the GPU kernel\n chrome_events.append(dict(\n name=evt.name,\n ph='s',\n ts=evt.cpu_interval.start,\n tid=evt.thread,\n pid='CPU functions',\n id=next_id,\n cat='cpu_to_cuda',\n args={},\n ))\n chrome_events.append(dict(\n name=k.name,\n ph='f',\n ts=k.interval.start,\n tid=k.device,\n pid='CUDA functions',\n id=next_id,\n cat='cpu_to_cuda',\n args={},\n ))\n chrome_events.append(dict(\n name=k.name,\n ph='X',\n ts=k.interval.start,\n dur=k.interval.elapsed_us(),\n tid=k.device,\n pid='CUDA functions',\n args={},\n ))\n next_id += 1\n\n json.dump(chrome_events, f)\n\n def key_averages(self):\n \"\"\"Averages all function events over their keys.\n\n Returns:\n An EventList containing FunctionEventAvg objects.\n \"\"\"\n stats = defaultdict(FunctionEventAvg)\n for evt in self:\n stats[evt.key] += evt\n return EventList(stats.values())\n\n def total_average(self):\n \"\"\"Averages all events.\n\n Returns:\n A FunctionEventAvg object.\n \"\"\"\n total_stat = FunctionEventAvg()\n for evt in self:\n total_stat += evt\n total_stat.key = None\n total_stat.key = 'Total'\n return total_stat\n\n\nclass profile(object):\n \"\"\"Context manager that manages autograd profiler state and holds a summary of results.\n\n Arguments:\n enabled (bool, optional): Setting this to False makes this context manager a no-op.\n Default: ``True``.\n\n use_cuda (bool, optional): Enables timing of CUDA events as well using the cudaEvent API.\n Adds approximately 4us of overhead to each tensor operation.\n Default: ``False``\n\n .. warning:\n This context managers should not be called recursively, i.e. at most one\n instance should be enabled at any given time.\n\n Example:\n >>> x = Variable(torch.randn(1, 1), requires_grad=True)\n >>> with torch.autograd.profiler.profile() as prof:\n ... y = x ** 2\n ... y.backward()\n >>> # NOTE: some columns were removed for brevity\n ... 
print(prof)\n ------------------------------------- --------------- ---------------\n Name CPU time CUDA time\n ------------------------------------- --------------- ---------------\n PowConstant 142.036us 0.000us\n N5torch8autograd9GraphRootE 63.524us 0.000us\n PowConstantBackward 184.228us 0.000us\n MulConstant 50.288us 0.000us\n PowConstant 28.439us 0.000us\n Mul 20.154us 0.000us\n N5torch8autograd14AccumulateGradE 13.790us 0.000us\n N5torch8autograd5CloneE 4.088us 0.000us\n \"\"\"\n\n def __init__(self, enabled=True, use_cuda=False):\n self.enabled = enabled\n self.use_cuda = use_cuda\n self.function_events = None\n if not self.enabled:\n return\n self.entered = False\n\n def __enter__(self):\n if not self.enabled:\n return\n if self.entered:\n raise RuntimeError(\"autograd profiler traces are not reentrant\")\n self.entered = True\n profiler_kind = torch.autograd.ProfilerState.CUDA if self.use_cuda \\\n else torch.autograd.ProfilerState.CPU\n torch.autograd._enable_profiler(profiler_kind)\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n if not self.enabled:\n return\n records = torch.autograd._disable_profiler()\n self.function_events = EventList(parse_cpu_trace(records))\n return False\n\n def __repr__(self):\n if self.function_events is None:\n return '<unfinished torch.autograd.profile>'\n return repr(self.function_events)\n\n def __str__(self):\n if self.function_events is None:\n return '<unfinished torch.autograd.profile>'\n return str(self.function_events)\n\n def table(self, sort_by=None):\n if self.function_events is None:\n raise RuntimeError(\"can't export a trace that didn't finish running\")\n return self.function_events.table(sort_by)\n table.__doc__ = EventList.table.__doc__\n\n def export_chrome_trace(self, path):\n if self.function_events is None:\n raise RuntimeError(\"can't export a trace that didn't finish running\")\n return self.function_events.export_chrome_trace(path)\n export_chrome_trace.__doc__ = EventList.export_chrome_trace.__doc__\n\n def key_averages(self):\n if self.function_events is None:\n raise RuntimeError(\"can't average a trace that didn't finish running\")\n return self.function_events.key_averages()\n key_averages.__doc__ = EventList.key_averages.__doc__\n\n def total_average(self):\n if self.function_events is None:\n raise RuntimeError(\"can't average a trace that didn't finish running\")\n return self.function_events.total_average()\n total_average.__doc__ = EventList.total_average.__doc__\n\n\nclass emit_nvtx(object):\n \"\"\"Context manager that makes every autograd operation emit an NVTX range.\n\n It is useful when running the program under nvprof::\n\n nvprof --profile-from-start off -o trace_name.prof -- <regular command here>\n\n Unfortunately, there's no way to force nvprof to flush the data it collected\n to disk, so for CUDA profiling one has to use this context manager to annotate\n nvprof traces and wait for the process to exit before inspecting them.\n Then, either NVIDIA Visual Profiler (nvvp) can be used to visualize the timeline, or\n :func:`torch.autograd.profiler.load_nvprof` can load the results for inspection\n e.g. in Python REPL.\n\n .. warning:\n This context manager should not be called recursively, i.e. at most one\n instance should be enabled at any given time.\n\n Arguments:\n enabled (bool, optional): Setting this to False makes this context manager a no-op.\n Default: ``True``.\n\n Example:\n >>> with torch.cuda.profiler.profile():\n ... model(x) # Warmup CUDA memory allocator and profiler\n ... 
with torch.autograd.profiler.emit_nvtx():\n ... model(x)\n \"\"\"\n def __init__(self, enabled=True):\n self.enabled = enabled\n self.entered = False\n\n def __enter__(self):\n if not self.enabled:\n return\n if self.entered:\n raise RuntimeError(\"NVTX annotation context manager is not reentrant\")\n self.entered = True\n torch.cuda.synchronize()\n torch.autograd._enable_profiler(torch.autograd.ProfilerState.NVTX)\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n if not self.enabled:\n return\n torch.cuda.synchronize()\n torch.autograd._disable_profiler()\n return False\n\n\ndef load_nvprof(path):\n \"\"\"Opens an nvprof trace file and parses autograd annotations.\n\n Arguments:\n path (str): path to nvprof trace\n \"\"\"\n return EventList(parse_nvprof_trace(path))\n\n\n################################################################################\n# FunctionEvent\n\ndef format_time(time_us):\n \"\"\"Defines how to format time in FunctionEvent\"\"\"\n return '{:.3f}us'.format(time_us)\n\n\ndef attr_formatter(name):\n return property(lambda self: format_time(getattr(self, name)))\n\n\nclass FormattedTimesMixin(object):\n \"\"\"Helpers for FunctionEvent and FunctionEventAvg.\n\n The subclass should define `*_time_total` and `count` attributes.\n \"\"\"\n cpu_time_str = attr_formatter('cpu_time')\n cuda_time_str = attr_formatter('cuda_time')\n cpu_time_total_str = attr_formatter('cpu_time_total')\n cuda_time_total_str = attr_formatter('cuda_time_total')\n\n @property\n def cpu_time(self):\n return 0.0 if self.count == 0 else 1.0 * self.cpu_time_total / self.count\n\n @property\n def cuda_time(self):\n return 0.0 if self.count == 0 else 1.0 * self.cuda_time_total / self.count\n\n\nclass Interval(object):\n def __init__(self, start, end):\n self.start = start\n self.end = end\n\n def elapsed_us(self):\n return self.end - self.start\n\n\nclass Kernel(object):\n def __init__(self, name, device, interval):\n self.name = name\n self.device = device\n self.interval = interval\n\n\n# TODO: record TID too\nclass FunctionEvent(FormattedTimesMixin):\n \"\"\"Profiling information about a single function.\"\"\"\n def __init__(self, id, name, thread, cpu_start, cpu_end):\n self.id = id\n self.name = name\n self.cpu_interval = Interval(cpu_start, cpu_end)\n self.thread = thread\n self.kernels = []\n self.count = 1\n\n def append_kernel(self, name, device, start, end):\n self.kernels.append(Kernel(name, device, Interval(start, end)))\n\n @property\n def cuda_time_total(self):\n return sum(kinfo.interval.elapsed_us() for kinfo in self.kernels)\n\n @property\n def cpu_time_total(self):\n return self.cpu_interval.elapsed_us()\n\n @property\n def key(self):\n return self.name\n\n def __repr__(self):\n return '<FunctionEvent id={} cpu_time={} cuda_time={} name={} thread={}>'.format(\n self.id, self.cpu_time_str, self.cuda_time_str, self.name, self.thread)\n\n\nclass FunctionEventAvg(FormattedTimesMixin):\n \"\"\"Used to average stats over multiple FunctionEvent objects.\"\"\"\n def __init__(self):\n self.key = None\n self.count = self.cpu_time_total = self.cuda_time_total = 0\n\n def __iadd__(self, other):\n if self.key is None:\n self.key = other.key\n assert isinstance(other, FunctionEvent)\n assert other.key == self.key\n self.cpu_time_total += other.cpu_time\n self.cuda_time_total += other.cuda_time\n self.count += 1\n return self\n\n def __repr__(self):\n return '<FunctionEventAvg cpu_time={} cuda_time={} key={}>'.format(\n self.cpu_time_str, self.cuda_time_str, 
self.key)\n\n\n################################################################################\n# Utilities\n\ndef demangle(name):\n \"\"\"Demangle a C++ identifier using c++filt\"\"\"\n try:\n with open(os.devnull, 'w') as devnull:\n return subprocess.check_output(['c++filt', '-n', name], stderr=devnull).rstrip().decode(\"ascii\")\n except subprocess.CalledProcessError:\n return name\n\n\nclass StringTable(defaultdict):\n def __missing__(self, key):\n self[key] = demangle(key)\n return self[key]\n\n\n################################################################################\n# CPU checkpoints\n\ndef parse_cpu_trace(thread_records):\n next_id = 0\n start_record = None\n cuda_records = {}\n functions = []\n record_stack = []\n string_table = StringTable()\n\n # cuda start events and the overall profiler start event don't happen\n # at exactly the same time because we need to record an event on each device\n # and each record takes ~4us. So we adjust here by the difference\n # adding the difference in CPU time between the profiler start event\n # and the CPU time of the cuda start event for the device\n def adjusted_time(cuda_record):\n assert cuda_record.device() != -1\n cuda_time_0 = cuda_records[cuda_record.device()]\n return cuda_time_0.cuda_elapsed_us(cuda_record) + start_record.cpu_elapsed_us(cuda_time_0)\n\n # '__start_profile' is not guarenteed to be first, so we must find it here\n for record in itertools.chain(*thread_records):\n if record.name() == '__start_profile':\n start_record = record\n elif record.name() == '__cuda_start_event':\n assert record.device() != -1\n cuda_records[record.device()] = record\n assert start_record is not None\n\n for record in itertools.chain(*thread_records):\n if record.kind() == 'mark':\n continue\n elif record.kind() == 'push':\n record_stack.append((next_id, record))\n next_id += 1\n elif record.kind() == 'pop':\n function_id, start = record_stack.pop()\n fe = FunctionEvent(\n id=function_id,\n name=string_table[start.name()],\n thread=start.thread_id(),\n cpu_start=start_record.cpu_elapsed_us(start),\n cpu_end=start_record.cpu_elapsed_us(record))\n if start.has_cuda():\n cuda_start = adjusted_time(start)\n cuda_end = adjusted_time(record)\n fe.append_kernel(start.name(),\n start.device(),\n cuda_start,\n cuda_end)\n functions.append(fe)\n\n functions.sort(key=lambda evt: evt.cpu_interval.start)\n return functions\n\n\n################################################################################\n# CUDA checkpoints\n\nclass EnforceUnique(object):\n \"\"\"Raises an error if a key is seen more than once.\"\"\"\n def __init__(self):\n self.seen = set()\n\n def see(self, *key):\n if key in self.seen:\n raise RuntimeError('duplicate key: ' + str(key))\n self.seen.add(key)\n\n\ndef parse_nvprof_trace(path):\n import sqlite3\n conn = sqlite3.connect(path)\n conn.row_factory = sqlite3.Row\n\n # Parse strings table\n strings = {}\n for r in conn.execute(\"SELECT _id_ as id, value FROM StringTable\"):\n strings[r[\"id\"]] = demangle(r[\"value\"])\n\n # First, find all functions and create FunctionEvents for them\n marker_query = \"\"\"\n SELECT\n start.id AS marker_id, start.name, start.timestamp AS start_time, end.timestamp AS end_time\n FROM\n CUPTI_ACTIVITY_KIND_MARKER AS start INNER JOIN CUPTI_ACTIVITY_KIND_MARKER AS end\n ON start.id = end.id\n WHERE\n start.name != 0 AND end.name = 0\n \"\"\"\n functions = []\n functions_map = {}\n unique = EnforceUnique()\n for row in conn.execute(marker_query):\n unique.see(row['marker_id'])\n evt = 
FunctionEvent(id=row['marker_id'],\n name=strings[row['name']],\n cpu_start=row['start_time'],\n cpu_end=row['end_time'],\n thread=0) # TODO: find in sqlite database\n functions.append(evt)\n functions_map[evt.id] = evt\n\n # Now, correlate all kernels with FunctionEvents\n kernel_query = \"\"\"\n SELECT\n start.id AS marker_id, start.name, start.timestamp, end.timestamp,\n runtime._id_ AS runtime_id, runtime.cbid, runtime.start AS runtime_start, runtime.end AS runtime_end,\n kernel.start AS kernel_start, kernel.end AS kernel_end, kernel.name AS kernel_name\n FROM\n CUPTI_ACTIVITY_KIND_MARKER AS start\n INNER JOIN CUPTI_ACTIVITY_KIND_MARKER AS end\n ON start.id = end.id\n INNER JOIN CUPTI_ACTIVITY_KIND_RUNTIME as runtime\n ON (start.timestamp < runtime.start AND runtime.end < end.timestamp)\n INNER JOIN CUPTI_ACTIVITY_KIND_CONCURRENT_KERNEL AS kernel\n ON kernel.correlationId = runtime.correlationId\n \"\"\"\n unique = EnforceUnique()\n for row in conn.execute(kernel_query):\n unique.see(row['marker_id'], row['runtime_id'])\n assert row['cbid'] == 13 # 13 == Launch\n evt = functions_map[row['marker_id']]\n evt.append_kernel(row['kernel_name'],\n 0,\n row['kernel_start'],\n row['kernel_end'])\n\n functions.sort(key=lambda evt: evt.start)\n return functions\n\n\n################################################################################\n# Pretty printer\n\ndef build_table(events, sort_by=None, header=None):\n \"\"\"Prints a summary of events (which can be a list of FunctionEvent or FunctionEventAvg).\"\"\"\n if sort_by is not None:\n events = sorted(events, key=lambda evt: getattr(evt, sort_by))\n\n max_name_length = max(len(evt.key) for evt in events)\n max_name_length += 4 # Add some nice padding\n col_width = 15\n col_format = ' {: >' + str(col_width) + '}'\n row_format = '{: <' + str(max_name_length) + '}' + col_format * 5\n header_sep = '-' * max_name_length + (' ' + '-' * col_width) * 5\n\n # Have to use a list because nonlocal is Py3 only...\n result = ['']\n\n def append(s):\n result[0] += s\n result[0] += '\\n'\n\n # Actual printing\n if header is not None:\n line_length = max_name_length + (col_width + 2) * 5\n append('=' * line_length)\n append(header)\n append(header_sep)\n append(row_format.format('Name', 'CPU time', 'CUDA time', 'Calls', 'CPU total', 'CUDA total'))\n append(header_sep)\n for evt in events:\n append(row_format.format(evt.key, evt.cpu_time_str, evt.cuda_time_str,\n evt.count, evt.cpu_time_total_str, evt.cuda_time_total_str))\n\n return result[0]\n", "path": "torch/autograd/profiler.py" } ]
[ { "content": "import torch\nimport subprocess\nimport os\nimport sys\nimport copy\nimport tempfile\nimport itertools\nfrom collections import defaultdict, namedtuple\n\ntry:\n FileNotFoundError\nexcept NameError:\n # py2.7\n FileNotFoundError = IOError\n\n\nclass range(object):\n def __init__(self, name):\n self.name = name\n\n def __enter__(self):\n torch.autograd._push_range(self.name)\n\n def __exit__(self, *args):\n torch.autograd._pop_range()\n return False\n\n\nclass EventList(list):\n \"\"\"A list of Events (for pretty printing)\"\"\"\n def __init__(self, *args, **kwargs):\n super(EventList, self).__init__(*args, **kwargs)\n\n def __str__(self):\n return self.table()\n\n def table(self, sort_by=None):\n \"\"\"Prints an EventList as a nicely formatted table.\n\n Arguments:\n sort_by (str, optional): Attribute used to sort entries. By default\n they are printed in the same order as they were registered.\n Valid keys include: ``cpu_time``, ``cuda_time``, ``cpu_time_total``,\n ``cuda_time_total``, ``count``.\n\n Returns:\n A string containing the table.\n \"\"\"\n return build_table(self, sort_by)\n\n def export_chrome_trace(self, path):\n \"\"\"Exports an EventList as a Chrome tracing tools file.\n\n The checkpoint can be later loaded and inspected under ``chrome://tracing`` URL.\n\n Arguments:\n path (str): Path where the trace will be written.\n \"\"\"\n import json\n with open(path, 'w') as f:\n chrome_events = []\n next_id = 0\n for evt in self:\n chrome_events.append(dict(\n name=evt.name,\n ph='X',\n ts=evt.cpu_interval.start,\n dur=evt.cpu_interval.elapsed_us(),\n tid=evt.thread,\n pid='CPU functions',\n args={},\n ))\n for k in evt.kernels:\n # 's' and 'f' draw Flow arrows from\n # the CPU launch to the GPU kernel\n chrome_events.append(dict(\n name=evt.name,\n ph='s',\n ts=evt.cpu_interval.start,\n tid=evt.thread,\n pid='CPU functions',\n id=next_id,\n cat='cpu_to_cuda',\n args={},\n ))\n chrome_events.append(dict(\n name=k.name,\n ph='f',\n ts=k.interval.start,\n tid=k.device,\n pid='CUDA functions',\n id=next_id,\n cat='cpu_to_cuda',\n args={},\n ))\n chrome_events.append(dict(\n name=k.name,\n ph='X',\n ts=k.interval.start,\n dur=k.interval.elapsed_us(),\n tid=k.device,\n pid='CUDA functions',\n args={},\n ))\n next_id += 1\n\n json.dump(chrome_events, f)\n\n def key_averages(self):\n \"\"\"Averages all function events over their keys.\n\n Returns:\n An EventList containing FunctionEventAvg objects.\n \"\"\"\n stats = defaultdict(FunctionEventAvg)\n for evt in self:\n stats[evt.key] += evt\n return EventList(stats.values())\n\n def total_average(self):\n \"\"\"Averages all events.\n\n Returns:\n A FunctionEventAvg object.\n \"\"\"\n total_stat = FunctionEventAvg()\n for evt in self:\n total_stat += evt\n total_stat.key = None\n total_stat.key = 'Total'\n return total_stat\n\n\nclass profile(object):\n \"\"\"Context manager that manages autograd profiler state and holds a summary of results.\n\n Arguments:\n enabled (bool, optional): Setting this to False makes this context manager a no-op.\n Default: ``True``.\n\n use_cuda (bool, optional): Enables timing of CUDA events as well using the cudaEvent API.\n Adds approximately 4us of overhead to each tensor operation.\n Default: ``False``\n\n .. warning:\n This context managers should not be called recursively, i.e. at most one\n instance should be enabled at any given time.\n\n Example:\n >>> x = Variable(torch.randn(1, 1), requires_grad=True)\n >>> with torch.autograd.profiler.profile() as prof:\n ... y = x ** 2\n ... 
y.backward()\n >>> # NOTE: some columns were removed for brevity\n ... print(prof)\n ------------------------------------- --------------- ---------------\n Name CPU time CUDA time\n ------------------------------------- --------------- ---------------\n PowConstant 142.036us 0.000us\n N5torch8autograd9GraphRootE 63.524us 0.000us\n PowConstantBackward 184.228us 0.000us\n MulConstant 50.288us 0.000us\n PowConstant 28.439us 0.000us\n Mul 20.154us 0.000us\n N5torch8autograd14AccumulateGradE 13.790us 0.000us\n N5torch8autograd5CloneE 4.088us 0.000us\n \"\"\"\n\n def __init__(self, enabled=True, use_cuda=False):\n self.enabled = enabled\n self.use_cuda = use_cuda\n self.function_events = None\n if not self.enabled:\n return\n self.entered = False\n\n def __enter__(self):\n if not self.enabled:\n return\n if self.entered:\n raise RuntimeError(\"autograd profiler traces are not reentrant\")\n self.entered = True\n profiler_kind = torch.autograd.ProfilerState.CUDA if self.use_cuda \\\n else torch.autograd.ProfilerState.CPU\n torch.autograd._enable_profiler(profiler_kind)\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n if not self.enabled:\n return\n records = torch.autograd._disable_profiler()\n self.function_events = EventList(parse_cpu_trace(records))\n return False\n\n def __repr__(self):\n if self.function_events is None:\n return '<unfinished torch.autograd.profile>'\n return repr(self.function_events)\n\n def __str__(self):\n if self.function_events is None:\n return '<unfinished torch.autograd.profile>'\n return str(self.function_events)\n\n def table(self, sort_by=None):\n if self.function_events is None:\n raise RuntimeError(\"can't export a trace that didn't finish running\")\n return self.function_events.table(sort_by)\n table.__doc__ = EventList.table.__doc__\n\n def export_chrome_trace(self, path):\n if self.function_events is None:\n raise RuntimeError(\"can't export a trace that didn't finish running\")\n return self.function_events.export_chrome_trace(path)\n export_chrome_trace.__doc__ = EventList.export_chrome_trace.__doc__\n\n def key_averages(self):\n if self.function_events is None:\n raise RuntimeError(\"can't average a trace that didn't finish running\")\n return self.function_events.key_averages()\n key_averages.__doc__ = EventList.key_averages.__doc__\n\n def total_average(self):\n if self.function_events is None:\n raise RuntimeError(\"can't average a trace that didn't finish running\")\n return self.function_events.total_average()\n total_average.__doc__ = EventList.total_average.__doc__\n\n\nclass emit_nvtx(object):\n \"\"\"Context manager that makes every autograd operation emit an NVTX range.\n\n It is useful when running the program under nvprof::\n\n nvprof --profile-from-start off -o trace_name.prof -- <regular command here>\n\n Unfortunately, there's no way to force nvprof to flush the data it collected\n to disk, so for CUDA profiling one has to use this context manager to annotate\n nvprof traces and wait for the process to exit before inspecting them.\n Then, either NVIDIA Visual Profiler (nvvp) can be used to visualize the timeline, or\n :func:`torch.autograd.profiler.load_nvprof` can load the results for inspection\n e.g. in Python REPL.\n\n .. warning:\n This context manager should not be called recursively, i.e. 
at most one\n instance should be enabled at any given time.\n\n Arguments:\n enabled (bool, optional): Setting this to False makes this context manager a no-op.\n Default: ``True``.\n\n Example:\n >>> with torch.cuda.profiler.profile():\n ... model(x) # Warmup CUDA memory allocator and profiler\n ... with torch.autograd.profiler.emit_nvtx():\n ... model(x)\n \"\"\"\n def __init__(self, enabled=True):\n self.enabled = enabled\n self.entered = False\n\n def __enter__(self):\n if not self.enabled:\n return\n if self.entered:\n raise RuntimeError(\"NVTX annotation context manager is not reentrant\")\n self.entered = True\n torch.cuda.synchronize()\n torch.autograd._enable_profiler(torch.autograd.ProfilerState.NVTX)\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n if not self.enabled:\n return\n torch.cuda.synchronize()\n torch.autograd._disable_profiler()\n return False\n\n\ndef load_nvprof(path):\n \"\"\"Opens an nvprof trace file and parses autograd annotations.\n\n Arguments:\n path (str): path to nvprof trace\n \"\"\"\n return EventList(parse_nvprof_trace(path))\n\n\n################################################################################\n# FunctionEvent\n\ndef format_time(time_us):\n \"\"\"Defines how to format time in FunctionEvent\"\"\"\n return '{:.3f}us'.format(time_us)\n\n\ndef attr_formatter(name):\n return property(lambda self: format_time(getattr(self, name)))\n\n\nclass FormattedTimesMixin(object):\n \"\"\"Helpers for FunctionEvent and FunctionEventAvg.\n\n The subclass should define `*_time_total` and `count` attributes.\n \"\"\"\n cpu_time_str = attr_formatter('cpu_time')\n cuda_time_str = attr_formatter('cuda_time')\n cpu_time_total_str = attr_formatter('cpu_time_total')\n cuda_time_total_str = attr_formatter('cuda_time_total')\n\n @property\n def cpu_time(self):\n return 0.0 if self.count == 0 else 1.0 * self.cpu_time_total / self.count\n\n @property\n def cuda_time(self):\n return 0.0 if self.count == 0 else 1.0 * self.cuda_time_total / self.count\n\n\nclass Interval(object):\n def __init__(self, start, end):\n self.start = start\n self.end = end\n\n def elapsed_us(self):\n return self.end - self.start\n\n\nclass Kernel(object):\n def __init__(self, name, device, interval):\n self.name = name\n self.device = device\n self.interval = interval\n\n\n# TODO: record TID too\nclass FunctionEvent(FormattedTimesMixin):\n \"\"\"Profiling information about a single function.\"\"\"\n def __init__(self, id, name, thread, cpu_start, cpu_end):\n self.id = id\n self.name = name\n self.cpu_interval = Interval(cpu_start, cpu_end)\n self.thread = thread\n self.kernels = []\n self.count = 1\n\n def append_kernel(self, name, device, start, end):\n self.kernels.append(Kernel(name, device, Interval(start, end)))\n\n @property\n def cuda_time_total(self):\n return sum(kinfo.interval.elapsed_us() for kinfo in self.kernels)\n\n @property\n def cpu_time_total(self):\n return self.cpu_interval.elapsed_us()\n\n @property\n def key(self):\n return self.name\n\n def __repr__(self):\n return '<FunctionEvent id={} cpu_time={} cuda_time={} name={} thread={}>'.format(\n self.id, self.cpu_time_str, self.cuda_time_str, self.name, self.thread)\n\n\nclass FunctionEventAvg(FormattedTimesMixin):\n \"\"\"Used to average stats over multiple FunctionEvent objects.\"\"\"\n def __init__(self):\n self.key = None\n self.count = self.cpu_time_total = self.cuda_time_total = 0\n\n def __iadd__(self, other):\n if self.key is None:\n self.key = other.key\n assert isinstance(other, FunctionEvent)\n 
assert other.key == self.key\n self.cpu_time_total += other.cpu_time\n self.cuda_time_total += other.cuda_time\n self.count += 1\n return self\n\n def __repr__(self):\n return '<FunctionEventAvg cpu_time={} cuda_time={} key={}>'.format(\n self.cpu_time_str, self.cuda_time_str, self.key)\n\n\n################################################################################\n# Utilities\n\ndef demangle(name):\n \"\"\"Demangle a C++ identifier using c++filt\"\"\"\n try:\n with open(os.devnull, 'w') as devnull:\n return subprocess.check_output(['c++filt', '-n', name], stderr=devnull).rstrip().decode(\"ascii\")\n except subprocess.CalledProcessError:\n return name\n\n\nclass StringTable(defaultdict):\n def __missing__(self, key):\n self[key] = demangle(key)\n return self[key]\n\n\n################################################################################\n# CPU checkpoints\n\ndef parse_cpu_trace(thread_records):\n next_id = 0\n start_record = None\n cuda_records = {}\n functions = []\n record_stack = []\n string_table = StringTable()\n\n # cuda start events and the overall profiler start event don't happen\n # at exactly the same time because we need to record an event on each device\n # and each record takes ~4us. So we adjust here by the difference\n # adding the difference in CPU time between the profiler start event\n # and the CPU time of the cuda start event for the device\n def adjusted_time(cuda_record):\n assert cuda_record.device() != -1\n cuda_time_0 = cuda_records[cuda_record.device()]\n return cuda_time_0.cuda_elapsed_us(cuda_record) + start_record.cpu_elapsed_us(cuda_time_0)\n\n # '__start_profile' is not guarenteed to be first, so we must find it here\n for record in itertools.chain(*thread_records):\n if record.name() == '__start_profile':\n start_record = record\n elif record.name() == '__cuda_start_event':\n assert record.device() != -1\n cuda_records[record.device()] = record\n assert start_record is not None\n\n for record in itertools.chain(*thread_records):\n if record.kind() == 'mark':\n continue\n elif record.kind() == 'push':\n record_stack.append((next_id, record))\n next_id += 1\n elif record.kind() == 'pop':\n function_id, start = record_stack.pop()\n fe = FunctionEvent(\n id=function_id,\n name=string_table[start.name()],\n thread=start.thread_id(),\n cpu_start=start_record.cpu_elapsed_us(start),\n cpu_end=start_record.cpu_elapsed_us(record))\n if start.has_cuda():\n cuda_start = adjusted_time(start)\n cuda_end = adjusted_time(record)\n fe.append_kernel(start.name(),\n start.device(),\n cuda_start,\n cuda_end)\n functions.append(fe)\n\n functions.sort(key=lambda evt: evt.cpu_interval.start)\n return functions\n\n\n################################################################################\n# CUDA checkpoints\n\nclass EnforceUnique(object):\n \"\"\"Raises an error if a key is seen more than once.\"\"\"\n def __init__(self):\n self.seen = set()\n\n def see(self, *key):\n if key in self.seen:\n raise RuntimeError('duplicate key: ' + str(key))\n self.seen.add(key)\n\n\ndef parse_nvprof_trace(path):\n import sqlite3\n conn = sqlite3.connect(path)\n conn.row_factory = sqlite3.Row\n\n # Parse strings table\n strings = {}\n for r in conn.execute(\"SELECT _id_ as id, value FROM StringTable\"):\n strings[r[\"id\"]] = demangle(r[\"value\"])\n\n # First, find all functions and create FunctionEvents for them\n marker_query = \"\"\"\n SELECT\n start.id AS marker_id, start.name, start.timestamp AS start_time, end.timestamp AS end_time\n FROM\n CUPTI_ACTIVITY_KIND_MARKER 
AS start INNER JOIN CUPTI_ACTIVITY_KIND_MARKER AS end\n ON start.id = end.id\n WHERE\n start.name != 0 AND end.name = 0\n \"\"\"\n functions = []\n functions_map = {}\n unique = EnforceUnique()\n for row in conn.execute(marker_query):\n unique.see(row['marker_id'])\n evt = FunctionEvent(id=row['marker_id'],\n name=strings[row['name']],\n cpu_start=row['start_time'],\n cpu_end=row['end_time'],\n thread=0) # TODO: find in sqlite database\n functions.append(evt)\n functions_map[evt.id] = evt\n\n # Now, correlate all kernels with FunctionEvents\n kernel_query = \"\"\"\n SELECT\n start.id AS marker_id, start.name, start.timestamp, end.timestamp,\n runtime._id_ AS runtime_id, runtime.cbid, runtime.start AS runtime_start, runtime.end AS runtime_end,\n kernel.start AS kernel_start, kernel.end AS kernel_end, kernel.name AS kernel_name\n FROM\n CUPTI_ACTIVITY_KIND_MARKER AS start\n INNER JOIN CUPTI_ACTIVITY_KIND_MARKER AS end\n ON start.id = end.id\n INNER JOIN CUPTI_ACTIVITY_KIND_RUNTIME as runtime\n ON (start.timestamp < runtime.start AND runtime.end < end.timestamp)\n INNER JOIN CUPTI_ACTIVITY_KIND_CONCURRENT_KERNEL AS kernel\n ON kernel.correlationId = runtime.correlationId\n \"\"\"\n unique = EnforceUnique()\n for row in conn.execute(kernel_query):\n unique.see(row['marker_id'], row['runtime_id'])\n assert row['cbid'] == 13 # 13 == Launch\n evt = functions_map[row['marker_id']]\n evt.append_kernel(row['kernel_name'],\n 0,\n row['kernel_start'],\n row['kernel_end'])\n\n functions.sort(key=lambda evt: evt.start)\n return functions\n\n\n################################################################################\n# Pretty printer\n\ndef build_table(events, sort_by=None, header=None):\n \"\"\"Prints a summary of events (which can be a list of FunctionEvent or FunctionEventAvg).\"\"\"\n if sort_by is not None:\n events = sorted(events, key=lambda evt: getattr(evt, sort_by))\n\n max_name_length = max(len(evt.key) for evt in events)\n max_name_length += 4 # Add some nice padding\n col_width = 15\n col_format = ' {: >' + str(col_width) + '}'\n row_format = '{: <' + str(max_name_length) + '}' + col_format * 5\n header_sep = '-' * max_name_length + (' ' + '-' * col_width) * 5\n\n # Have to use a list because nonlocal is Py3 only...\n result = ['']\n\n def append(s):\n result[0] += s\n result[0] += '\\n'\n\n # Actual printing\n if header is not None:\n line_length = max_name_length + (col_width + 2) * 5\n append('=' * line_length)\n append(header)\n append(header_sep)\n append(row_format.format('Name', 'CPU time', 'CUDA time', 'Calls', 'CPU total', 'CUDA total'))\n append(header_sep)\n for evt in events:\n append(row_format.format(evt.key, evt.cpu_time_str, evt.cuda_time_str,\n evt.count, evt.cpu_time_total_str, evt.cuda_time_total_str))\n\n return result[0]\n", "path": "torch/autograd/profiler.py" } ]
diff --git a/torch/autograd/profiler.py b/torch/autograd/profiler.py index 107d176430accf..1c9f8fd9d2b348 100644 --- a/torch/autograd/profiler.py +++ b/torch/autograd/profiler.py @@ -7,6 +7,12 @@ import itertools from collections import defaultdict, namedtuple +try: + FileNotFoundError +except NameError: + # py2.7 + FileNotFoundError = IOError + class range(object): def __init__(self, name):
open-telemetry__opentelemetry-python-contrib-98
EC2 resource detector hangs for a long time outside of an EC2 instance

**Describe your environment**

Describe any aspect of your environment relevant to the problem, including your Python version, [platform](https://docs.python.org/3/library/platform.html), version numbers of installed dependencies, information about your cloud hosting provider, etc. If you're reporting a problem with a specific version of a library in this repo, please check whether the problem has been fixed on main.

The environment I initially saw this in was a container running in Docker compose on an AWS EC2 instance, but I've been able to reproduce it on my laptop as well. I think it will show up in anything not directly running in AWS.

**Steps to reproduce**

Describe exactly how to reproduce the error. Include a code sample if applicable. The following code reproduced the issue on my laptop:

```python
from opentelemetry.sdk.extension.aws.resource.ec2 import AwsEc2ResourceDetector
from opentelemetry.sdk.resources import get_aggregated_resources

resource = get_aggregated_resources(
    detectors=[AwsEc2ResourceDetector()]
)
```

**What is the expected behavior?**

It should complete quickly (this is the behavior I see running on an EC2 instance).

**What is the actual behavior?** What did you see instead?

On my laptop, it will hang ~indefinitely. Note: one solution is just to remove the resource detector, but we'd like to be able to include it and just have it fail silently, which is the behavior we've seen in other resource detectors.

**Additional context**

I think the problem is here: https://github.com/open-telemetry/opentelemetry-python-contrib/blob/80969a06da77d1e616124de178d12a1ebe3ffe7f/sdk-extension/opentelemetry-sdk-extension-aws/src/opentelemetry/sdk/extension/aws/resource/ec2.py#L37

It looks like the request is using a 1000 _second_ timeout, which I suspect is intended to be a 1000 _millisecond_ timeout. At least with the server program I've been working on, that will block the startup of the program until the request completes. You can verify by running:

```
curl http://169.254.169.254/latest/api/token
```

which is one of the requests that the resource detector makes -- it should hang indefinitely as well.
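To make the seconds-versus-milliseconds point concrete, here is a small standalone sketch, not the detector's actual code: the function and constant names are invented, and the only assumption is an IMDSv2-style token request like the one shown in the `curl` command. `urllib`'s `timeout` is expressed in seconds, so capping the metadata call at about one second and swallowing connection errors keeps startup from blocking on hosts that are not EC2 instances.

```python
# Minimal sketch (not the library's implementation) of a metadata request that cannot
# stall application startup when run outside of EC2.
from typing import Optional
from urllib.request import Request, urlopen

_IMDS_TOKEN_URL = "http://169.254.169.254/latest/api/token"  # illustrative constant name


def fetch_imds_token(timeout_s: float = 1.0) -> Optional[str]:
    """Return an instance-metadata token, or None when the endpoint is unreachable."""
    request = Request(
        _IMDS_TOKEN_URL,
        headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
        method="PUT",
    )
    try:
        # urlopen's timeout is in SECONDS: passing 1000 allows a stall of roughly 17 minutes,
        # whereas a value meant as "1000 ms" should be written as 1.0.
        with urlopen(request, timeout=timeout_s) as response:
            return response.read().decode()
    except Exception:
        # Off EC2 the link-local address is unreachable, so fail quietly instead of hanging.
        return None
```

Whether the eventual fix shortens the timeout, catches the error, or both, the behavior the issue asks for is the same: a detector that fails silently rather than blocking the program.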
[ { "content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n__version__ = \"0.15.dev0\"\n", "path": "instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/version.py" } ]
[ { "content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n__version__ = \"0.15b0\"\n", "path": "instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/version.py" } ]
diff --git a/instrumentation/opentelemetry-instrumentation-botocore/setup.cfg b/instrumentation/opentelemetry-instrumentation-botocore/setup.cfg index ee7849c143..a299838577 100644 --- a/instrumentation/opentelemetry-instrumentation-botocore/setup.cfg +++ b/instrumentation/opentelemetry-instrumentation-botocore/setup.cfg @@ -40,13 +40,13 @@ package_dir= packages=find_namespace: install_requires = botocore ~= 1.0 - opentelemetry-api == 0.15.dev0 - opentelemetry-instrumentation == 0.15.dev0 + opentelemetry-api == 0.15b0 + opentelemetry-instrumentation == 0.15b0 [options.extras_require] test = moto ~= 1.0 - opentelemetry-test == 0.15.dev0 + opentelemetry-test == 0.15b0 [options.packages.find] where = src diff --git a/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/version.py b/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/version.py index e7b342d644..ff494d225a 100644 --- a/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/version.py +++ b/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "0.15.dev0" +__version__ = "0.15b0"
huggingface__diffusers-1052
Improve the precision of our integration tests

We currently have a rather low precision when testing our pipeline, due to two reasons:

1. Our reference is an image and not a numpy array. This means that when we created our reference image we lost float precision, which is unnecessary.
2. We only test for `.max() < 1e-2`. IMO we should test for `.max() < 1e-4` with the numpy arrays.

In my experiments across multiple devices I have **not** seen differences bigger than `.max() < 1e-4` when using full precision. IMO this could have also prevented: https://github.com/huggingface/diffusers/issues/902
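A minimal sketch of the tighter check being proposed, assuming the reference is stored as a full-precision `.npy` array rather than a PNG; the helper name and file path below are illustrative, not the repository's actual test utilities. The motivation is that a PNG round-trip quantizes each channel to 8 bits (steps of roughly 1/255, about 4e-3), which is what forced the loose `1e-2` tolerance in the first place.

```python
# Sketch of a numpy-based reference comparison with a tight tolerance.
import numpy as np


def assert_matches_reference(image: np.ndarray, reference_path: str, atol: float = 1e-4) -> None:
    """Compare a generated float image against a reference saved at full float precision."""
    expected = np.load(reference_path)  # no 8-bit quantization, unlike a decoded PNG
    assert image.shape == expected.shape, f"shape mismatch: {image.shape} vs {expected.shape}"
    max_diff = np.abs(expected - image).max()
    assert max_diff < atol, f"max absolute difference {max_diff} exceeds {atol}"


# The reference itself would be produced once from a trusted run, e.g.:
#     np.save("yellow_cat_sitting_on_a_park_bench.npy", image)
```

The diff further down in this row follows the same pattern, loading `.npy` references through a `load_numpy` helper and tightening the asserted tolerance.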
[ { "content": "# Copyright 2022 The HuggingFace Inc. team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport os\n\nfrom .deprecation_utils import deprecate\nfrom .import_utils import (\n ENV_VARS_TRUE_AND_AUTO_VALUES,\n ENV_VARS_TRUE_VALUES,\n USE_JAX,\n USE_TF,\n USE_TORCH,\n DummyObject,\n is_accelerate_available,\n is_flax_available,\n is_inflect_available,\n is_modelcards_available,\n is_onnx_available,\n is_scipy_available,\n is_tf_available,\n is_torch_available,\n is_transformers_available,\n is_unidecode_available,\n requires_backends,\n)\nfrom .logging import get_logger\nfrom .outputs import BaseOutput\n\n\nif is_torch_available():\n from .testing_utils import (\n floats_tensor,\n load_image,\n load_numpy,\n parse_flag_from_env,\n require_torch_gpu,\n slow,\n torch_all_close,\n torch_device,\n )\n\n\nlogger = get_logger(__name__)\n\n\nhf_cache_home = os.path.expanduser(\n os.getenv(\"HF_HOME\", os.path.join(os.getenv(\"XDG_CACHE_HOME\", \"~/.cache\"), \"huggingface\"))\n)\ndefault_cache_path = os.path.join(hf_cache_home, \"diffusers\")\n\n\nCONFIG_NAME = \"config.json\"\nWEIGHTS_NAME = \"diffusion_pytorch_model.bin\"\nFLAX_WEIGHTS_NAME = \"diffusion_flax_model.msgpack\"\nONNX_WEIGHTS_NAME = \"model.onnx\"\nHUGGINGFACE_CO_RESOLVE_ENDPOINT = \"https://huggingface.co\"\nDIFFUSERS_CACHE = default_cache_path\nDIFFUSERS_DYNAMIC_MODULE_NAME = \"diffusers_modules\"\nHF_MODULES_CACHE = os.getenv(\"HF_MODULES_CACHE\", os.path.join(hf_cache_home, \"modules\"))\n", "path": "src/diffusers/utils/__init__.py" } ]
[ { "content": "# Copyright 2022 The HuggingFace Inc. team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport os\n\nfrom .deprecation_utils import deprecate\nfrom .import_utils import (\n ENV_VARS_TRUE_AND_AUTO_VALUES,\n ENV_VARS_TRUE_VALUES,\n USE_JAX,\n USE_TF,\n USE_TORCH,\n DummyObject,\n is_accelerate_available,\n is_flax_available,\n is_inflect_available,\n is_modelcards_available,\n is_onnx_available,\n is_scipy_available,\n is_tf_available,\n is_torch_available,\n is_transformers_available,\n is_unidecode_available,\n requires_backends,\n)\nfrom .logging import get_logger\nfrom .outputs import BaseOutput\n\n\nif is_torch_available():\n from .testing_utils import (\n floats_tensor,\n load_hf_numpy,\n load_image,\n load_numpy,\n parse_flag_from_env,\n require_torch_gpu,\n slow,\n torch_all_close,\n torch_device,\n )\n\n\nlogger = get_logger(__name__)\n\n\nhf_cache_home = os.path.expanduser(\n os.getenv(\"HF_HOME\", os.path.join(os.getenv(\"XDG_CACHE_HOME\", \"~/.cache\"), \"huggingface\"))\n)\ndefault_cache_path = os.path.join(hf_cache_home, \"diffusers\")\n\n\nCONFIG_NAME = \"config.json\"\nWEIGHTS_NAME = \"diffusion_pytorch_model.bin\"\nFLAX_WEIGHTS_NAME = \"diffusion_flax_model.msgpack\"\nONNX_WEIGHTS_NAME = \"model.onnx\"\nHUGGINGFACE_CO_RESOLVE_ENDPOINT = \"https://huggingface.co\"\nDIFFUSERS_CACHE = default_cache_path\nDIFFUSERS_DYNAMIC_MODULE_NAME = \"diffusers_modules\"\nHF_MODULES_CACHE = os.getenv(\"HF_MODULES_CACHE\", os.path.join(hf_cache_home, \"modules\"))\n", "path": "src/diffusers/utils/__init__.py" } ]
diff --git a/src/diffusers/utils/__init__.py b/src/diffusers/utils/__init__.py index 12d731128385..7395f4edfa26 100644 --- a/src/diffusers/utils/__init__.py +++ b/src/diffusers/utils/__init__.py @@ -42,6 +42,7 @@ if is_torch_available(): from .testing_utils import ( floats_tensor, + load_hf_numpy, load_image, load_numpy, parse_flag_from_env, diff --git a/src/diffusers/utils/testing_utils.py b/src/diffusers/utils/testing_utils.py index bd3b08d54a1c..bf398e5b6fe5 100644 --- a/src/diffusers/utils/testing_utils.py +++ b/src/diffusers/utils/testing_utils.py @@ -139,6 +139,29 @@ def require_onnxruntime(test_case): return unittest.skipUnless(is_onnx_available(), "test requires onnxruntime")(test_case) +def load_numpy(arry: Union[str, np.ndarray]) -> np.ndarray: + if isinstance(arry, str): + if arry.startswith("http://") or arry.startswith("https://"): + response = requests.get(arry) + response.raise_for_status() + arry = np.load(BytesIO(response.content)) + elif os.path.isfile(arry): + arry = np.load(arry) + else: + raise ValueError( + f"Incorrect path or url, URLs must start with `http://` or `https://`, and {arry} is not a valid path" + ) + elif isinstance(arry, np.ndarray): + pass + else: + raise ValueError( + "Incorrect format used for numpy ndarray. Should be an url linking to an image, a local path, or a" + " ndarray." + ) + + return arry + + def load_image(image: Union[str, PIL.Image.Image]) -> PIL.Image.Image: """ Args: @@ -168,17 +191,13 @@ def load_image(image: Union[str, PIL.Image.Image]) -> PIL.Image.Image: return image -def load_numpy(path) -> np.ndarray: +def load_hf_numpy(path) -> np.ndarray: if not path.startswith("http://") or path.startswith("https://"): path = os.path.join( "https://huggingface.co/datasets/fusing/diffusers-testing/resolve/main", urllib.parse.quote(path) ) - response = requests.get(path) - response.raise_for_status() - array = np.load(BytesIO(response.content)) - - return array + return load_numpy(path) # --- pytest conf functions --- # diff --git a/tests/models/test_models_unet_2d.py b/tests/models/test_models_unet_2d.py index 548588918c88..20371708a4d8 100644 --- a/tests/models/test_models_unet_2d.py +++ b/tests/models/test_models_unet_2d.py @@ -21,7 +21,15 @@ import torch from diffusers import UNet2DConditionModel, UNet2DModel -from diffusers.utils import floats_tensor, load_numpy, logging, require_torch_gpu, slow, torch_all_close, torch_device +from diffusers.utils import ( + floats_tensor, + load_hf_numpy, + logging, + require_torch_gpu, + slow, + torch_all_close, + torch_device, +) from parameterized import parameterized from ..test_modeling_common import ModelTesterMixin @@ -423,7 +431,7 @@ def tearDown(self): def get_latents(self, seed=0, shape=(4, 4, 64, 64), fp16=False): dtype = torch.float16 if fp16 else torch.float32 - image = torch.from_numpy(load_numpy(self.get_file_format(seed, shape))).to(torch_device).to(dtype) + image = torch.from_numpy(load_hf_numpy(self.get_file_format(seed, shape))).to(torch_device).to(dtype) return image def get_unet_model(self, fp16=False, model_id="CompVis/stable-diffusion-v1-4"): @@ -439,7 +447,7 @@ def get_unet_model(self, fp16=False, model_id="CompVis/stable-diffusion-v1-4"): def get_encoder_hidden_states(self, seed=0, shape=(4, 77, 768), fp16=False): dtype = torch.float16 if fp16 else torch.float32 - hidden_states = torch.from_numpy(load_numpy(self.get_file_format(seed, shape))).to(torch_device).to(dtype) + hidden_states = torch.from_numpy(load_hf_numpy(self.get_file_format(seed, shape))).to(torch_device).to(dtype) 
return hidden_states @parameterized.expand( diff --git a/tests/models/test_models_vae.py b/tests/models/test_models_vae.py index f6333d6cd906..3da7b50e34f3 100644 --- a/tests/models/test_models_vae.py +++ b/tests/models/test_models_vae.py @@ -20,7 +20,7 @@ from diffusers import AutoencoderKL from diffusers.modeling_utils import ModelMixin -from diffusers.utils import floats_tensor, load_numpy, require_torch_gpu, slow, torch_all_close, torch_device +from diffusers.utils import floats_tensor, load_hf_numpy, require_torch_gpu, slow, torch_all_close, torch_device from parameterized import parameterized from ..test_modeling_common import ModelTesterMixin @@ -147,7 +147,7 @@ def tearDown(self): def get_sd_image(self, seed=0, shape=(4, 3, 512, 512), fp16=False): dtype = torch.float16 if fp16 else torch.float32 - image = torch.from_numpy(load_numpy(self.get_file_format(seed, shape))).to(torch_device).to(dtype) + image = torch.from_numpy(load_hf_numpy(self.get_file_format(seed, shape))).to(torch_device).to(dtype) return image def get_sd_vae_model(self, model_id="CompVis/stable-diffusion-v1-4", fp16=False): diff --git a/tests/pipelines/stable_diffusion/test_stable_diffusion_inpaint.py b/tests/pipelines/stable_diffusion/test_stable_diffusion_inpaint.py index 0a373ada68bc..f5a8b3cf9ecc 100644 --- a/tests/pipelines/stable_diffusion/test_stable_diffusion_inpaint.py +++ b/tests/pipelines/stable_diffusion/test_stable_diffusion_inpaint.py @@ -28,7 +28,7 @@ UNet2DModel, VQModel, ) -from diffusers.utils import floats_tensor, load_image, slow, torch_device +from diffusers.utils import floats_tensor, load_image, load_numpy, slow, torch_device from diffusers.utils.testing_utils import require_torch_gpu from PIL import Image from transformers import CLIPTextConfig, CLIPTextModel, CLIPTokenizer @@ -278,11 +278,10 @@ def test_stable_diffusion_inpaint_pipeline(self): "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" "/in_paint/overture-creations-5sI6fQgYIuo_mask.png" ) - expected_image = load_image( - "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" - "/in_paint/yellow_cat_sitting_on_a_park_bench.png" + expected_image = load_numpy( + "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/in_paint" + "/yellow_cat_sitting_on_a_park_bench.npy" ) - expected_image = np.array(expected_image, dtype=np.float32) / 255.0 model_id = "runwayml/stable-diffusion-inpainting" pipe = StableDiffusionInpaintPipeline.from_pretrained( @@ -307,7 +306,7 @@ def test_stable_diffusion_inpaint_pipeline(self): image = output.images[0] assert image.shape == (512, 512, 3) - assert np.abs(expected_image - image).max() < 1e-2 + assert np.abs(expected_image - image).max() < 1e-3 def test_stable_diffusion_inpaint_pipeline_fp16(self): init_image = load_image( @@ -318,11 +317,10 @@ def test_stable_diffusion_inpaint_pipeline_fp16(self): "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" "/in_paint/overture-creations-5sI6fQgYIuo_mask.png" ) - expected_image = load_image( - "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" - "/in_paint/yellow_cat_sitting_on_a_park_bench_fp16.png" + expected_image = load_numpy( + "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/in_paint" + "/yellow_cat_sitting_on_a_park_bench_fp16.npy" ) - expected_image = np.array(expected_image, dtype=np.float32) / 255.0 model_id = "runwayml/stable-diffusion-inpainting" pipe = 
StableDiffusionInpaintPipeline.from_pretrained( @@ -360,11 +358,10 @@ def test_stable_diffusion_inpaint_pipeline_pndm(self): "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" "/in_paint/overture-creations-5sI6fQgYIuo_mask.png" ) - expected_image = load_image( - "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" - "/in_paint/yellow_cat_sitting_on_a_park_bench_pndm.png" + expected_image = load_numpy( + "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/in_paint" + "/yellow_cat_sitting_on_a_park_bench_pndm.npy" ) - expected_image = np.array(expected_image, dtype=np.float32) / 255.0 model_id = "runwayml/stable-diffusion-inpainting" pndm = PNDMScheduler.from_config(model_id, subfolder="scheduler") @@ -388,4 +385,4 @@ def test_stable_diffusion_inpaint_pipeline_pndm(self): image = output.images[0] assert image.shape == (512, 512, 3) - assert np.abs(expected_image - image).max() < 1e-2 + assert np.abs(expected_image - image).max() < 1e-3 diff --git a/tests/pipelines/stable_diffusion/test_stable_diffusion_inpaint_legacy.py b/tests/pipelines/stable_diffusion/test_stable_diffusion_inpaint_legacy.py index d25342a35aea..81deba67f274 100644 --- a/tests/pipelines/stable_diffusion/test_stable_diffusion_inpaint_legacy.py +++ b/tests/pipelines/stable_diffusion/test_stable_diffusion_inpaint_legacy.py @@ -31,7 +31,7 @@ VQModel, ) from diffusers.utils import floats_tensor, load_image, slow, torch_device -from diffusers.utils.testing_utils import require_torch_gpu +from diffusers.utils.testing_utils import load_numpy, require_torch_gpu from PIL import Image from transformers import CLIPTextConfig, CLIPTextModel, CLIPTokenizer @@ -358,11 +358,10 @@ def test_stable_diffusion_inpaint_legacy_pipeline(self): "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" "/in_paint/overture-creations-5sI6fQgYIuo_mask.png" ) - expected_image = load_image( - "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" - "/in_paint/red_cat_sitting_on_a_park_bench.png" + expected_image = load_numpy( + "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/in_paint" + "/red_cat_sitting_on_a_park_bench.npy" ) - expected_image = np.array(expected_image, dtype=np.float32) / 255.0 model_id = "CompVis/stable-diffusion-v1-4" pipe = StableDiffusionInpaintPipeline.from_pretrained( @@ -389,7 +388,7 @@ def test_stable_diffusion_inpaint_legacy_pipeline(self): image = output.images[0] assert image.shape == (512, 512, 3) - assert np.abs(expected_image - image).max() < 1e-2 + assert np.abs(expected_image - image).max() < 1e-3 def test_stable_diffusion_inpaint_legacy_pipeline_k_lms(self): # TODO(Anton, Patrick) - I think we can remove this test soon @@ -401,11 +400,10 @@ def test_stable_diffusion_inpaint_legacy_pipeline_k_lms(self): "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" "/in_paint/overture-creations-5sI6fQgYIuo_mask.png" ) - expected_image = load_image( - "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main" - "/in_paint/red_cat_sitting_on_a_park_bench_k_lms.png" + expected_image = load_numpy( + "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/in_paint" + "/red_cat_sitting_on_a_park_bench_k_lms.npy" ) - expected_image = np.array(expected_image, dtype=np.float32) / 255.0 model_id = "CompVis/stable-diffusion-v1-4" lms = LMSDiscreteScheduler.from_config(model_id, 
subfolder="scheduler") @@ -434,7 +432,7 @@ def test_stable_diffusion_inpaint_legacy_pipeline_k_lms(self): image = output.images[0] assert image.shape == (512, 512, 3) - assert np.abs(expected_image - image).max() < 1e-2 + assert np.abs(expected_image - image).max() < 1e-3 def test_stable_diffusion_inpaint_legacy_intermediate_state(self): number_of_steps = 0
e2nIEE__pandapower-563
from_mpc failed to load the case generated by to_mpc

After checking the source code, I found that the to_mpc function saves the fields in a loose format. According to the from_mpc function, all the fields should be stored under a variable called "mpc" (the default name); however, the to_mpc function does not follow this, so from_mpc cannot load the case generated by to_mpc.
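A minimal sketch of the layout described above, assuming the case fields need to sit under a single variable named "mpc" inside the `.mat` file; the helper names are invented for illustration, and the diff below applies the same wrapping inside `to_mpc` itself.

```python
# Sketch: write the case under an "mpc" key so readers that expect that variable name
# (as from_mpc does by default, per the issue) can find the fields again.
from scipy.io import loadmat, savemat


def save_case(case: dict, filename: str) -> None:
    # bus, gen, branch, baseMVA, version, ... all live under the "mpc" variable.
    savemat(filename, {"mpc": case})


def case_file_is_loadable(filename: str) -> bool:
    # Without the wrapper the fields end up as top-level variables and this check fails.
    return "mpc" in loadmat(filename)
```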
[ { "content": "# -*- coding: utf-8 -*-\n\n# Copyright (c) 2016-2019 by University of Kassel and Fraunhofer Institute for Energy Economics\n# and Energy System Technology (IEE), Kassel. All rights reserved.\n\n\nimport copy\n\nimport numpy as np\nfrom scipy.io import savemat\n\nfrom pandapower.converter.pypower import to_ppc\n\ntry:\n import pplog as logging\nexcept ImportError:\n import logging\n\nlogger = logging.getLogger(__name__)\n\n\ndef to_mpc(net, filename=None, **kwargs):\n \"\"\"\n This function converts a pandapower net to a matpower case files (.mat) version 2.\n Note: python is 0-based while Matlab is 1-based.\n\n INPUT:\n **net** - The pandapower net.\n\n OPTIONAL:\n **filename** (str, None) - File path + name of the mat file which will be created. If None\n the mpc will only be returned\n\n ****kwargs** - please look at to_ppc() documentation\n\n EXAMPLE:\n import pandapower.converter as pc\n import pandapower.networks as pn\n net = pn.case9()\n pc.to_mpc(net, \"case9.mat\")\n\n \"\"\"\n ppc = to_ppc(net, **kwargs)\n\n mpc = _ppc2mpc(ppc)\n if filename is not None:\n # savemat\n savemat(filename, mpc)\n\n return mpc\n\n\ndef _ppc2mpc(ppc):\n \"\"\"\n Convert network in Pypower/Matpower format\n Convert 0-based python to 1-based Matlab\n\n **INPUT**:\n * net - The pandapower format network\n * filename - File path + name of the mat file which is created\n \"\"\"\n\n # convert to matpower\n # Matlab is one-based, so all entries (buses, lines, gens) have to start with 1 instead of 0\n mpc = copy.deepcopy(ppc)\n if len(np.where(mpc[\"bus\"][:, 0] == 0)[0]):\n mpc[\"bus\"][:, 0] = mpc[\"bus\"][:, 0] + 1\n mpc[\"gen\"][:, 0] = mpc[\"gen\"][:, 0] + 1\n mpc[\"branch\"][:, 0:2] = mpc[\"branch\"][:, 0:2] + 1\n # adjust for the matpower converter -> taps should be 0 when there is no transformer, but are 1\n mpc[\"branch\"][np.where(mpc[\"branch\"][:, 8] == 1), 8] = 0\n # version is a string\n mpc[\"version\"] = str(mpc[\"version\"])\n # baseMVA has to be a float instead of int\n mpc[\"baseMVA\"] = mpc[\"baseMVA\"] * 1.0\n return mpc\n\n\nif \"__main__\" == __name__:\n pass\n", "path": "pandapower/converter/matpower/to_mpc.py" } ]
[ { "content": "# -*- coding: utf-8 -*-\n\n# Copyright (c) 2016-2019 by University of Kassel and Fraunhofer Institute for Energy Economics\n# and Energy System Technology (IEE), Kassel. All rights reserved.\n\n\nimport copy\n\nimport numpy as np\nfrom scipy.io import savemat\n\nfrom pandapower.converter.pypower import to_ppc\n\ntry:\n import pplog as logging\nexcept ImportError:\n import logging\n\nlogger = logging.getLogger(__name__)\n\n\ndef to_mpc(net, filename=None, **kwargs):\n \"\"\"\n This function converts a pandapower net to a matpower case files (.mat) version 2.\n Note: python is 0-based while Matlab is 1-based.\n\n INPUT:\n **net** - The pandapower net.\n\n OPTIONAL:\n **filename** (str, None) - File path + name of the mat file which will be created. If None\n the mpc will only be returned\n\n ****kwargs** - please look at to_ppc() documentation\n\n EXAMPLE:\n import pandapower.converter as pc\n import pandapower.networks as pn\n net = pn.case9()\n pc.to_mpc(net, \"case9.mat\")\n\n \"\"\"\n ppc = to_ppc(net, **kwargs)\n\n mpc = dict()\n mpc[\"mpc\"] = _ppc2mpc(ppc)\n if filename is not None:\n # savemat\n savemat(filename, mpc)\n\n return mpc\n\n\ndef _ppc2mpc(ppc):\n \"\"\"\n Convert network in Pypower/Matpower format\n Convert 0-based python to 1-based Matlab\n\n **INPUT**:\n * net - The pandapower format network\n * filename - File path + name of the mat file which is created\n \"\"\"\n\n # convert to matpower\n # Matlab is one-based, so all entries (buses, lines, gens) have to start with 1 instead of 0\n mpc = copy.deepcopy(ppc)\n if len(np.where(mpc[\"bus\"][:, 0] == 0)[0]):\n mpc[\"bus\"][:, 0] = mpc[\"bus\"][:, 0] + 1\n mpc[\"gen\"][:, 0] = mpc[\"gen\"][:, 0] + 1\n mpc[\"branch\"][:, 0:2] = mpc[\"branch\"][:, 0:2] + 1\n # adjust for the matpower converter -> taps should be 0 when there is no transformer, but are 1\n mpc[\"branch\"][np.where(mpc[\"branch\"][:, 8] == 1), 8] = 0\n # version is a string\n mpc[\"version\"] = str(mpc[\"version\"])\n # baseMVA has to be a float instead of int\n mpc[\"baseMVA\"] = mpc[\"baseMVA\"] * 1.0\n return mpc\n\n\nif \"__main__\" == __name__:\n pass\n", "path": "pandapower/converter/matpower/to_mpc.py" } ]
diff --git a/pandapower/converter/matpower/to_mpc.py b/pandapower/converter/matpower/to_mpc.py index e345cb66a..8afac2ee9 100644 --- a/pandapower/converter/matpower/to_mpc.py +++ b/pandapower/converter/matpower/to_mpc.py @@ -42,7 +42,8 @@ def to_mpc(net, filename=None, **kwargs): """ ppc = to_ppc(net, **kwargs) - mpc = _ppc2mpc(ppc) + mpc = dict() + mpc["mpc"] = _ppc2mpc(ppc) if filename is not None: # savemat savemat(filename, mpc)
django-wiki__django-wiki-891
Translations out-of-date

I noticed the string "Search whole wiki..." has not been translated on my wiki because the translation file has not been updated since December 2017. Would you update the `django.po` file so I can work on the translation in Transifex? Thanks.
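For context, the catalog in `django.po` is typically refreshed with Django's `makemessages` command, which is what makes newly added source strings such as "Search whole wiki..." visible to translators. A hedged sketch of invoking it from Python follows; the settings module and locale list are placeholders, and in practice a maintainer may simply run `django-admin makemessages` from the package directory.

```python
# Illustrative sketch only: refresh the translation catalog so untranslated strings appear.
import os

import django
from django.core.management import call_command

# Placeholder settings module; run from the directory that contains the "locale/" folder.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")
django.setup()

call_command("makemessages", locale=["en"])
```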
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nfrom glob import glob\n\nfrom setuptools import find_packages, setup\n\nsys.path.append(\n os.path.join(os.path.dirname(__file__), 'src')\n)\n\n# noqa\nfrom wiki import __version__ # isort:skip # noqa\n\n\n# Utility function to read the README file.\n# Used for the long_description. It's nice, because now 1) we have a top level\n# README file and 2) it's easier to type in the README file than to put a raw\n# string in below ...\ndef get_path(fname):\n return os.path.join(os.path.dirname(__file__), fname)\n\n\ninstall_requirements = [\n \"Django>=1.11,<2.1\",\n \"bleach>=2.1,<2.2\",\n \"Pillow\",\n \"django-nyt>=1.1b1,<1.2\",\n \"django-mptt>=0.9,<0.10\",\n \"django-sekizai>=0.10\",\n \"sorl-thumbnail>=12,<13\",\n \"Markdown>=2.6,<2.7\",\n]\n\ntest_requirements = [\n 'django-functest>=1.0.3,<1.1',\n 'pytest>=3.4,<3.5',\n 'pytest-django>=3.1,<3.2',\n 'pytest-cov>=2.4,<2.5',\n 'pytest-pythonpath>=0.7,<0.8',\n]\n\ntest_lint_requirements = [\n 'flake8>=3.5,<3.6',\n]\n\nsetup_requirements = [\n 'pytest-runner',\n]\n\ndevelopment_requirements = test_requirements + test_lint_requirements\n\nextras_requirements = {\n 'devel': development_requirements,\n 'test': test_requirements,\n 'testlint': test_lint_requirements,\n}\n\nsetup(\n name=\"wiki\",\n version=__version__,\n author=\"Benjamin Bach\",\n author_email=\"[email protected]\",\n url=\"http://www.django-wiki.org\",\n description=\"A wiki system written for the Django framework.\",\n license=\"GPLv3\",\n keywords=[\"django\", \"wiki\", \"markdown\"],\n packages=find_packages('src'),\n package_dir={'': 'src'},\n py_modules=[os.path.splitext(os.path.basename(path))[0] for path in glob('src/*.py')],\n long_description=open('README.rst').read(),\n zip_safe=False,\n install_requires=install_requirements,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',\n 'Environment :: Web Environment',\n 'Framework :: Django',\n 'Intended Audience :: Developers',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy',\n 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries :: Application Frameworks',\n ],\n include_package_data=True,\n setup_requires=setup_requirements,\n tests_require=test_requirements,\n extras_require=extras_requirements,\n)\n", "path": "setup.py" } ]
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nfrom glob import glob\n\nfrom setuptools import find_packages, setup\n\nsys.path.append(\n os.path.join(os.path.dirname(__file__), 'src')\n)\n\n# noqa\nfrom wiki import __version__ # isort:skip # noqa\n\n\n# Utility function to read the README file.\n# Used for the long_description. It's nice, because now 1) we have a top level\n# README file and 2) it's easier to type in the README file than to put a raw\n# string in below ...\ndef get_path(fname):\n return os.path.join(os.path.dirname(__file__), fname)\n\n\ninstall_requirements = [\n \"Django>=1.11,<2.1\",\n \"bleach>=2.1,<2.2\",\n \"Pillow\",\n \"django-nyt>=1.1b1,<1.2\",\n \"django-mptt>=0.9,<0.10\",\n \"django-sekizai>=0.10\",\n \"sorl-thumbnail>=12,<13\",\n \"Markdown>=2.6,<2.7\",\n]\n\ntest_requirements = [\n 'django-functest>=1.0.2,<1.1',\n 'pytest>=3.4,<3.5',\n 'pytest-django>=3.1,<3.2',\n 'pytest-cov>=2.4,<2.5',\n 'pytest-pythonpath>=0.7,<0.8',\n]\n\ntest_lint_requirements = [\n 'flake8>=3.5,<3.6',\n]\n\nsetup_requirements = [\n 'pytest-runner',\n]\n\ndevelopment_requirements = test_requirements + test_lint_requirements + [\n 'pre-commit',\n]\n\nextras_requirements = {\n 'devel': development_requirements,\n 'test': test_requirements,\n 'testlint': test_lint_requirements,\n}\n\nsetup(\n name=\"wiki\",\n version=__version__,\n author=\"Benjamin Bach\",\n author_email=\"[email protected]\",\n url=\"http://www.django-wiki.org\",\n description=\"A wiki system written for the Django framework.\",\n license=\"GPLv3\",\n keywords=[\"django\", \"wiki\", \"markdown\"],\n packages=find_packages('src'),\n package_dir={'': 'src'},\n py_modules=[os.path.splitext(os.path.basename(path))[0] for path in glob('src/*.py')],\n long_description=open('README.rst').read(),\n zip_safe=False,\n install_requires=install_requirements,\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',\n 'Environment :: Web Environment',\n 'Framework :: Django',\n 'Intended Audience :: Developers',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy',\n 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries :: Application Frameworks',\n ],\n include_package_data=True,\n setup_requires=setup_requirements,\n tests_require=test_requirements,\n extras_require=extras_requirements,\n)\n", "path": "setup.py" } ]
diff --git a/setup.py b/setup.py index 97c52daef..55ec1efde 100755 --- a/setup.py +++ b/setup.py @@ -50,7 +50,9 @@ def get_path(fname): 'pytest-runner', ] -development_requirements = test_requirements + test_lint_requirements +development_requirements = test_requirements + test_lint_requirements + [ + 'pre-commit', +] extras_requirements = { 'devel': development_requirements, diff --git a/src/wiki/locale/en/LC_MESSAGES/django.po b/src/wiki/locale/en/LC_MESSAGES/django.po index 6e344e57f..48f6c1d10 100644 --- a/src/wiki/locale/en/LC_MESSAGES/django.po +++ b/src/wiki/locale/en/LC_MESSAGES/django.po @@ -3,13 +3,12 @@ # This file is distributed under the same license as the PACKAGE package. # FIRST AUTHOR <EMAIL@ADDRESS>, YEAR. # -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:19 #, fuzzy msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2017-12-11 00:56+0100\n" +"POT-Creation-Date: 2018-07-26 16:02+0200\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language-Team: LANGUAGE <[email protected]>\n" @@ -18,471 +17,463 @@ msgstr "" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" -#: admin.py:87 models/article.py:36 +#: admin.py:85 models/article.py:38 msgid "created" msgstr "" -#: apps.py:9 -msgid "Wiki notifications" -msgstr "" - -#: apps.py:15 -msgid "Wiki images" -msgstr "" - -#: apps.py:21 -msgid "Wiki attachments" +#: apps.py:12 +msgid "Wiki" msgstr "" -#: conf/settings.py:52 +#: conf/settings.py:50 msgid "Table of Contents" msgstr "" -#: core/plugins/base.py:65 +#: core/plugins/base.py:63 msgid "Settings for plugin" msgstr "" -#: forms.py:37 +#: forms.py:26 msgid "A 'slug' cannot consist solely of numbers." msgstr "" -#: forms.py:64 +#: forms.py:53 msgid "A slug may not begin with an underscore." msgstr "" -#: forms.py:67 +#: forms.py:56 msgid "'admin' is not a permitted slug name." msgstr "" -#: forms.py:82 +#: forms.py:71 #, python-format msgid "A deleted article with slug \"%s\" already exists." msgstr "" -#: forms.py:86 +#: forms.py:75 #, python-format msgid "A slug named \"%s\" already exists." msgstr "" -#: forms.py:95 +#: forms.py:84 msgid "This slug conflicts with an existing URL." msgstr "" -#: forms.py:130 +#: forms.py:119 msgid "Spam protection failed to find both a logged in user and an IP address." msgstr "" -#: forms.py:145 +#: forms.py:134 #, python-format msgid "" "Spam protection: You are only allowed to create or edit %(revisions)d " "article(s) per %(interval_name)s." msgstr "" -#: forms.py:163 +#: forms.py:152 msgid "minute" msgstr "" -#: forms.py:164 +#: forms.py:153 #, python-format msgid "%d minutes" msgstr "" -#: forms.py:173 +#: forms.py:162 msgid "hour" msgstr "" -#: forms.py:179 forms.py:207 forms.py:382 templates/wiki/dir.html:48 -#: templates/wiki/search.html:30 +#: forms.py:168 forms.py:196 forms.py:335 templates/wiki/dir.html:48 +#: templates/wiki/search.html:35 msgid "Title" msgstr "" -#: forms.py:181 +#: forms.py:170 msgid "Initial title of the article. May be overridden with revision titles." msgstr "" -#: forms.py:183 +#: forms.py:172 msgid "Type in some contents" msgstr "" -#: forms.py:185 +#: forms.py:174 msgid "" "This is just the initial contents of your article. After creating it, you " "can use more complex features like adding plugins, meta data, related " "articles etc..." 
msgstr "" -#: forms.py:191 +#: forms.py:180 msgid "Destination" msgstr "" -#: forms.py:193 +#: forms.py:182 msgid "Redirect pages" msgstr "" -#: forms.py:194 +#: forms.py:183 msgid "Create a redirect page for every moved article?" msgstr "" -#: forms.py:209 forms.py:389 +#: forms.py:198 forms.py:342 msgid "Contents" msgstr "" -#: forms.py:214 forms.py:394 +#: forms.py:203 forms.py:347 msgctxt "Revision comment" msgid "Summary" msgstr "" -#: forms.py:216 +#: forms.py:205 msgid "" "Give a short reason for your edit, which will be stated in the revision log." msgstr "" -#: forms.py:272 +#: forms.py:261 msgid "Article is missing title or has an invalid title" msgstr "" -#: forms.py:286 +#: forms.py:275 msgid "" "While you were editing, someone else changed the revision. Your contents " "have been automatically merged with the new contents. Please review the text " "below." msgstr "" -#: forms.py:289 +#: forms.py:278 msgid "No changes made. Nothing to save." msgstr "" -#: forms.py:329 +#: forms.py:305 msgid "Select an option" msgstr "" -#: forms.py:384 templates/wiki/dir.html:49 +#: forms.py:337 templates/wiki/dir.html:49 msgid "Slug" msgstr "" -#: forms.py:386 +#: forms.py:339 msgid "" "This will be the address where your article can be found. Use only " "alphanumeric characters and - or _.<br>Note: If you change the slug later " "on, links pointing to this article are <b>not</b> updated." msgstr "" -#: forms.py:395 +#: forms.py:348 msgid "Write a brief message for the article's history log." msgstr "" -#: forms.py:415 +#: forms.py:367 msgid "Yes, I am sure" msgstr "" -#: forms.py:418 templates/wiki/deleted.html:47 +#: forms.py:370 templates/wiki/deleted.html:47 msgid "Purge" msgstr "" -#: forms.py:420 +#: forms.py:372 msgid "" "Purge the article: Completely remove it (and all its contents) with no undo. " "Purging is a good idea if you want to free the slug such that users can " "create new articles in its place." msgstr "" -#: forms.py:427 plugins/attachments/forms.py:167 plugins/images/forms.py:69 +#: forms.py:379 plugins/attachments/forms.py:163 plugins/images/forms.py:66 msgid "You are not sure enough!" msgstr "" -#: forms.py:431 +#: forms.py:382 msgid "While you tried to delete this article, it was modified. TAKE CARE!" msgstr "" -#: forms.py:438 +#: forms.py:389 msgid "Lock article" msgstr "" -#: forms.py:439 +#: forms.py:390 msgid "Deny all users access to edit this article." msgstr "" -#: forms.py:442 +#: forms.py:393 msgid "Permissions" msgstr "" -#: forms.py:448 +#: forms.py:399 msgid "Owner" msgstr "" -#: forms.py:449 +#: forms.py:400 msgid "Enter the username of the owner." msgstr "" -#: forms.py:452 forms.py:507 plugins/notifications/util.py:14 +#: forms.py:403 forms.py:458 plugins/notifications/util.py:12 msgid "(none)" msgstr "" -#: forms.py:453 +#: forms.py:404 msgid "Group" msgstr "" -#: forms.py:459 +#: forms.py:410 msgid "Inherit permissions" msgstr "" -#: forms.py:460 +#: forms.py:411 msgid "" "Check here to apply the above permissions (excluding group and owner of the " "article) recursively to articles below this one." msgstr "" -#: forms.py:464 +#: forms.py:415 msgid "Inherit owner" msgstr "" -#: forms.py:465 +#: forms.py:416 msgid "" "Check here to apply the ownership setting recursively to articles below this " "one." msgstr "" -#: forms.py:469 +#: forms.py:420 msgid "Inherit group" msgstr "" -#: forms.py:470 +#: forms.py:421 msgid "" "Check here to apply the group setting recursively to articles below this one." 
msgstr "" -#: forms.py:475 +#: forms.py:426 msgid "Permission settings for the article were updated." msgstr "" -#: forms.py:477 +#: forms.py:428 msgid "Your permission settings were unchanged, so nothing saved." msgstr "" -#: forms.py:537 +#: forms.py:486 msgid "No user with that username" msgstr "" -#: forms.py:572 +#: forms.py:521 msgid "Article locked for editing" msgstr "" -#: forms.py:579 +#: forms.py:528 msgid "Article unlocked for editing" msgstr "" -#: forms.py:606 +#: forms.py:555 msgid "Filter..." msgstr "" -#: forms.py:616 templates/wiki/base_site.html:44 +#: forms.py:565 msgid "Search..." msgstr "" -#: forms.py:665 +#: forms.py:614 msgid "Passwords don't match" msgstr "" -#: models/article.py:29 models/pluginbase.py:161 -#: plugins/attachments/models.py:32 +#: models/article.py:30 models/pluginbase.py:165 +#: plugins/attachments/models.py:26 msgid "current revision" msgstr "" -#: models/article.py:32 +#: models/article.py:34 msgid "" "The revision being displayed for this article. If you need to do a roll-" "back, simply change the value of this field." msgstr "" -#: models/article.py:40 +#: models/article.py:42 msgid "modified" msgstr "" -#: models/article.py:41 +#: models/article.py:43 msgid "Article properties last modified" msgstr "" -#: models/article.py:44 +#: models/article.py:46 msgid "owner" msgstr "" -#: models/article.py:47 +#: models/article.py:49 msgid "" "The owner of the article, usually the creator. The owner always has both " "read and write access." msgstr "" -#: models/article.py:51 +#: models/article.py:53 msgid "group" msgstr "" -#: models/article.py:54 +#: models/article.py:56 msgid "" "Like in a UNIX file system, permissions can be given to a user according to " "group membership. Groups are handled through the Django auth system." 
msgstr "" -#: models/article.py:59 +#: models/article.py:61 msgid "group read access" msgstr "" -#: models/article.py:62 +#: models/article.py:64 msgid "group write access" msgstr "" -#: models/article.py:65 +#: models/article.py:67 msgid "others read access" msgstr "" -#: models/article.py:68 +#: models/article.py:70 msgid "others write access" msgstr "" -#: models/article.py:181 +#: models/article.py:178 #, python-format msgid "Article without content (%(id)d)" msgstr "" -#: models/article.py:186 +#: models/article.py:183 msgid "Can edit all articles and lock/unlock/restore" msgstr "" -#: models/article.py:187 +#: models/article.py:184 msgid "Can change ownership of any article" msgstr "" -#: models/article.py:188 +#: models/article.py:185 msgid "Can assign permissions to other users" msgstr "" -#: models/article.py:239 +#: models/article.py:260 msgid "content type" msgstr "" -#: models/article.py:241 +#: models/article.py:262 msgid "object ID" msgstr "" -#: models/article.py:250 +#: models/article.py:271 msgid "Article for object" msgstr "" -#: models/article.py:251 +#: models/article.py:272 msgid "Articles for object" msgstr "" -#: models/article.py:263 +#: models/article.py:284 msgid "revision number" msgstr "" -#: models/article.py:269 +#: models/article.py:290 msgid "IP address" msgstr "" -#: models/article.py:273 +#: models/article.py:294 msgid "user" msgstr "" -#: models/article.py:288 +#: models/article.py:309 #: plugins/attachments/templates/wiki/plugins/attachments/history.html:23 #: plugins/attachments/templates/wiki/plugins/attachments/index.html:25 #: plugins/attachments/templates/wiki/plugins/attachments/search.html:44 -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:69 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:68 #: templates/wiki/includes/revision_info.html:15 msgid "deleted" msgstr "" -#: models/article.py:292 -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:75 +#: models/article.py:313 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:74 #: templates/wiki/article.html:23 templates/wiki/includes/revision_info.html:21 msgid "locked" msgstr "" -#: models/article.py:333 models/pluginbase.py:40 models/urlpath.py:58 +#: models/article.py:351 models/pluginbase.py:44 models/urlpath.py:56 msgid "article" msgstr "" -#: models/article.py:336 +#: models/article.py:354 msgid "article contents" msgstr "" -#: models/article.py:341 +#: models/article.py:359 msgid "article title" msgstr "" -#: models/article.py:344 +#: models/article.py:362 msgid "" "Each revision contains a title field that must be filled out, even if the " "title has not changed" msgstr "" -#: models/pluginbase.py:81 +#: models/pluginbase.py:85 msgid "original article" msgstr "" -#: models/pluginbase.py:83 +#: models/pluginbase.py:87 msgid "Permissions are inherited from this article" msgstr "" -#: models/pluginbase.py:146 +#: models/pluginbase.py:150 msgid "A plugin was changed" msgstr "" -#: models/pluginbase.py:166 +#: models/pluginbase.py:171 msgid "" "The revision being displayed for this plugin. If you need to do a roll-back, " "simply change the value of this field." msgstr "" -#: models/urlpath.py:60 +#: models/urlpath.py:58 msgid "" "This field is automatically updated, but you need to populate it when " "creating a new URL path." 
msgstr "" -#: models/urlpath.py:67 +#: models/urlpath.py:65 msgid "slug" msgstr "" -#: models/urlpath.py:75 +#: models/urlpath.py:74 msgid "Position of URL path in the tree." msgstr "" -#: models/urlpath.py:79 +#: models/urlpath.py:78 msgid "Moved to" msgstr "" -#: models/urlpath.py:80 +#: models/urlpath.py:79 msgid "Article path was moved to this location" msgstr "" -#: models/urlpath.py:181 +#: models/urlpath.py:180 msgid "(root)" msgstr "" -#: models/urlpath.py:189 +#: models/urlpath.py:188 msgid "URL path" msgstr "" -#: models/urlpath.py:190 +#: models/urlpath.py:189 msgid "URL paths" msgstr "" -#: models/urlpath.py:196 +#: models/urlpath.py:195 msgid "Sorry but you cannot have a root article with a slug." msgstr "" -#: models/urlpath.py:199 +#: models/urlpath.py:198 msgid "A non-root note must always have a slug." msgstr "" -#: models/urlpath.py:205 +#: models/urlpath.py:204 #, python-format msgid "There is already a root node on %s" msgstr "" -#: models/urlpath.py:408 +#: models/urlpath.py:405 msgid "" "Articles who lost their parents\n" "===============================\n" @@ -491,90 +482,94 @@ msgid "" "probably find a new home for them." msgstr "" -#: models/urlpath.py:411 +#: models/urlpath.py:408 msgid "Lost and found" msgstr "" -#: plugins/attachments/forms.py:19 +#: plugins/attachments/apps.py:7 +msgid "Wiki attachments" +msgstr "" + +#: plugins/attachments/forms.py:15 #: plugins/attachments/templates/wiki/plugins/attachments/history.html:14 msgid "Description" msgstr "" -#: plugins/attachments/forms.py:20 +#: plugins/attachments/forms.py:16 msgid "A short summary of what the file contains" msgstr "" -#: plugins/attachments/forms.py:69 +#: plugins/attachments/forms.py:65 msgid "Remove previous" msgstr "" -#: plugins/attachments/forms.py:70 +#: plugins/attachments/forms.py:66 msgid "Remove previous attachment revisions and their files (to save space)?" msgstr "" -#: plugins/attachments/forms.py:79 +#: plugins/attachments/forms.py:75 msgid "File or zip archive" msgstr "" -#: plugins/attachments/forms.py:84 +#: plugins/attachments/forms.py:80 msgid "Unzip file" msgstr "" -#: plugins/attachments/forms.py:86 +#: plugins/attachments/forms.py:82 msgid "" "Create individual attachments for files in a .zip file - directories do not " "work." msgstr "" -#: plugins/attachments/forms.py:107 +#: plugins/attachments/forms.py:103 msgid "Not a zip file" msgstr "" -#: plugins/attachments/forms.py:116 +#: plugins/attachments/forms.py:112 msgid "User not allowed to moderate this article" msgstr "" -#: plugins/attachments/forms.py:162 +#: plugins/attachments/forms.py:158 msgid "Yes I am sure..." msgstr "" -#: plugins/attachments/models.py:35 +#: plugins/attachments/models.py:30 msgid "" "The revision of this attachment currently in use (on all articles using the " "attachment)" msgstr "" -#: plugins/attachments/models.py:39 +#: plugins/attachments/models.py:34 msgid "original filename" msgstr "" -#: plugins/attachments/models.py:52 +#: plugins/attachments/models.py:47 msgid "attachment" msgstr "" -#: plugins/attachments/models.py:53 +#: plugins/attachments/models.py:48 msgid "attachments" msgstr "" -#: plugins/attachments/models.py:72 +#: plugins/attachments/models.py:67 msgid "No file extension found in filename. That's not okay!" msgstr "" -#: plugins/attachments/models.py:78 +#: plugins/attachments/models.py:73 msgid "" "The following filename is illegal: {filename:s}. 
Extension has to be one of " "{extensions:s}" msgstr "" -#: plugins/attachments/models.py:126 +#: plugins/attachments/models.py:119 msgid "file" msgstr "" -#: plugins/attachments/models.py:132 +#: plugins/attachments/models.py:125 msgid "attachment revision" msgstr "" -#: plugins/attachments/models.py:133 +#: plugins/attachments/models.py:126 msgid "attachment revisions" msgstr "" @@ -587,15 +582,13 @@ msgstr "" #: plugins/attachments/templates/wiki/plugins/attachments/delete.html:12 msgid "" -"\n" -" The file may be referenced on other articles. Deleting it means that " -"they will loose their references to this file. The following articles " -"reference this file:\n" -" " +"The file may be referenced on other articles. Deleting it means that they " +"will loose their references to this file. The following articles reference " +"this file:" msgstr "" -#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:29 -#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:53 +#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:27 +#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:51 #: plugins/attachments/templates/wiki/plugins/attachments/history.html:49 #: plugins/attachments/templates/wiki/plugins/attachments/replace.html:39 #: plugins/attachments/templates/wiki/plugins/attachments/search.html:75 @@ -606,15 +599,15 @@ msgstr "" msgid "Go back" msgstr "" -#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:33 +#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:31 msgid "Delete it!" msgstr "" -#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:40 +#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:38 msgid "Remove" msgstr "" -#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:42 +#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:40 msgid "" "\n" " You can remove a reference to a file, but it will retain its references " @@ -622,7 +615,7 @@ msgid "" " " msgstr "" -#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:57 +#: plugins/attachments/templates/wiki/plugins/attachments/delete.html:55 msgid "Remove reference" msgstr "" @@ -633,12 +626,12 @@ msgstr "" #: plugins/attachments/templates/wiki/plugins/attachments/history.html:12 #: plugins/attachments/templates/wiki/plugins/attachments/search.html:28 -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:44 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:43 msgid "Date" msgstr "" #: plugins/attachments/templates/wiki/plugins/attachments/history.html:13 -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:43 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:42 msgid "User" msgstr "" @@ -675,7 +668,7 @@ msgid "Use this!" msgstr "" #: plugins/attachments/templates/wiki/plugins/attachments/index.html:5 -#: plugins/attachments/wiki_plugin.py:21 +#: plugins/attachments/wiki_plugin.py:18 msgid "Attachments" msgstr "" @@ -837,67 +830,71 @@ msgstr "" msgid "Your search did not return any results" msgstr "" -#: plugins/attachments/views.py:54 +#: plugins/attachments/views.py:51 #, python-format msgid "Successfully added: %s" msgstr "" -#: plugins/attachments/views.py:60 +#: plugins/attachments/views.py:57 #, python-format msgid "%s was successfully added." 
msgstr "" -#: plugins/attachments/views.py:156 +#: plugins/attachments/views.py:153 #, python-format msgid "%s uploaded and replaces old attachment." msgstr "" -#: plugins/attachments/views.py:160 +#: plugins/attachments/views.py:157 #, python-format msgid "Your file could not be saved: %s" msgstr "" -#: plugins/attachments/views.py:189 +#: plugins/attachments/views.py:186 msgid "" "Your new file will automatically be renamed to match the file already " "present. Files with different extensions are not allowed." msgstr "" -#: plugins/attachments/views.py:279 +#: plugins/attachments/views.py:276 #, python-format msgid "Current revision changed for %s." msgstr "" -#: plugins/attachments/views.py:311 +#: plugins/attachments/views.py:308 #, python-format msgid "Added a reference to \"%(att)s\" from \"%(art)s\"." msgstr "" -#: plugins/attachments/views.py:316 +#: plugins/attachments/views.py:313 #, python-format msgid "\"%(att)s\" is already referenced." msgstr "" -#: plugins/attachments/views.py:349 +#: plugins/attachments/views.py:346 #, python-format msgid "The file %s was deleted." msgstr "" -#: plugins/attachments/views.py:354 +#: plugins/attachments/views.py:351 #, python-format msgid "This article is no longer related to the file %s." msgstr "" -#: plugins/attachments/wiki_plugin.py:29 +#: plugins/attachments/wiki_plugin.py:26 #, python-format msgid "A file was changed: %s" msgstr "" -#: plugins/attachments/wiki_plugin.py:32 +#: plugins/attachments/wiki_plugin.py:29 #, python-format msgid "A file was deleted: %s" msgstr "" +#: plugins/globalhistory/apps.py:7 +msgid "Wiki Global History" +msgstr "" + #: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:4 #: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:8 #: plugins/globalhistory/templates/wiki/plugins/globalhistory/menubaritem.html:7 @@ -917,66 +914,70 @@ msgid_plural "" msgstr[0] "" msgstr[1] "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:26 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:25 msgid "Show all revisions of all articles" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:29 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:28 msgid "Show last revision of every article" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:40 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:39 msgid "Revision ID" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:41 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:40 msgid "Article" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:42 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:41 msgid "Message" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:66 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:65 #: templates/wiki/history.html:72 msgid "no log message" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:72 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:71 #: templates/wiki/includes/revision_info.html:18 msgid "restored" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:78 +#: 
plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:77 #: templates/wiki/includes/revision_info.html:24 msgid "unlocked" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:88 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:87 #: templates/wiki/includes/revision_info.html:10 msgid "anonymous (IP logged)" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:96 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:95 msgid "Go to article history" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:97 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:96 msgid "Go to article" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:106 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:105 msgid "No more changes to display !" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:107 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:106 msgid "Go back to previous page" msgstr "" -#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:109 +#: plugins/globalhistory/templates/wiki/plugins/globalhistory/globalhistory.html:108 msgid "No changes to display !" msgstr "" +#: plugins/help/apps.py:7 +msgid "Wiki help" +msgstr "" + #: plugins/help/templates/wiki/plugins/help/sidebar.html:1 msgid "Adding new articles" msgstr "" @@ -1013,53 +1014,57 @@ msgstr "" msgid "Tables" msgstr "" -#: plugins/help/wiki_plugin.py:12 +#: plugins/help/wiki_plugin.py:10 msgid "Help" msgstr "" -#: plugins/images/forms.py:20 +#: plugins/images/apps.py:10 +msgid "Wiki images" +msgstr "" + +#: plugins/images/forms.py:17 #, python-format msgid "" "New image %s was successfully uploaded. You can use it by selecting it from " "the list of available images." msgstr "" -#: plugins/images/forms.py:64 +#: plugins/images/forms.py:61 msgid "Are you sure?" msgstr "" -#: plugins/images/models.py:48 +#: plugins/images/models.py:38 msgid "image" msgstr "" -#: plugins/images/models.py:49 +#: plugins/images/models.py:39 msgid "images" msgstr "" -#: plugins/images/models.py:54 +#: plugins/images/models.py:44 #, python-format msgid "Image: %s" msgstr "" -#: plugins/images/models.py:56 +#: plugins/images/models.py:46 msgid "Current revision not set!!" 
msgstr "" -#: plugins/images/models.py:108 +#: plugins/images/models.py:97 msgid "image revision" msgstr "" -#: plugins/images/models.py:109 +#: plugins/images/models.py:98 msgid "image revisions" msgstr "" -#: plugins/images/models.py:115 +#: plugins/images/models.py:104 #, python-format msgid "Image Revision: %d" msgstr "" #: plugins/images/templates/wiki/plugins/images/index.html:5 -#: plugins/images/wiki_plugin.py:18 +#: plugins/images/wiki_plugin.py:15 msgid "Images" msgstr "" @@ -1205,31 +1210,35 @@ msgstr "" msgid "Cancel" msgstr "" -#: plugins/images/views.py:80 +#: plugins/images/views.py:78 #, python-format msgid "%s has been restored" msgstr "" -#: plugins/images/views.py:82 +#: plugins/images/views.py:80 #, python-format msgid "%s has been marked as deleted" msgstr "" -#: plugins/images/views.py:142 +#: plugins/images/views.py:140 #, python-format msgid "%(file)s has been changed to revision #%(revision)d" msgstr "" -#: plugins/images/views.py:187 +#: plugins/images/views.py:185 #, python-format msgid "%(file)s has been saved." msgstr "" -#: plugins/images/wiki_plugin.py:29 +#: plugins/images/wiki_plugin.py:26 #, python-format msgid "An image was added: %s" msgstr "" +#: plugins/links/apps.py:7 +msgid "Wiki links" +msgstr "" + #: plugins/links/templates/wiki/plugins/links/sidebar.html:3 msgid "Link to another wiki page" msgstr "" @@ -1246,35 +1255,39 @@ msgid "" "or http://example.com or by using the markdown syntax:" msgstr "" -#: plugins/links/wiki_plugin.py:23 +#: plugins/links/wiki_plugin.py:20 msgid "Links" msgstr "" -#: plugins/macros/mdx/macro.py:83 +#: plugins/macros/apps.py:7 +msgid "Wiki macros" +msgstr "" + +#: plugins/macros/mdx/macro.py:79 msgid "Article list" msgstr "" -#: plugins/macros/mdx/macro.py:84 +#: plugins/macros/mdx/macro.py:80 msgid "Insert a list of articles in this level." msgstr "" -#: plugins/macros/mdx/macro.py:86 +#: plugins/macros/mdx/macro.py:82 msgid "Maximum depth to show levels for." msgstr "" -#: plugins/macros/mdx/macro.py:92 +#: plugins/macros/mdx/macro.py:88 msgid "Table of contents" msgstr "" -#: plugins/macros/mdx/macro.py:93 +#: plugins/macros/mdx/macro.py:89 msgid "Insert a table of contents matching the headings." msgstr "" -#: plugins/macros/mdx/macro.py:101 +#: plugins/macros/mdx/macro.py:97 msgid "WikiLinks" msgstr "" -#: plugins/macros/mdx/macro.py:103 +#: plugins/macros/mdx/macro.py:99 msgid "Insert a link to another wiki page with a short notation." 
msgstr "" @@ -1286,86 +1299,90 @@ msgstr "" msgid "Nothing below this level" msgstr "" -#: plugins/macros/wiki_plugin.py:16 +#: plugins/macros/wiki_plugin.py:14 msgid "Macros" msgstr "" -#: plugins/notifications/forms.py:19 +#: plugins/notifications/apps.py:7 +msgid "Wiki notifications" +msgstr "" + +#: plugins/notifications/forms.py:16 #, python-format msgid "Receive notifications %(interval)s" msgstr "" -#: plugins/notifications/forms.py:29 +#: plugins/notifications/forms.py:26 #, python-format msgid "%(title)s - %(url)s" msgstr "" -#: plugins/notifications/forms.py:46 +#: plugins/notifications/forms.py:43 msgid "Remove subscriptions" msgstr "" -#: plugins/notifications/forms.py:48 +#: plugins/notifications/forms.py:45 msgid "Select article subscriptions to remove from notifications" msgstr "" -#: plugins/notifications/forms.py:52 +#: plugins/notifications/forms.py:49 msgid "Email digests" msgstr "" -#: plugins/notifications/forms.py:54 +#: plugins/notifications/forms.py:51 msgid "Unchanged (selected on each article)" msgstr "" -#: plugins/notifications/forms.py:55 +#: plugins/notifications/forms.py:52 msgid "No emails" msgstr "" -#: plugins/notifications/forms.py:56 +#: plugins/notifications/forms.py:53 msgid "Email on any change" msgstr "" -#: plugins/notifications/forms.py:108 +#: plugins/notifications/forms.py:102 #: plugins/notifications/templates/wiki/plugins/notifications/settings.html:5 msgid "Notifications" msgstr "" -#: plugins/notifications/forms.py:115 +#: plugins/notifications/forms.py:109 #: templates/wiki/includes/article_menu.html:9 templates/wiki/settings.html:5 msgid "Settings" msgstr "" -#: plugins/notifications/forms.py:119 +#: plugins/notifications/forms.py:113 msgid "When this article is edited" msgstr "" -#: plugins/notifications/forms.py:122 +#: plugins/notifications/forms.py:116 msgid "Also receive emails about article edits" msgstr "" -#: plugins/notifications/forms.py:164 +#: plugins/notifications/forms.py:158 msgid "Your notification settings were updated." msgstr "" -#: plugins/notifications/forms.py:167 +#: plugins/notifications/forms.py:161 msgid "Your notification settings were unchanged, so nothing saved." msgstr "" -#: plugins/notifications/models.py:25 +#: plugins/notifications/models.py:20 #, python-format msgid "%(user)s subscribing to %(article)s (%(type)s)" msgstr "" -#: plugins/notifications/models.py:51 +#: plugins/notifications/models.py:46 #, python-format msgid "Article deleted: %s" msgstr "" -#: plugins/notifications/models.py:59 +#: plugins/notifications/models.py:54 #, python-format msgid "Article modified: %s" msgstr "" -#: plugins/notifications/models.py:67 +#: plugins/notifications/models.py:62 #, python-format msgid "New article created: %s" msgstr "" @@ -1408,14 +1425,14 @@ msgstr "" msgid "Save changes" msgstr "" -#: plugins/notifications/views.py:27 +#: plugins/notifications/views.py:25 #, python-format msgid "You will receive notifications %(interval)s for %(articles)d articles" msgstr "" #: templates/wiki/accounts/account_settings.html:4 #: templates/wiki/accounts/account_settings.html:7 -#: templates/wiki/base_site.html:66 +#: templates/wiki/base_site.html:79 msgid "Account Settings" msgstr "" @@ -1423,7 +1440,7 @@ msgstr "" msgid "Update" msgstr "" -#: templates/wiki/accounts/login.html:4 templates/wiki/base_site.html:96 +#: templates/wiki/accounts/login.html:4 templates/wiki/base_site.html:109 msgid "Log in" msgstr "" @@ -1440,7 +1457,7 @@ msgid "Don't have an account?" 
msgstr "" #: templates/wiki/accounts/login.html:25 templates/wiki/accounts/signup.html:3 -#: templates/wiki/accounts/signup.html:6 templates/wiki/base_site.html:100 +#: templates/wiki/accounts/signup.html:6 templates/wiki/base_site.html:113 msgid "Sign up" msgstr "" @@ -1452,23 +1469,31 @@ msgstr "" msgid "This article was last modified:" msgstr "" -#: templates/wiki/base_site.html:73 +#: templates/wiki/base_site.html:53 +msgid "Search from current article..." +msgstr "" + +#: templates/wiki/base_site.html:55 +msgid "Search whole wiki..." +msgstr "" + +#: templates/wiki/base_site.html:86 msgid "Log out" msgstr "" -#: templates/wiki/base_site.html:80 +#: templates/wiki/base_site.html:93 msgid "Deleted articles" msgstr "" -#: templates/wiki/base_site.html:108 +#: templates/wiki/base_site.html:121 msgid "Home" msgstr "" -#: templates/wiki/base_site.html:109 +#: templates/wiki/base_site.html:122 msgid "About" msgstr "" -#: templates/wiki/base_site.html:134 +#: templates/wiki/base_site.html:147 msgid "" "Powered by <a href=\"http://www.django-wiki.org\">django-wiki</a>, an open " "source application under the <a href=\"http://www.gnu.org/licenses/quick-" @@ -1620,11 +1645,11 @@ msgid "" " " msgstr "" -#: templates/wiki/dir.html:50 templates/wiki/search.html:31 +#: templates/wiki/dir.html:50 templates/wiki/search.html:36 msgid "Last modified" msgstr "" -#: templates/wiki/dir.html:74 templates/wiki/search.html:40 +#: templates/wiki/dir.html:74 templates/wiki/search.html:45 msgid "There are no articles in this level" msgstr "" @@ -1775,11 +1800,13 @@ msgid "Browse articles in this level" msgstr "" #: templates/wiki/includes/breadcrumbs.html:48 -msgid "New article next to" +#, python-format +msgid "New article next to %(title)s" msgstr "" #: templates/wiki/includes/breadcrumbs.html:53 -msgid "New article below" +#, python-format +msgid "New article below %(title)s" msgstr "" #: templates/wiki/includes/revision_info.html:10 @@ -1835,7 +1862,7 @@ msgstr "" msgid "and" msgstr "" -#: templates/wiki/preview_inline.html:22 views/article.py:917 +#: templates/wiki/preview_inline.html:22 views/article.py:922 msgid "You cannot merge with a deleted revision" msgstr "" @@ -1880,7 +1907,15 @@ msgstr "" msgid "Search results for:" msgstr "" -#: templates/wiki/search.html:23 +#: templates/wiki/search.html:15 +msgid "Searching in" +msgstr "" + +#: templates/wiki/search.html:17 +msgid "Searching whole wiki" +msgstr "" + +#: templates/wiki/search.html:28 #, python-format msgid "Your search returned <strong>%(cnt)s</strong> results." msgstr "" @@ -1893,127 +1928,127 @@ msgstr "" msgid "This article is currently locked for editing." msgstr "" -#: views/accounts.py:45 +#: views/accounts.py:41 msgid "Account signup is only allowed for administrators." msgstr "" -#: views/accounts.py:59 +#: views/accounts.py:55 msgid "You are now signed up... and now you can sign in!" msgstr "" -#: views/accounts.py:72 +#: views/accounts.py:68 msgid "You are no longer logged in. Bye bye!" msgstr "" -#: views/accounts.py:105 +#: views/accounts.py:101 msgid "You are now logged in! Have fun!" msgstr "" -#: views/accounts.py:142 +#: views/accounts.py:138 msgid "Account info saved!" msgstr "" -#: views/article.py:93 +#: views/article.py:89 #, python-format msgid "New article '%s' created." msgstr "" -#: views/article.py:101 +#: views/article.py:97 #, python-format msgid "There was an error creating this article: %s" msgstr "" -#: views/article.py:104 +#: views/article.py:100 msgid "There was an error creating this article." 
msgstr "" -#: views/article.py:195 +#: views/article.py:190 msgid "" "This article cannot be deleted because it has children or is a root article." msgstr "" -#: views/article.py:205 +#: views/article.py:200 msgid "" "This article together with all its contents are now completely gone! Thanks!" msgstr "" -#: views/article.py:214 +#: views/article.py:209 #, python-format msgid "" "The article \"%s\" is now marked as deleted! Thanks for keeping the site " "free from unwanted material!" msgstr "" -#: views/article.py:326 +#: views/article.py:321 msgid "Your changes were saved." msgstr "" -#: views/article.py:340 +#: views/article.py:335 msgid "Please note that your article text has not yet been saved!" msgstr "" -#: views/article.py:371 +#: views/article.py:366 msgid "A new revision of the article was successfully added." msgstr "" -#: views/article.py:424 +#: views/article.py:415 msgid "This article cannot be moved because it is a root article." msgstr "" -#: views/article.py:438 +#: views/article.py:429 msgid "This article cannot be moved to a child of itself." msgstr "" -#: views/article.py:491 +#: views/article.py:482 #, python-brace-format msgid "Moved: {title}" msgstr "" -#: views/article.py:492 +#: views/article.py:483 #, python-brace-format msgid "Article moved to {link}" msgstr "" -#: views/article.py:493 +#: views/article.py:484 msgid "Created redirect (auto)" msgstr "" -#: views/article.py:501 +#: views/article.py:492 #, python-brace-format msgid "Article successfully moved! Created {n} redirect." msgid_plural "Article successfully moved! Created {n} redirects." msgstr[0] "" msgstr[1] "" -#: views/article.py:510 +#: views/article.py:501 msgid "Article successfully moved!" msgstr "" -#: views/article.py:552 +#: views/article.py:543 msgid "Restoring article" msgstr "" -#: views/article.py:556 +#: views/article.py:547 #, python-format msgid "The article \"%s\" and its children are now restored." msgstr "" -#: views/article.py:812 +#: views/article.py:815 #, python-format msgid "" "The article %(title)s is now set to display revision #%(revision_number)d" msgstr "" -#: views/article.py:883 +#: views/article.py:888 msgid "New title" msgstr "" -#: views/article.py:930 +#: views/article.py:935 #, python-format msgid "Merge between revision #%(r1)d and revision #%(r2)d" msgstr "" -#: views/article.py:941 +#: views/article.py:946 #, python-format msgid "" "A new revision was created: Merge between revision #%(r1)d and revision #"
cobbler__cobbler-3197
Packaging: Provide native packages for Debian & Ubuntu

### Is your feature request related to a problem?

Currently we only provide packages for Debian and Ubuntu via Debbuild. This is fine for test installations or for users who already know the OBS and are familiar with it.

### The Behaviour you'd like

`apt install cobbler`

### Alternatives you've considered

The OBS is nice, but in the end we will probably reach more users by providing distro-native packages. We would also end up with a simpler specfile.

### Additional context

Current OBS project in my home: https://build.opensuse.org/project/show/home:SchoolGuy:cobbler
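To make the request a bit more concrete, below is a minimal sketch of what a native `debian/control` for cobbler could look like. This is purely illustrative and not taken from any existing cobbler Debian packaging: the section, dependency list, and version numbers are assumptions, and a real file would need the full dependency set from `setup.py` plus the Apache/TFTP runtime requirements.

```
# Hypothetical debian/control sketch -- package names and dependencies are illustrative only.
Source: cobbler
Section: admin
Priority: optional
Maintainer: Team Cobbler <[email protected]>
Build-Depends: debhelper-compat (= 13),
               dh-python,
               python3-all,
               python3-setuptools,
               python3-sphinx
Standards-Version: 4.6.0
Homepage: https://cobbler.github.io

Package: cobbler
Architecture: all
Depends: ${misc:Depends},
         ${python3:Depends},
         python3-cheetah,
         python3-netaddr,
         python3-yaml,
         apache2 | nginx
Description: network install server (PXE, DHCP, TFTP provisioning)
 Cobbler is a provisioning server that glues together DHCP, TFTP and
 automated OS installation so that machines can be network-booted and
 installed centrally.
```

Together with a `debian/rules` driven by `dh $@ --with python3 --buildsystem=pybuild`, a sketch like this could let `dpkg-buildpackage` produce a .deb, which is the usual route towards `apt install cobbler` from distro repositories.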
[ { "content": "#!/usr/bin/env python3\n\nimport os\nimport sys\nimport time\nimport glob as _glob\n\nfrom setuptools import setup\nfrom setuptools import Command\nfrom setuptools.command.install import install as _install\nfrom setuptools import Distribution as _Distribution\nfrom setuptools.command.build_py import build_py as _build_py\nfrom setuptools import dep_util\nfrom distutils.command.build import build as _build\nfrom configparser import ConfigParser\nfrom setuptools import find_packages\nfrom sphinx.setup_command import BuildDoc\n\nimport codecs\nfrom coverage import Coverage\nimport pwd\nimport shutil\nimport subprocess\n\n\nVERSION = \"3.4.0\"\nOUTPUT_DIR = \"config\"\n\n# # Configurable installation roots for various data files.\ndatadir = os.environ.get(\"DATAPATH\", \"/usr/share/cobbler\")\ndocpath = os.environ.get(\"DOCPATH\", \"share/man\")\netcpath = os.environ.get(\"ETCPATH\", \"/etc/cobbler\")\nlibpath = os.environ.get(\"LIBPATH\", \"/var/lib/cobbler\")\nlogpath = os.environ.get(\"LOG_PATH\", \"/var/log\")\ncompletion_path = os.environ.get(\n \"COMPLETION_PATH\", \"/usr/share/bash-completion/completions\"\n)\nstatepath = os.environ.get(\"STATEPATH\", \"/tmp/cobbler_settings/devinstall\")\nhttp_user = os.environ.get(\"HTTP_USER\", \"wwwrun\")\nhttpd_service = os.environ.get(\"HTTPD_SERVICE\", \"apache2.service\")\nwebconfig = os.environ.get(\"WEBCONFIG\", \"/etc/apache2/vhosts.d\")\nwebroot = os.environ.get(\"WEBROOT\", \"/srv/www\")\ntftproot = os.environ.get(\"TFTPROOT\", \"/srv/tftpboot\")\nbind_zonefiles = os.environ.get(\"ZONEFILES\", \"/var/lib/named/\")\nshim_folder = os.environ.get(\"SHIM_FOLDER\", \"/usr/share/efi/*/\")\nshim_file = os.environ.get(\"SHIM_FILE\", r\"shim\\.efi\")\nipxe_folder = os.environ.get(\"IPXE_FOLDER\", \"/usr/share/ipxe/\")\nmemdisk_folder = os.environ.get(\"MEMDISK_FOLDER\", \"/usr/share/syslinux\")\npxelinux_folder = os.environ.get(\"PXELINUX_FOLDER\", \"/usr/share/syslinux\")\nsyslinux_dir = os.environ.get(\"SYSLINUX_DIR\", \"/usr/share/syslinux\")\ngrub_mod_folder = os.environ.get(\"GRUB_MOD_FOLDER\", \"/usr/share/grub2\")\n\n\n#####################################################################\n# # Helper Functions #################################################\n#####################################################################\n\n\ndef glob(*args, **kwargs):\n recursive = kwargs.get(\"recursive\", False)\n results = []\n for arg in args:\n for elem in _glob.glob(arg):\n # Now check if we should handle/check those results.\n if os.path.isdir(elem):\n if os.path.islink(elem):\n # We skip symlinks\n pass\n else:\n # We only handle directories if recursive was specified\n if recursive:\n results.extend(\n # Add the basename of arg (the pattern) to elem and continue\n glob(\n os.path.join(elem, os.path.basename(arg)),\n recursive=True,\n )\n )\n else:\n # Always append normal files\n results.append(elem)\n return results\n\n\ndef read_readme_file():\n # read the contents of your README file\n this_directory = os.path.abspath(os.path.dirname(__file__))\n with open(os.path.join(this_directory, \"README.md\"), encoding=\"utf-8\") as f:\n return f.read()\n\n\n#####################################################################\n\n\n#####################################################################\n\n\ndef gen_build_version():\n buildepoch = int(os.environ.get(\"SOURCE_DATE_EPOCH\", time.time()))\n builddate = time.asctime(time.gmtime(buildepoch))\n\n gitloc = \"/usr/bin/git\"\n gitdate = \"?\"\n gitstamp = \"?\"\n if not 
os.path.isfile(gitloc):\n print(\"warning: \" + gitloc + \" not found\")\n else:\n cmd = subprocess.Popen(\n [gitloc, \"log\", \"--format=%h%n%ad\", \"-1\"], stdout=subprocess.PIPE\n )\n data = cmd.communicate()[0].strip()\n if cmd.returncode == 0:\n gitstamp, gitdate = data.decode(\"utf8\").split(\"\\n\")\n\n with open(os.path.join(OUTPUT_DIR, \"version\"), \"w+\") as version_file:\n config = ConfigParser()\n config.add_section(\"cobbler\")\n config.set(\"cobbler\", \"gitdate\", str(gitdate))\n config.set(\"cobbler\", \"gitstamp\", str(gitstamp))\n config.set(\"cobbler\", \"builddate\", builddate)\n config.set(\"cobbler\", \"version\", VERSION)\n config.set(\n \"cobbler\", \"version_tuple\", str([int(x) for x in VERSION.split(\".\")])\n )\n config.write(version_file)\n\n\n#####################################################################\n# # Custom Distribution Class ########################################\n#####################################################################\n\n\nclass Distribution(_Distribution):\n def __init__(self, *args, **kwargs):\n self.configure_files = []\n self.configure_values = {}\n self.man_pages = []\n _Distribution.__init__(self, *args, **kwargs)\n\n\n#####################################################################\n# # Modify Build Stage ##############################################\n#####################################################################\n\n\nclass build_py(_build_py):\n \"\"\"Specialized Python source builder.\"\"\"\n\n def run(self):\n gen_build_version()\n _build_py.run(self)\n\n\n#####################################################################\n# # Modify Build Stage ##############################################\n#####################################################################\n\n\nclass build(_build):\n \"\"\"Specialized Python source builder.\"\"\"\n\n def run(self):\n _build.run(self)\n\n\n#####################################################################\n# # Build man pages using Sphinx ###################################\n#####################################################################\n\n\nclass build_man(BuildDoc):\n def initialize_options(self):\n BuildDoc.initialize_options(self)\n self.builder = \"man\"\n\n\n#####################################################################\n# # Configure files ##################################################\n#####################################################################\n\n\nclass build_cfg(Command):\n\n description = \"configure files (copy and substitute options)\"\n\n user_options = [\n (\"install-base=\", None, \"base installation directory\"),\n (\n \"install-platbase=\",\n None,\n \"base installation directory for platform-specific files \",\n ),\n (\n \"install-purelib=\",\n None,\n \"installation directory for pure Python module distributions\",\n ),\n (\n \"install-platlib=\",\n None,\n \"installation directory for non-pure module distributions\",\n ),\n (\n \"install-lib=\",\n None,\n \"installation directory for all module distributions \"\n + \"(overrides --install-purelib and --install-platlib)\",\n ),\n (\"install-headers=\", None, \"installation directory for C/C++ headers\"),\n (\"install-scripts=\", None, \"installation directory for Python scripts\"),\n (\"install-data=\", None, \"installation directory for data files\"),\n (\"force\", \"f\", \"forcibly build everything (ignore file timestamps\"),\n ]\n\n boolean_options = [\"force\"]\n\n def initialize_options(self):\n self.build_dir = None\n self.force = None\n self.install_base = 
None\n self.install_platbase = None\n self.install_scripts = None\n self.install_data = None\n self.install_purelib = None\n self.install_platlib = None\n self.install_lib = None\n self.install_headers = None\n self.root = None\n\n def finalize_options(self):\n self.set_undefined_options(\n \"build\", (\"build_base\", \"build_dir\"), (\"force\", \"force\")\n )\n self.set_undefined_options(\n \"install\",\n (\"install_base\", \"install_base\"),\n (\"install_platbase\", \"install_platbase\"),\n (\"install_scripts\", \"install_scripts\"),\n (\"install_data\", \"install_data\"),\n (\"install_purelib\", \"install_purelib\"),\n (\"install_platlib\", \"install_platlib\"),\n (\"install_lib\", \"install_lib\"),\n (\"install_headers\", \"install_headers\"),\n (\"root\", \"root\"),\n )\n\n if self.root:\n # We need the unrooted versions of this values\n for name in (\"lib\", \"purelib\", \"platlib\", \"scripts\", \"data\", \"headers\"):\n attr = \"install_\" + name\n setattr(\n self, attr, \"/\" + os.path.relpath(getattr(self, attr), self.root)\n )\n\n # Check if we are running under a virtualenv\n if hasattr(sys, \"real_prefix\"):\n virtualenv = sys.prefix\n else:\n virtualenv = \"\"\n\n # The values to expand.\n self.configure_values = {\n \"python_executable\": sys.executable,\n \"virtualenv\": virtualenv,\n \"install_base\": os.path.normpath(self.install_base),\n \"install_platbase\": os.path.normpath(self.install_platbase),\n \"install_scripts\": os.path.normpath(self.install_scripts),\n \"install_data\": os.path.normpath(self.install_data),\n \"install_purelib\": os.path.normpath(self.install_purelib),\n \"install_platlib\": os.path.normpath(self.install_platlib),\n \"install_lib\": os.path.normpath(self.install_lib),\n \"install_headers\": os.path.normpath(self.install_headers),\n }\n self.configure_values.update(self.distribution.configure_values)\n\n def run(self):\n # On dry-run ignore missing source files.\n if self.dry_run:\n mode = \"newer\"\n else:\n mode = \"error\"\n # Work on all files\n for infile in self.distribution.configure_files:\n # We copy the files to build/\n outfile = os.path.join(self.build_dir, infile)\n # check if the file is out of date\n if self.force or dep_util.newer_group([infile, \"setup.py\"], outfile, mode):\n # It is. 
Configure it\n self.configure_one_file(infile, outfile)\n\n def configure_one_file(self, infile, outfile):\n self.announce(\"configuring %s\" % infile, 3)\n if not self.dry_run:\n # Read the file\n with codecs.open(infile, \"r\", \"utf-8\") as fh:\n before = fh.read()\n # Substitute the variables\n # Create the output directory if necessary\n outdir = os.path.dirname(outfile)\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n # Write it into build/\n with codecs.open(outfile, \"w\", \"utf-8\") as fh:\n fh.write(self.substitute_values(before, self.configure_values))\n # The last step is to copy the permission bits\n shutil.copymode(infile, outfile)\n\n def substitute_values(self, string, values):\n for name, val in list(values.items()):\n # print(\"replacing @@%s@@ with %s\" % (name, val))\n string = string.replace(\"@@%s@@\" % (name), val)\n return string\n\n\ndef has_configure_files(build):\n \"\"\"Check if the distribution has configuration files to work on.\"\"\"\n return bool(build.distribution.configure_files)\n\n\ndef has_man_pages(build):\n \"\"\"Check if the distribution has configuration files to work on.\"\"\"\n return bool(build.distribution.man_pages)\n\n\nbuild.sub_commands.extend(\n ((\"build_man\", has_man_pages), (\"build_cfg\", has_configure_files))\n)\n\n\n#####################################################################\n# # Modify Install Stage ############################################\n#####################################################################\n\n\nclass install(_install):\n \"\"\"Specialised python package installer.\n\n It does some required chown calls in addition to the usual stuff.\n \"\"\"\n\n def __init__(self, *args):\n _install.__init__(self, *args)\n\n def change_owner(self, path, owner):\n user = pwd.getpwnam(owner)\n try:\n self.announce(\"changing mode of %s\" % path, 3)\n if not self.dry_run:\n # os.walk does not include the toplevel directory\n os.lchown(path, user.pw_uid, -1)\n # Now walk the directory and change them all\n for root, dirs, files in os.walk(path):\n for dirname in dirs:\n os.lchown(os.path.join(root, dirname), user.pw_uid, -1)\n for filename in files:\n os.lchown(os.path.join(root, filename), user.pw_uid, -1)\n except OSError as e:\n # We only check for errno = 1 (EPERM) here because its kinda\n # expected when installing as a non root user.\n if e.errno == 1:\n self.warn(\"Could not change owner: You have insufficient permissions.\")\n else:\n raise e\n\n def run(self):\n # Run the usual stuff.\n _install.run(self)\n\n # If --root wasn't specified default to /usr/local\n if self.root is None:\n self.root = \"/usr/local\"\n\n\n#####################################################################\n# # Test Command #####################################################\n#####################################################################\n\n\nclass test_command(Command):\n user_options = []\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n def run(self):\n import pytest\n\n cov = Coverage()\n cov.erase()\n cov.start()\n\n result = pytest.main()\n\n cov.stop()\n cov.save()\n cov.html_report(directory=\"covhtml\")\n sys.exit(int(bool(len(result.failures) > 0 or len(result.errors) > 0)))\n\n\n#####################################################################\n# # state command base class #########################################\n#####################################################################\n\n\nclass statebase(Command):\n\n user_options = [\n (\"statepath=\", None, 
\"directory to backup configuration\"),\n (\"root=\", None, \"install everything relative to this alternate root directory\"),\n ]\n\n def initialize_options(self):\n self.statepath = statepath\n self.root = None\n\n def finalize_options(self):\n pass\n\n def _copy(self, frm, to):\n if os.path.isdir(frm):\n to = os.path.join(to, os.path.basename(frm))\n self.announce(\"copying %s/ to %s/\" % (frm, to), 3)\n if not self.dry_run:\n if os.path.exists(to):\n shutil.rmtree(to)\n shutil.copytree(frm, to)\n else:\n self.announce(\n \"copying %s to %s\" % (frm, os.path.join(to, os.path.basename(frm))), 3\n )\n if not self.dry_run:\n shutil.copy2(frm, to)\n\n\n#####################################################################\n# # restorestate command #############################################\n#####################################################################\n\n\nclass restorestate(statebase):\n def _copy(self, frm, to):\n if self.root:\n to = self.root + to\n statebase._copy(self, frm, to)\n\n def run(self):\n self.announce(\"restoring the current configuration from %s\" % self.statepath, 3)\n if not os.path.exists(self.statepath):\n self.warn(\"%s does not exist. Skipping\" % self.statepath)\n return\n self._copy(os.path.join(self.statepath, \"collections\"), libpath)\n self._copy(os.path.join(self.statepath, \"cobbler.conf\"), webconfig)\n self._copy(os.path.join(self.statepath, \"settings.yaml\"), etcpath)\n self._copy(os.path.join(self.statepath, \"users.conf\"), etcpath)\n self._copy(os.path.join(self.statepath, \"users.digest\"), etcpath)\n self._copy(os.path.join(self.statepath, \"dhcp.template\"), etcpath)\n self._copy(os.path.join(self.statepath, \"dhcp6.template\"), etcpath)\n self._copy(os.path.join(self.statepath, \"rsync.template\"), etcpath)\n\n\n#####################################################################\n# # savestate command ################################################\n#####################################################################\n\n\nclass savestate(statebase):\n\n description = \"Backup the current configuration to /tmp/cobbler_settings.\"\n\n def _copy(self, frm, to):\n if self.root:\n frm = self.root + frm\n statebase._copy(self, frm, to)\n\n def run(self):\n self.announce(\"backing up the current configuration to %s\" % self.statepath, 3)\n if os.path.exists(self.statepath):\n self.announce(\"deleting existing %s\" % self.statepath, 3)\n if not self.dry_run:\n shutil.rmtree(self.statepath)\n if not self.dry_run:\n os.makedirs(self.statepath)\n self._copy(os.path.join(libpath, \"collections\"), self.statepath)\n self._copy(os.path.join(webconfig, \"cobbler.conf\"), self.statepath)\n self._copy(os.path.join(etcpath, \"settings.yaml\"), self.statepath)\n self._copy(os.path.join(etcpath, \"users.conf\"), self.statepath)\n self._copy(os.path.join(etcpath, \"users.digest\"), self.statepath)\n self._copy(os.path.join(etcpath, \"dhcp.template\"), self.statepath)\n self._copy(os.path.join(etcpath, \"dhcp6.template\"), self.statepath)\n self._copy(os.path.join(etcpath, \"rsync.template\"), self.statepath)\n\n\n#####################################################################\n# # Actual Setup.py Script ###########################################\n#####################################################################\n\n\nif __name__ == \"__main__\":\n setup(\n distclass=Distribution,\n cmdclass={\n \"build\": build,\n \"build_py\": build_py,\n \"test\": test_command,\n \"install\": install,\n \"savestate\": savestate,\n \"restorestate\": 
restorestate,\n \"build_cfg\": build_cfg,\n \"build_man\": build_man,\n },\n name=\"cobbler\",\n version=VERSION,\n description=\"Network Boot and Update Server\",\n long_description=read_readme_file(),\n long_description_content_type=\"text/markdown\",\n author=\"Team Cobbler\",\n author_email=\"[email protected]\",\n project_urls={\n \"Website\": \"https://cobbler.github.io\",\n \"Documentation (Users)\": \"https://cobbler.readthedocs.io/en/latest\",\n \"Documentation (Devs)\": \"https://github.com/cobbler/cobbler/wiki\",\n \"Source\": \"https://github.com/cobbler/cobbler\",\n \"Tracker\": \"https://github.com/cobbler/cobbler/issues\",\n },\n license=\"GPLv2+\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)\",\n \"Programming Language :: Python :: 3.6\",\n \"Topic :: System :: Installation/Setup\",\n \"Topic :: System :: Systems Administration\",\n \"Intended Audience :: System Administrators\",\n \"Natural Language :: English\",\n \"Operating System :: POSIX :: Linux\",\n ],\n keywords=[\"pxe\", \"autoinstallation\", \"dhcp\", \"tftp\", \"provisioning\"],\n install_requires=[\n \"requests\",\n \"pyyaml\",\n \"netaddr\",\n \"Cheetah3\",\n \"pymongo\",\n \"distro\",\n \"python-ldap\",\n \"dnspython\",\n \"file-magic\",\n \"schema\",\n ],\n extras_require={\n \"lint\": [\"pyflakes\", \"pycodestyle\"],\n \"test\": [\"pytest\", \"pytest-cov\", \"codecov\", \"pytest-mock\"],\n },\n packages=find_packages(exclude=[\"*tests*\"]),\n scripts=[\n \"bin/cobbler\",\n \"bin/cobblerd\",\n \"bin/cobbler-ext-nodes\",\n \"bin/cobbler-settings\",\n ],\n configure_values={\n \"webroot\": os.path.normpath(webroot),\n \"tftproot\": os.path.normpath(tftproot),\n \"httpd_service\": httpd_service,\n \"bind_zonefiles\": bind_zonefiles,\n \"shim_folder\": shim_folder,\n \"shim_file\": shim_file,\n \"ipxe_folder\": ipxe_folder,\n \"memdisk_folder\": memdisk_folder,\n \"pxelinux_folder\": pxelinux_folder,\n \"syslinux_dir\": syslinux_dir,\n \"grub_mod_folder\": grub_mod_folder,\n },\n configure_files=[\n \"config/apache/cobbler.conf\",\n \"config/nginx/cobbler.conf\",\n \"config/cobbler/settings.yaml\",\n \"config/service/cobblerd.service\",\n \"templates/etc/named.template\",\n \"templates/etc/secondary.template\",\n ],\n man_pages=[\"docs/cobblerd.rst\", \"docs/cobbler-conf.rst\", \"docs/cobbler.rst\"],\n data_files=[\n (\"%s\" % webconfig, [\"build/config/apache/cobbler.conf\"]),\n (\"%s/templates\" % libpath, glob(\"autoinstall_templates/*\")),\n (\n \"%s/templates/install_profiles\" % libpath,\n glob(\"autoinstall_templates/install_profiles/*\"),\n ),\n (\"%s/snippets\" % libpath, glob(\"autoinstall_snippets/*\", recursive=True)),\n (\"%s/scripts\" % libpath, glob(\"autoinstall_scripts/*\")),\n (\"%s\" % libpath, [\"config/cobbler/distro_signatures.json\"]),\n (\"share/cobbler/bin\", glob(\"scripts/*\")),\n (\"%s/loaders\" % libpath, []),\n (\"%s/misc\" % libpath, glob(\"misc/*\")),\n # Configuration\n (f\"{etcpath}/apache\", [\"build/config/apache/cobbler.conf\"]),\n (f\"{etcpath}/nginx\", [\"build/config/nginx/cobbler.conf\"]),\n (\n \"%s\" % etcpath,\n [\n \"build/config/service/cobblerd.service\",\n \"build/config/cobbler/settings.yaml\",\n ],\n ),\n (\n \"%s\" % etcpath,\n [\n \"config/cobbler/auth.conf\",\n \"config/cobbler/users.conf\",\n \"config/cobbler/users.digest\",\n \"config/cheetah/cheetah_macros\",\n \"config/rotate/cobblerd_rotate\",\n \"config/rsync/import_rsync_whitelist\",\n 
\"config/rsync/rsync.exclude\",\n \"config/version\",\n ],\n ),\n (\"%s\" % etcpath, glob(\"cobbler/etc/*\")),\n (\n \"%s\" % etcpath,\n [\n \"templates/etc/named.template\",\n \"templates/etc/genders.template\",\n \"templates/etc/secondary.template\",\n \"templates/etc/zone.template\",\n \"templates/etc/dnsmasq.template\",\n \"templates/etc/rsync.template\",\n \"templates/etc/dhcp.template\",\n \"templates/etc/dhcp6.template\",\n \"templates/etc/ndjbdns.template\",\n ],\n ),\n (\"%s/iso\" % etcpath, glob(\"templates/iso/*\")),\n (\"%s/boot_loader_conf\" % etcpath, glob(\"templates/boot_loader_conf/*\")),\n # completion_file\n (\"%s\" % completion_path, [\"config/bash/completion/cobbler\"]),\n (\"%s/grub_config\" % libpath, glob(\"config/grub/*\")),\n # ToDo: Find a nice way to copy whole config/grub structure recursively\n # files\n (\"%s/grub_config/grub\" % libpath, glob(\"config/grub/grub/*\")),\n # dirs\n (\"%s/grub_config/grub/system\" % libpath, []),\n (\"%s/grub_config/grub/system_link\" % libpath, []),\n (\"%s/reporting\" % etcpath, glob(\"templates/reporting/*\")),\n # logfiles\n (\"%s/cobbler/kicklog\" % logpath, []),\n (\"%s/cobbler/syslog\" % logpath, []),\n (\"%s/httpd/cobbler\" % logpath, []),\n (\"%s/cobbler/anamon\" % logpath, []),\n (\"%s/cobbler/tasks\" % logpath, []),\n # zone-specific templates directory\n (\"%s/zone_templates\" % etcpath, glob(\"templates/zone_templates/*\")),\n # windows-specific templates directory\n (\"%s/windows\" % etcpath, glob(\"templates/windows/*\")),\n (\"%s\" % etcpath, [\"config/cobbler/logging_config.conf\"]),\n # man pages\n (\"%s/man1\" % docpath, glob(\"build/sphinx/man/*.1\")),\n (\"%s/man5\" % docpath, glob(\"build/sphinx/man/*.5\")),\n (\"%s/man8\" % docpath, glob(\"build/sphinx/man/*.8\")),\n (\"%s/tests\" % datadir, glob(\"tests/*.py\")),\n (\"%s/tests/cli\" % datadir, glob(\"tests/cli/*.py\")),\n (\"%s/tests/modules\" % datadir, glob(\"tests/modules/*.py\")),\n (\n \"%s/tests/modules/authentication\" % datadir,\n glob(\"tests/modules/authentication/*.py\"),\n ),\n (\"%s/tests/xmlrpcapi\" % datadir, glob(\"tests/xmlrpcapi/*.py\")),\n ],\n )\n", "path": "setup.py" } ]
[ { "content": "#!/usr/bin/env python3\n\nimport os\nimport sys\nimport time\nimport glob as _glob\n\nfrom setuptools import setup\nfrom setuptools import Command\nfrom setuptools.command.install import install as _install\nfrom setuptools import Distribution as _Distribution\nfrom setuptools.command.build_py import build_py as _build_py\nfrom setuptools import dep_util\nfrom distutils.command.build import build as _build\nfrom configparser import ConfigParser\nfrom setuptools import find_packages\nfrom sphinx.setup_command import BuildDoc\n\nimport codecs\nfrom coverage import Coverage\nimport pwd\nimport shutil\nimport subprocess\n\n\nVERSION = \"3.4.0\"\nOUTPUT_DIR = \"config\"\n\n# # Configurable installation roots for various data files.\ndatadir = os.environ.get(\"DATAPATH\", \"/usr/share/cobbler\")\ndocpath = os.environ.get(\"DOCPATH\", \"share/man\")\netcpath = os.environ.get(\"ETCPATH\", \"/etc/cobbler\")\nlibpath = os.environ.get(\"LIBPATH\", \"/var/lib/cobbler\")\nlogpath = os.environ.get(\"LOG_PATH\", \"/var/log\")\ncompletion_path = os.environ.get(\n \"COMPLETION_PATH\", \"/usr/share/bash-completion/completions\"\n)\nstatepath = os.environ.get(\"STATEPATH\", \"/tmp/cobbler_settings/devinstall\")\nhttp_user = os.environ.get(\"HTTP_USER\", \"wwwrun\")\nhttpd_service = os.environ.get(\"HTTPD_SERVICE\", \"apache2.service\")\nwebconfig = os.environ.get(\"WEBCONFIG\", \"/etc/apache2/vhosts.d\")\nwebroot = os.environ.get(\"WEBROOT\", \"/srv/www\")\ntftproot = os.environ.get(\"TFTPROOT\", \"/srv/tftpboot\")\nbind_zonefiles = os.environ.get(\"ZONEFILES\", \"/var/lib/named/\")\nshim_folder = os.environ.get(\"SHIM_FOLDER\", \"/usr/share/efi/*/\")\nshim_file = os.environ.get(\"SHIM_FILE\", r\"shim\\.efi\")\nipxe_folder = os.environ.get(\"IPXE_FOLDER\", \"/usr/share/ipxe/\")\nmemdisk_folder = os.environ.get(\"MEMDISK_FOLDER\", \"/usr/share/syslinux\")\npxelinux_folder = os.environ.get(\"PXELINUX_FOLDER\", \"/usr/share/syslinux\")\nsyslinux_dir = os.environ.get(\"SYSLINUX_DIR\", \"/usr/share/syslinux\")\ngrub_mod_folder = os.environ.get(\"GRUB_MOD_FOLDER\", \"/usr/share/grub2\")\n\n\n#####################################################################\n# # Helper Functions #################################################\n#####################################################################\n\n\ndef glob(*args, **kwargs):\n recursive = kwargs.get(\"recursive\", False)\n results = []\n for arg in args:\n for elem in _glob.glob(arg):\n # Now check if we should handle/check those results.\n if os.path.isdir(elem):\n if os.path.islink(elem):\n # We skip symlinks\n pass\n else:\n # We only handle directories if recursive was specified\n if recursive:\n results.extend(\n # Add the basename of arg (the pattern) to elem and continue\n glob(\n os.path.join(elem, os.path.basename(arg)),\n recursive=True,\n )\n )\n else:\n # Always append normal files\n results.append(elem)\n return results\n\n\ndef read_readme_file():\n # read the contents of your README file\n this_directory = os.path.abspath(os.path.dirname(__file__))\n with open(os.path.join(this_directory, \"README.md\"), encoding=\"utf-8\") as f:\n return f.read()\n\n\n#####################################################################\n\n\n#####################################################################\n\n\ndef gen_build_version():\n buildepoch = int(os.environ.get(\"SOURCE_DATE_EPOCH\", time.time()))\n builddate = time.asctime(time.gmtime(buildepoch))\n\n gitloc = \"/usr/bin/git\"\n gitdate = \"?\"\n gitstamp = \"?\"\n if not 
os.path.isfile(gitloc):\n print(\"warning: \" + gitloc + \" not found\")\n else:\n cmd = subprocess.Popen(\n [gitloc, \"log\", \"--format=%h%n%ad\", \"-1\"], stdout=subprocess.PIPE\n )\n data = cmd.communicate()[0].strip()\n if cmd.returncode == 0:\n gitstamp, gitdate = data.decode(\"utf8\").split(\"\\n\")\n\n with open(os.path.join(OUTPUT_DIR, \"version\"), \"w+\") as version_file:\n config = ConfigParser()\n config.add_section(\"cobbler\")\n config.set(\"cobbler\", \"gitdate\", str(gitdate))\n config.set(\"cobbler\", \"gitstamp\", str(gitstamp))\n config.set(\"cobbler\", \"builddate\", builddate)\n config.set(\"cobbler\", \"version\", VERSION)\n config.set(\n \"cobbler\", \"version_tuple\", str([int(x) for x in VERSION.split(\".\")])\n )\n config.write(version_file)\n\n\n#####################################################################\n# # Custom Distribution Class ########################################\n#####################################################################\n\n\nclass Distribution(_Distribution):\n def __init__(self, *args, **kwargs):\n self.configure_files = []\n self.configure_values = {}\n self.man_pages = []\n _Distribution.__init__(self, *args, **kwargs)\n\n\n#####################################################################\n# # Modify Build Stage ##############################################\n#####################################################################\n\n\nclass build_py(_build_py):\n \"\"\"Specialized Python source builder.\"\"\"\n\n def run(self):\n gen_build_version()\n _build_py.run(self)\n\n\n#####################################################################\n# # Modify Build Stage ##############################################\n#####################################################################\n\n\nclass build(_build):\n \"\"\"Specialized Python source builder.\"\"\"\n\n def run(self):\n _build.run(self)\n\n\n#####################################################################\n# # Build man pages using Sphinx ###################################\n#####################################################################\n\n\nclass build_man(BuildDoc):\n def initialize_options(self):\n BuildDoc.initialize_options(self)\n self.builder = \"man\"\n\n\n#####################################################################\n# # Configure files ##################################################\n#####################################################################\n\n\nclass build_cfg(Command):\n\n description = \"configure files (copy and substitute options)\"\n\n user_options = [\n (\"install-base=\", None, \"base installation directory\"),\n (\n \"install-platbase=\",\n None,\n \"base installation directory for platform-specific files \",\n ),\n (\n \"install-purelib=\",\n None,\n \"installation directory for pure Python module distributions\",\n ),\n (\n \"install-platlib=\",\n None,\n \"installation directory for non-pure module distributions\",\n ),\n (\n \"install-lib=\",\n None,\n \"installation directory for all module distributions \"\n + \"(overrides --install-purelib and --install-platlib)\",\n ),\n (\"install-headers=\", None, \"installation directory for C/C++ headers\"),\n (\"install-scripts=\", None, \"installation directory for Python scripts\"),\n (\"install-data=\", None, \"installation directory for data files\"),\n (\"force\", \"f\", \"forcibly build everything (ignore file timestamps\"),\n ]\n\n boolean_options = [\"force\"]\n\n def initialize_options(self):\n self.build_dir = None\n self.force = None\n self.install_base = 
None\n self.install_platbase = None\n self.install_scripts = None\n self.install_data = None\n self.install_purelib = None\n self.install_platlib = None\n self.install_lib = None\n self.install_headers = None\n self.root = None\n\n def finalize_options(self):\n self.set_undefined_options(\n \"build\", (\"build_base\", \"build_dir\"), (\"force\", \"force\")\n )\n self.set_undefined_options(\n \"install\",\n (\"install_base\", \"install_base\"),\n (\"install_platbase\", \"install_platbase\"),\n (\"install_scripts\", \"install_scripts\"),\n (\"install_data\", \"install_data\"),\n (\"install_purelib\", \"install_purelib\"),\n (\"install_platlib\", \"install_platlib\"),\n (\"install_lib\", \"install_lib\"),\n (\"install_headers\", \"install_headers\"),\n (\"root\", \"root\"),\n )\n\n if self.root:\n # We need the unrooted versions of this values\n for name in (\"lib\", \"purelib\", \"platlib\", \"scripts\", \"data\", \"headers\"):\n attr = \"install_\" + name\n setattr(\n self, attr, \"/\" + os.path.relpath(getattr(self, attr), self.root)\n )\n\n # Check if we are running under a virtualenv\n if hasattr(sys, \"real_prefix\"):\n virtualenv = sys.prefix\n else:\n virtualenv = \"\"\n\n # The values to expand.\n self.configure_values = {\n \"python_executable\": sys.executable,\n \"virtualenv\": virtualenv,\n \"install_base\": os.path.normpath(self.install_base),\n \"install_platbase\": os.path.normpath(self.install_platbase),\n \"install_scripts\": os.path.normpath(self.install_scripts),\n \"install_data\": os.path.normpath(self.install_data),\n \"install_purelib\": os.path.normpath(self.install_purelib),\n \"install_platlib\": os.path.normpath(self.install_platlib),\n \"install_lib\": os.path.normpath(self.install_lib),\n \"install_headers\": os.path.normpath(self.install_headers),\n }\n self.configure_values.update(self.distribution.configure_values)\n\n def run(self):\n # On dry-run ignore missing source files.\n if self.dry_run:\n mode = \"newer\"\n else:\n mode = \"error\"\n # Work on all files\n for infile in self.distribution.configure_files:\n # We copy the files to build/\n outfile = os.path.join(self.build_dir, infile)\n # check if the file is out of date\n if self.force or dep_util.newer_group([infile, \"setup.py\"], outfile, mode):\n # It is. 
Configure it\n self.configure_one_file(infile, outfile)\n\n def configure_one_file(self, infile, outfile):\n self.announce(\"configuring %s\" % infile, 3)\n if not self.dry_run:\n # Read the file\n with codecs.open(infile, \"r\", \"utf-8\") as fh:\n before = fh.read()\n # Substitute the variables\n # Create the output directory if necessary\n outdir = os.path.dirname(outfile)\n if not os.path.exists(outdir):\n os.makedirs(outdir)\n # Write it into build/\n with codecs.open(outfile, \"w\", \"utf-8\") as fh:\n fh.write(self.substitute_values(before, self.configure_values))\n # The last step is to copy the permission bits\n shutil.copymode(infile, outfile)\n\n def substitute_values(self, string, values):\n for name, val in list(values.items()):\n # print(\"replacing @@%s@@ with %s\" % (name, val))\n string = string.replace(\"@@%s@@\" % (name), val)\n return string\n\n\ndef has_configure_files(build):\n \"\"\"Check if the distribution has configuration files to work on.\"\"\"\n return bool(build.distribution.configure_files)\n\n\ndef has_man_pages(build):\n \"\"\"Check if the distribution has configuration files to work on.\"\"\"\n return bool(build.distribution.man_pages)\n\n\nbuild.sub_commands.extend(\n ((\"build_man\", has_man_pages), (\"build_cfg\", has_configure_files))\n)\n\n\n#####################################################################\n# # Modify Install Stage ############################################\n#####################################################################\n\n\nclass install(_install):\n \"\"\"Specialised python package installer.\n\n It does some required chown calls in addition to the usual stuff.\n \"\"\"\n\n def __init__(self, *args):\n _install.__init__(self, *args)\n\n def change_owner(self, path, owner):\n user = pwd.getpwnam(owner)\n try:\n self.announce(\"changing mode of %s\" % path, 3)\n if not self.dry_run:\n # os.walk does not include the toplevel directory\n os.lchown(path, user.pw_uid, -1)\n # Now walk the directory and change them all\n for root, dirs, files in os.walk(path):\n for dirname in dirs:\n os.lchown(os.path.join(root, dirname), user.pw_uid, -1)\n for filename in files:\n os.lchown(os.path.join(root, filename), user.pw_uid, -1)\n except OSError as e:\n # We only check for errno = 1 (EPERM) here because its kinda\n # expected when installing as a non root user.\n if e.errno == 1:\n self.warn(\"Could not change owner: You have insufficient permissions.\")\n else:\n raise e\n\n def run(self):\n # Run the usual stuff.\n _install.run(self)\n\n # If --root wasn't specified default to /usr/local\n if self.root is None:\n self.root = \"/usr/local\"\n\n\n#####################################################################\n# # Test Command #####################################################\n#####################################################################\n\n\nclass test_command(Command):\n user_options = []\n\n def initialize_options(self):\n pass\n\n def finalize_options(self):\n pass\n\n def run(self):\n import pytest\n\n cov = Coverage()\n cov.erase()\n cov.start()\n\n result = pytest.main()\n\n cov.stop()\n cov.save()\n cov.html_report(directory=\"covhtml\")\n sys.exit(int(bool(len(result.failures) > 0 or len(result.errors) > 0)))\n\n\n#####################################################################\n# # state command base class #########################################\n#####################################################################\n\n\nclass statebase(Command):\n\n user_options = [\n (\"statepath=\", None, 
\"directory to backup configuration\"),\n (\"root=\", None, \"install everything relative to this alternate root directory\"),\n ]\n\n def initialize_options(self):\n self.statepath = statepath\n self.root = None\n\n def finalize_options(self):\n pass\n\n def _copy(self, frm, to):\n if os.path.isdir(frm):\n to = os.path.join(to, os.path.basename(frm))\n self.announce(\"copying %s/ to %s/\" % (frm, to), 3)\n if not self.dry_run:\n if os.path.exists(to):\n shutil.rmtree(to)\n shutil.copytree(frm, to)\n else:\n self.announce(\n \"copying %s to %s\" % (frm, os.path.join(to, os.path.basename(frm))), 3\n )\n if not self.dry_run:\n shutil.copy2(frm, to)\n\n\n#####################################################################\n# # restorestate command #############################################\n#####################################################################\n\n\nclass restorestate(statebase):\n def _copy(self, frm, to):\n if self.root:\n to = self.root + to\n statebase._copy(self, frm, to)\n\n def run(self):\n self.announce(\"restoring the current configuration from %s\" % self.statepath, 3)\n if not os.path.exists(self.statepath):\n self.warn(\"%s does not exist. Skipping\" % self.statepath)\n return\n self._copy(os.path.join(self.statepath, \"collections\"), libpath)\n self._copy(os.path.join(self.statepath, \"cobbler.conf\"), webconfig)\n self._copy(os.path.join(self.statepath, \"settings.yaml\"), etcpath)\n self._copy(os.path.join(self.statepath, \"users.conf\"), etcpath)\n self._copy(os.path.join(self.statepath, \"users.digest\"), etcpath)\n self._copy(os.path.join(self.statepath, \"dhcp.template\"), etcpath)\n self._copy(os.path.join(self.statepath, \"dhcp6.template\"), etcpath)\n self._copy(os.path.join(self.statepath, \"rsync.template\"), etcpath)\n\n\n#####################################################################\n# # savestate command ################################################\n#####################################################################\n\n\nclass savestate(statebase):\n\n description = \"Backup the current configuration to /tmp/cobbler_settings.\"\n\n def _copy(self, frm, to):\n if self.root:\n frm = self.root + frm\n statebase._copy(self, frm, to)\n\n def run(self):\n self.announce(\"backing up the current configuration to %s\" % self.statepath, 3)\n if os.path.exists(self.statepath):\n self.announce(\"deleting existing %s\" % self.statepath, 3)\n if not self.dry_run:\n shutil.rmtree(self.statepath)\n if not self.dry_run:\n os.makedirs(self.statepath)\n self._copy(os.path.join(libpath, \"collections\"), self.statepath)\n self._copy(os.path.join(webconfig, \"cobbler.conf\"), self.statepath)\n self._copy(os.path.join(etcpath, \"settings.yaml\"), self.statepath)\n self._copy(os.path.join(etcpath, \"users.conf\"), self.statepath)\n self._copy(os.path.join(etcpath, \"users.digest\"), self.statepath)\n self._copy(os.path.join(etcpath, \"dhcp.template\"), self.statepath)\n self._copy(os.path.join(etcpath, \"dhcp6.template\"), self.statepath)\n self._copy(os.path.join(etcpath, \"rsync.template\"), self.statepath)\n\n\n#####################################################################\n# # Actual Setup.py Script ###########################################\n#####################################################################\n\n\nif __name__ == \"__main__\":\n setup(\n distclass=Distribution,\n cmdclass={\n \"build\": build,\n \"build_py\": build_py,\n \"test\": test_command,\n \"install\": install,\n \"savestate\": savestate,\n \"restorestate\": 
restorestate,\n \"build_cfg\": build_cfg,\n \"build_man\": build_man,\n },\n name=\"cobbler\",\n version=VERSION,\n description=\"Network Boot and Update Server\",\n long_description=read_readme_file(),\n long_description_content_type=\"text/markdown\",\n author=\"Team Cobbler\",\n author_email=\"[email protected]\",\n project_urls={\n \"Website\": \"https://cobbler.github.io\",\n \"Documentation (Users)\": \"https://cobbler.readthedocs.io/en/latest\",\n \"Documentation (Devs)\": \"https://github.com/cobbler/cobbler/wiki\",\n \"Source\": \"https://github.com/cobbler/cobbler\",\n \"Tracker\": \"https://github.com/cobbler/cobbler/issues\",\n },\n license=\"GPLv2+\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)\",\n \"Programming Language :: Python :: 3.6\",\n \"Topic :: System :: Installation/Setup\",\n \"Topic :: System :: Systems Administration\",\n \"Intended Audience :: System Administrators\",\n \"Natural Language :: English\",\n \"Operating System :: POSIX :: Linux\",\n ],\n keywords=[\"pxe\", \"autoinstallation\", \"dhcp\", \"tftp\", \"provisioning\"],\n install_requires=[\n \"requests\",\n \"pyyaml\",\n \"netaddr\",\n \"Cheetah3\",\n \"pymongo\",\n \"distro\",\n \"python-ldap\",\n \"dnspython\",\n \"file-magic\",\n \"schema\",\n \"gunicorn\",\n ],\n extras_require={\n \"lint\": [\"pyflakes\", \"pycodestyle\"],\n \"test\": [\"pytest\", \"pytest-cov\", \"codecov\", \"pytest-mock\"],\n },\n packages=find_packages(exclude=[\"*tests*\"]),\n scripts=[\n \"bin/cobbler\",\n \"bin/cobblerd\",\n \"bin/cobbler-ext-nodes\",\n \"bin/cobbler-settings\",\n ],\n configure_values={\n \"webroot\": os.path.normpath(webroot),\n \"tftproot\": os.path.normpath(tftproot),\n \"httpd_service\": httpd_service,\n \"bind_zonefiles\": bind_zonefiles,\n \"shim_folder\": shim_folder,\n \"shim_file\": shim_file,\n \"ipxe_folder\": ipxe_folder,\n \"memdisk_folder\": memdisk_folder,\n \"pxelinux_folder\": pxelinux_folder,\n \"syslinux_dir\": syslinux_dir,\n \"grub_mod_folder\": grub_mod_folder,\n },\n configure_files=[\n \"config/apache/cobbler.conf\",\n \"config/nginx/cobbler.conf\",\n \"config/cobbler/settings.yaml\",\n \"config/service/cobblerd.service\",\n \"templates/etc/named.template\",\n \"templates/etc/secondary.template\",\n ],\n man_pages=[\"docs/cobblerd.rst\", \"docs/cobbler-conf.rst\", \"docs/cobbler.rst\"],\n data_files=[\n (\"%s\" % webconfig, [\"build/config/apache/cobbler.conf\"]),\n (\"%s/templates\" % libpath, glob(\"autoinstall_templates/*\")),\n (\n \"%s/templates/install_profiles\" % libpath,\n glob(\"autoinstall_templates/install_profiles/*\"),\n ),\n (\"%s/snippets\" % libpath, glob(\"autoinstall_snippets/*\", recursive=True)),\n (\"%s/scripts\" % libpath, glob(\"autoinstall_scripts/*\")),\n (\"%s\" % libpath, [\"config/cobbler/distro_signatures.json\"]),\n (\"share/cobbler/bin\", glob(\"scripts/*\")),\n (\"%s/loaders\" % libpath, []),\n (\"%s/misc\" % libpath, glob(\"misc/*\")),\n # Configuration\n (f\"{etcpath}/apache\", [\"build/config/apache/cobbler.conf\"]),\n (f\"{etcpath}/nginx\", [\"build/config/nginx/cobbler.conf\"]),\n (\n \"%s\" % etcpath,\n [\n \"build/config/service/cobblerd.service\",\n \"build/config/cobbler/settings.yaml\",\n ],\n ),\n (\n \"%s\" % etcpath,\n [\n \"config/cobbler/auth.conf\",\n \"config/cobbler/users.conf\",\n \"config/cobbler/users.digest\",\n \"config/cheetah/cheetah_macros\",\n \"config/rotate/cobblerd_rotate\",\n \"config/rsync/import_rsync_whitelist\",\n 
\"config/rsync/rsync.exclude\",\n \"config/version\",\n ],\n ),\n (\"%s\" % etcpath, glob(\"cobbler/etc/*\")),\n (\n \"%s\" % etcpath,\n [\n \"templates/etc/named.template\",\n \"templates/etc/genders.template\",\n \"templates/etc/secondary.template\",\n \"templates/etc/zone.template\",\n \"templates/etc/dnsmasq.template\",\n \"templates/etc/rsync.template\",\n \"templates/etc/dhcp.template\",\n \"templates/etc/dhcp6.template\",\n \"templates/etc/ndjbdns.template\",\n ],\n ),\n (\"%s/iso\" % etcpath, glob(\"templates/iso/*\")),\n (\"%s/boot_loader_conf\" % etcpath, glob(\"templates/boot_loader_conf/*\")),\n # completion_file\n (\"%s\" % completion_path, [\"config/bash/completion/cobbler\"]),\n (\"%s/grub_config\" % libpath, glob(\"config/grub/*\")),\n # ToDo: Find a nice way to copy whole config/grub structure recursively\n # files\n (\"%s/grub_config/grub\" % libpath, glob(\"config/grub/grub/*\")),\n # dirs\n (\"%s/grub_config/grub/system\" % libpath, []),\n (\"%s/grub_config/grub/system_link\" % libpath, []),\n (\"%s/reporting\" % etcpath, glob(\"templates/reporting/*\")),\n # logfiles\n (\"%s/cobbler/kicklog\" % logpath, []),\n (\"%s/cobbler/syslog\" % logpath, []),\n (\"%s/httpd/cobbler\" % logpath, []),\n (\"%s/cobbler/anamon\" % logpath, []),\n (\"%s/cobbler/tasks\" % logpath, []),\n # zone-specific templates directory\n (\"%s/zone_templates\" % etcpath, glob(\"templates/zone_templates/*\")),\n # windows-specific templates directory\n (\"%s/windows\" % etcpath, glob(\"templates/windows/*\")),\n (\"%s\" % etcpath, [\"config/cobbler/logging_config.conf\"]),\n # man pages\n (\"%s/man1\" % docpath, glob(\"build/sphinx/man/*.1\")),\n (\"%s/man5\" % docpath, glob(\"build/sphinx/man/*.5\")),\n (\"%s/man8\" % docpath, glob(\"build/sphinx/man/*.8\")),\n (\"%s/tests\" % datadir, glob(\"tests/*.py\")),\n (\"%s/tests/cli\" % datadir, glob(\"tests/cli/*.py\")),\n (\"%s/tests/modules\" % datadir, glob(\"tests/modules/*.py\")),\n (\n \"%s/tests/modules/authentication\" % datadir,\n glob(\"tests/modules/authentication/*.py\"),\n ),\n (\"%s/tests/xmlrpcapi\" % datadir, glob(\"tests/xmlrpcapi/*.py\")),\n ],\n )\n", "path": "setup.py" } ]
diff --git a/Makefile b/Makefile index f26007007c..09fc79f393 100644 --- a/Makefile +++ b/Makefile @@ -148,15 +148,11 @@ rpms: release ## Runs the target release and then creates via rpmbuild the rpms -ba cobbler.spec # Only build a binary package -debs: release ## Runs the target release and then creates via debbuild the debs in a directory called deb-build. - mkdir -p deb-build - mkdir -p deb-build/{BUILD,BUILDROOT,DEBS,SDEBS,SOURCES} - cp dist/*.gz deb-build/ - debbuild --define "_topdir %(pwd)/deb-build" \ - --define "_builddir %{_topdir}" \ - --define "_specdir %{_topdir}" \ - --define "_sourcedir %{_topdir}" \ - -vv -bb cobbler.spec +debs: authors ## Creates native debs in a directory called deb-build. The release target is called during the build process. + @source distro_build_configs.sh; \ + debuild -us -uc + @mkdir -p deb-build; \ + cp ../cobbler_* deb-build/ eraseconfig: ## Deletes the cobbler data jsons which are created when using the file provider. -rm /var/lib/cobbler/cobbler_collections/distros/* diff --git a/cobbler.spec b/cobbler.spec index 4b8adc8cd7..b2fe60fdf9 100644 --- a/cobbler.spec +++ b/cobbler.spec @@ -13,14 +13,10 @@ # published by the Open Source Initiative. # # Supported/tested build targets: -# - Fedora: 30, 31, Rawhide -# - CentOS + EPEL: 7, 8 +# - Fedora: 34 +# - CentOS + EPEL: 8 # - SLE: 15sp1 -# - openSUSE: Leap 15.1, Tumbleweed -# - Debian: 10 -# - Ubuntu: 18.04 -# -# If it doesn't build on the Open Build Service (OBS) it's a bug. +# - openSUSE: Leap 15.4, Tumbleweed # # Force bash instead of Debian dash @@ -87,27 +83,6 @@ # endif SUSE %endif -# UBUNTU -%if 0%{?debian} || 0%{?ubuntu} -%define apache_user www-data -%define apache_group www-data - -%define apache_webconfigdir /etc/apache2/conf-available - -%define tftpsrv_pkg tftpd-hpa -%define createrepo_pkg createrepo -%define grub2_x64_efi_pkg grub-efi-amd64 -%define grub2_ia32_efi_pkg grub-efi-ia32 -%define system_release_pkg base-files - -# Debian 11 moved to the C implementation of createrepo -%if 0%{?debian} == 11 -%define createrepo_pkg createrepo-c -%endif - -#endif UBUNTU -%endif - #FEDORA %if 0%{?fedora} || 0%{?rhel} %define apache_user apache @@ -141,22 +116,12 @@ # To ensure correct byte compilation %global __python %{__python3} -%if "%{_vendor}" == "debbuild" -%global devsuffix dev -%else -%global devsuffix devel -%endif - Name: cobbler Version: 3.4.0 Release: 1%{?dist} Summary: Boot server configurator URL: https://cobbler.github.io/ -%if "%{_vendor}" == "debbuild" -Packager: Cobbler Developers <[email protected]> -Group: admin -%endif %if 0%{?suse_version} Group: Productivity/Networking/Boot/Servers %else @@ -169,14 +134,9 @@ BuildArch: noarch BuildRequires: git-core BuildRequires: %{system_release_pkg} -BuildRequires: python%{python3_pkgversion}-%{devsuffix} +BuildRequires: python%{python3_pkgversion}-devel %if 0%{?suse_version} BuildRequires: python-rpm-macros -%endif -%if "%{_vendor}" == "debbuild" -BuildRequires: python3-deb-macros -BuildRequires: apache2-deb-macros - %endif BuildRequires: %{py3_module_coverage} BuildRequires: python%{python3_pkgversion}-distro @@ -202,12 +162,6 @@ BuildRequires: systemd %if 0%{?fedora} >= 30 || 0%{?rhel} >= 9 || 0%{?suse_version} BuildRequires: systemd-rpm-macros %endif -%if "%{_vendor}" == "debbuild" -BuildRequires: systemd-deb-macros -Requires: systemd-sysv -Requires(post): python3-minimal -Requires(preun): python3-minimal -%endif Requires(post): systemd Requires(preun): systemd Requires(postun): systemd @@ -253,11 +207,7 @@ Recommends: logrotate 
Recommends: python%{python3_pkgversion}-librepo %endif # https://github.com/cobbler/cobbler/issues/1685 -%if "%{_vendor}" == "debbuild" -Requires: init-system-helpers -%else Requires: /sbin/service -%endif # No point in having this split out... Obsoletes: cobbler-nsupdate < 3.0.99 Provides: cobbler-nsupdate = %{version}-%{release} @@ -279,11 +229,6 @@ Unit test files from the Cobbler project %prep %setup -%if 0%{?suse_version} -# Set tftpboot location correctly for SUSE distributions -sed -e "s|/var/lib/tftpboot|%{tftpboot_dir}|g" -i config/cobbler/settings.yaml -%endif - %build . distro_build_configs.sh @@ -323,11 +268,7 @@ ln -sf service %{buildroot}%{_sbindir}/rccobblerd %pre -%if "%{_vendor}" == "debbuild" -if [ "$1" = "upgrade" ]; then -%else if [ $1 -ge 2 ]; then -%endif # package upgrade: backup configuration DATE=$(date "+%%Y%%m%%d-%%H%%M%%S") if [ ! -d "%{_sharedstatedir}/cobbler/backup/upgrade-${DATE}" ]; then @@ -343,25 +284,6 @@ if [ $1 -ge 2 ]; then fi fi -%if "%{_vendor}" == "debbuild" -%post -%{py3_bytecompile_post %{name}} -%{systemd_post cobblerd.service} -%{apache2_module_post proxy_http} -# Fixup permission for world readable settings files -chmod 640 %{_sysconfdir}/cobbler/settings.yaml -chmod 640 %{_sysconfdir}/cobbler/users.conf -chmod 640 %{_sysconfdir}/cobbler/users.digest -chgrp %{apache_group} %{_sysconfdir}/cobbler/settings.yaml - -%preun -%{py3_bytecompile_preun %{name}} -%{systemd_preun cobblerd.service} - -%postun -%{systemd_postun_with_restart cobblerd.service} - -%else %post %systemd_post cobblerd.service # Fixup permission for world readable settings files @@ -375,7 +297,6 @@ chgrp %{apache_group} %{_sysconfdir}/cobbler/settings.yaml %postun %systemd_postun_with_restart cobblerd.service -%endif %files %license COPYING @@ -410,17 +331,9 @@ chgrp %{apache_group} %{_sysconfdir}/cobbler/settings.yaml %config(noreplace) %{_sysconfdir}/cobbler/rsync.exclude %config(noreplace) %{_sysconfdir}/cobbler/rsync.template %config(noreplace) %{_sysconfdir}/cobbler/secondary.template -%if "%{_vendor}" == "debbuild" -# Work around broken attr support -# Cf. 
https://github.com/debbuild/debbuild/issues/160 -%attr(640, root, root) %config(noreplace) %{_sysconfdir}/cobbler/settings.yaml -%attr(640, root, root) %config(noreplace) %{_sysconfdir}/cobbler/users.conf -%attr(640, root, root) %config(noreplace) %{_sysconfdir}/cobbler/users.digest -%else %attr(640, root, %{apache_group}) %config(noreplace) %{_sysconfdir}/cobbler/settings.yaml %attr(640, root, root) %config(noreplace) %{_sysconfdir}/cobbler/users.conf %attr(640, root, root) %config(noreplace) %{_sysconfdir}/cobbler/users.digest -%endif %config(noreplace) %{_sysconfdir}/cobbler/version %config(noreplace) %{_sysconfdir}/cobbler/zone.template %dir %{_sysconfdir}/cobbler/zone_templates diff --git a/debian/changelog b/debian/changelog new file mode 100644 index 0000000000..d8b5bc19b5 --- /dev/null +++ b/debian/changelog @@ -0,0 +1,73 @@ +cobbler (3.4.0) unstable; urgency=low + + * Main Upgrade to Cobbler 3.4.0 + + -- Enno Gotthold <[email protected]> Wed, 21 Sep 2022 09:30:00 +0200 + +cobbler (2.8.2) unstable; urgency=low + + * Maintenance release in the Cobbler 2.8 series + + -- Jeremy Brown <[email protected]> Wed, 21 Sep 2017 09:30:00 -0700 + +cobbler (2.6.7-1) unstable; urgency=low + + * Maintenance release in the Cobbler 2.6 series + + -- Jorgen Maas <[email protected]> Wed, 31 Dec 2014 00:19:50 +0200 + +cobbler (2.6.6-1) unstable; urgency=low + + * Maintenance release in the Cobbler 2.6 series + + -- Jorgen Maas <[email protected]> Sun, 19 Oct 2014 00:19:50 +0200 + +cobbler (2.6.5-1) unstable; urgency=low + + * Maintenance release in the Cobbler 2.6 series + + -- Jorgen Maas <[email protected]> Fri, 15 Aug 2014 00:19:50 +0200 + +cobbler (2.6.4-1) unstable; urgency=low + + * Maintenance release in the Cobbler 2.6 series + + -- Jorgen Maas <[email protected]> Fri, 08 Aug 2014 00:19:50 +0200 + +cobbler (2.6.3-1) unstable; urgency=low + + * Maintenance release in the Cobbler 2.6 series + + -- Jorgen Maas <[email protected]> Fri, 18 Jul 2014 00:19:50 +0200 + +cobbler (2.6.2-1) unstable; urgency=low + + * Maintenance release in the Cobbler 2.6 series + + -- Jorgen Maas <[email protected]> Thu, 15 Jul 2014 00:19:50 +0200 + +cobbler (2.6.1-1) unstable; urgency=low + + * Maintenance release in the Cobbler 2.6 series + + -- Jorgen Maas <[email protected]> Thu, 22 May 2014 00:19:50 +0200 + +cobbler (2.6.0-1.2) unstable; urgency=low + + * Set proper paths in config files + + -- Adrian Brzezinski <[email protected]> Wed, 23 Apr 2014 00:19:50 +0200 + +cobbler (2.6.0-1.1) unstable; urgency=low + + * Fixed virt-install version check - support for non rpm based distributions + + -- Adrian Brzezinski <[email protected]> Fri, 18 Apr 2014 19:09:14 +0200 + +cobbler (2.6.0-1) unstable; urgency=low + + * Initial release. Orginal package splitted into two: cobbler and koan. + + -- Adrian Brzezinski <[email protected]> Fri, 18 Apr 2014 18:24:22 +0200 + + diff --git a/debian/cobbler.docs b/debian/cobbler.docs new file mode 100644 index 0000000000..42061c01a1 --- /dev/null +++ b/debian/cobbler.docs @@ -0,0 +1 @@ +README.md \ No newline at end of file diff --git a/debian/cobbler.postinst b/debian/cobbler.postinst new file mode 100644 index 0000000000..b5ced89fc2 --- /dev/null +++ b/debian/cobbler.postinst @@ -0,0 +1,9 @@ +#!/bin/sh + +# This directory is required because per default we log to it. +mkdir -p "/var/log/cobbler" + +# dh_installdeb will replace this with shell code automatically +# generated by other debhelper scripts. 
+ +#DEBHELPER# diff --git a/debian/compat b/debian/compat new file mode 100644 index 0000000000..3cacc0b93c --- /dev/null +++ b/debian/compat @@ -0,0 +1 @@ +12 \ No newline at end of file diff --git a/debian/control b/debian/control new file mode 100644 index 0000000000..f55209516a --- /dev/null +++ b/debian/control @@ -0,0 +1,45 @@ +Source: cobbler +Section: admin +Priority: optional +Maintainer: The Cobbler Authors <[email protected]> +Build-Depends: + git-core, + python3 (>=3.6), + python3-distro, + python3-setuptools, + python3-netaddr, + python3-requests, + python3-schema, + python3-cheetah, + python3-dns, + python3-sphinx, + dh-python, + debhelper (>=12) +Standards-Version: 4.5.1 +Version: 3.4.0 +Homepage: https://cobbler.github.io/ + +Package: cobbler +Architecture: all +Depends: + systemd, + apache2 | httpd, + tftpd-hpa | atftpd, + fence-agents, + rsync, + xorriso, + python3, + ${python3:Depends}, + ${misc:Depends} +Suggests: + createrepo-c, + createrepo, + logrotate, + python3-librepo +Description: Install server + Cobbler is a PXE based network install server. + Cobbler's advanced features include importing distributions + from DVDs and rsync mirrors, automatic installation templating, + integrated yum mirroring, and built-in DHCP/DNS Management. + Cobbler has a Python and XMLRPC API for integration with other + applications. There is also a web interface available. diff --git a/debian/copyright b/debian/copyright new file mode 100644 index 0000000000..84bc587ab4 --- /dev/null +++ b/debian/copyright @@ -0,0 +1,10 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: cobbler +Upstream-Contact: Enno Gotthold <[email protected]> +Source: https://github.com/cobbler/cobbler + +Files: * +Copyright: 2006-2022 The Cobbler Team +License: GPL-2+ + +# TODO: Add individual copyright of all files having explicit spdx headers diff --git a/debian/rules b/debian/rules new file mode 100755 index 0000000000..656c6e8cde --- /dev/null +++ b/debian/rules @@ -0,0 +1,27 @@ +#!/usr/bin/make -f +# -*- makefile -*- + +export DH_OPTIONS + +# Verbose mode +#export DH_VERBOSE=1 +export PYBUILD_NAME=cobbler +export PYBUILD_OPTION_INTERPRETER="-B" + +# Use Bash so we can set the required environment variables +SHELL = /bin/bash + +%: + dh $@ --with python3 --buildsystem pybuild + +override_dh_auto_build: + @source ./distro_build_configs.sh; \ + dh_auto_build + +override_dh_auto_clean: + dh_auto_clean + rm -rf docs/_build + +override_dh_auto_install: + @source ./distro_build_configs.sh; \ + dh_auto_install diff --git a/debian/source/format b/debian/source/format new file mode 100644 index 0000000000..89ae9db8f8 --- /dev/null +++ b/debian/source/format @@ -0,0 +1 @@ +3.0 (native) diff --git a/distro_build_configs.sh b/distro_build_configs.sh index 6c51bf11b2..60c30fc0ad 100644 --- a/distro_build_configs.sh +++ b/distro_build_configs.sh @@ -60,6 +60,8 @@ elif [ "$DISTRO" = "UBUNTU" ];then export MEMDISK_FOLDER="/usr/lib/syslinux/" export SYSLINUX_DIR="/usr/lib/syslinux/modules/bios/" export GRUB_MOD_FOLDER="/usr/lib/grub" + # This is required so that no byte code for Python is generated + export PYTHONDONTWRITEBYTECODE=1 elif [ "$DISTRO" = "FEDORA" ];then export APACHE_USER="apache" export HTTP_USER=$APACHE_USER # overrule setup.py diff --git a/docker/debs/Debian_10/Debian10.dockerfile b/docker/debs/Debian_10/Debian10.dockerfile index 511ac90f5e..935e87b418 100644 --- a/docker/debs/Debian_10/Debian10.dockerfile +++ b/docker/debs/Debian_10/Debian10.dockerfile @@ -12,13 
+12,13 @@ ENV OSCODENAME buster # Add repo for debbuild and install all packages required # hadolint ignore=DL3008,DL3015,DL4006 RUN apt-get update -qq && \ - apt-get install -qqy gnupg curl && \ - /bin/sh -c "echo 'deb http://download.opensuse.org/repositories/Debian:/debbuild/Debian_10/ /' > /etc/apt/sources.list.d/debbuild.list" && \ - curl -sL http://download.opensuse.org/repositories/Debian:/debbuild/Debian_10/Release.key | apt-key add - && \ - apt-get update -qq && \ apt-get install -qqy \ - debbuild \ - debbuild-macros \ + build-essential \ + devscripts \ + dh-python \ + debhelper \ + gnupg \ + curl \ wget \ pycodestyle \ pyflakes3 \ @@ -36,6 +36,7 @@ RUN apt-get update -qq && \ python3-netaddr \ python3-pip \ python3-pycodestyle \ + python3-pymongo \ python3-pytest \ python3-setuptools \ python3-sphinx \ diff --git a/docker/debs/Debian_11/Debian11.dockerfile b/docker/debs/Debian_11/Debian11.dockerfile index 662dc388c3..6bd42dc1da 100644 --- a/docker/debs/Debian_11/Debian11.dockerfile +++ b/docker/debs/Debian_11/Debian11.dockerfile @@ -1,6 +1,6 @@ # vim: ft=dockerfile -FROM debian:11 +FROM docker.io/library/debian:11 ENV DEBIAN_FRONTEND noninteractive @@ -12,13 +12,13 @@ ENV OSCODENAME bullseye # Add repo for debbuild and install all packages required # hadolint ignore=DL3008,DL3015,DL4006 RUN apt-get update -qq && \ - apt-get install -qqy gnupg curl && \ - /bin/sh -c "echo 'deb http://download.opensuse.org/repositories/Debian:/debbuild/Debian_11/ /' > /etc/apt/sources.list.d/debbuild.list" && \ - curl -sL http://download.opensuse.org/repositories/Debian:/debbuild/Debian_11/Release.key | apt-key add - && \ - apt-get update -qq && \ apt-get install -qqy \ - debbuild \ - debbuild-macros \ + build-essential \ + devscripts \ + dh-python \ + debhelper \ + gnupg \ + curl \ wget \ pycodestyle \ pyflakes3 \ @@ -36,6 +36,7 @@ RUN apt-get update -qq && \ python3-netaddr \ python3-pip \ python3-pycodestyle \ + python3-pymongo \ python3-pytest \ python3-setuptools \ python3-simplejson \ diff --git a/docker/debs/build-and-install-debs.sh b/docker/debs/build-and-install-debs.sh index 3350b9636c..ad4dabb745 100755 --- a/docker/debs/build-and-install-debs.sh +++ b/docker/debs/build-and-install-debs.sh @@ -3,6 +3,7 @@ set -euo pipefail +SKIP_BUILD=true RUN_TESTS=false RUN_SYSTEM_TESTS=false EXECUTOR=docker @@ -22,6 +23,11 @@ if [ "${1}" == "--with-podman" ]; then shift fi +if [ "${1}" == "--skip-build" ]; then + SKIP_BUILD=false + shift +fi + TAG=$1 DOCKERFILE=$2 @@ -29,19 +35,27 @@ IMAGE=cobbler:$TAG # Build container echo "==> Build container ..." -$EXECUTOR build -t "$IMAGE" -f "$DOCKERFILE" . +if [[ "$EXECUTOR" == "podman" ]] +then + podman build --format docker -t "$IMAGE" -f "$DOCKERFILE" . +else + docker build -t "$IMAGE" -f "$DOCKERFILE" . +fi -# Build DEBs -echo "==> Build packages ..." -mkdir -p deb-build tmp -$EXECUTOR run -ti -v "$PWD/deb-build:/usr/src/cobbler/deb-build" -v "$PWD/tmp:/var/tmp" "$IMAGE" +if $SKIP_BUILD +then + # Build DEBs + echo "==> Build packages ..." + mkdir -p deb-build + $EXECUTOR run -ti -v "$PWD/deb-build:/usr/src/cobbler/deb-build" "$IMAGE" +fi # Launch container and install cobbler echo "==> Start container ..." $EXECUTOR run --cap-add=NET_ADMIN -t -d --name cobbler -v "$PWD/deb-build:/usr/src/cobbler/deb-build" "$IMAGE" /bin/bash echo "==> Install fresh packages ..." -$EXECUTOR exec -it cobbler bash -c 'dpkg -i deb-build/DEBS/all/cobbler*.deb' +$EXECUTOR exec -it cobbler bash -c 'dpkg -i deb-build/cobbler*.deb' echo "==> Restart Apache and Cobbler daemon ..." 
$EXECUTOR exec -it cobbler bash -c 'a2enmod proxy && a2enmod proxy_http' @@ -56,8 +70,8 @@ $EXECUTOR exec -it cobbler bash -c 'mkdir /var/www/cobbler' echo "==> Start Supervisor" $EXECUTOR exec -it cobbler bash -c 'supervisord -c /etc/supervisor/supervisord.conf' -echo "==> Wait 20 sec. and show Cobbler version ..." -$EXECUTOR exec -it cobbler bash -c 'sleep 20 && cobbler --version' +echo "==> Wait 10 sec. and show Cobbler version ..." +$EXECUTOR exec -it cobbler bash -c 'sleep 10 && cobbler --version' if $RUN_TESTS then @@ -84,4 +98,3 @@ echo "==> Stop Cobbler container ..." $EXECUTOR stop cobbler echo "==> Delete Cobbler container ..." $EXECUTOR rm cobbler -rm -rf ./tmp diff --git a/docker/debs/install-debs.sh b/docker/debs/install-debs.sh deleted file mode 100755 index 05dcf1fd7d..0000000000 --- a/docker/debs/install-debs.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -# Utility script to run Docker container without building the DEBs, -# just install them. So make sure they are in deb-build dir! - -if [ "$1" == "--with-tests" ] -then - RUN_TESTS=true - shift -else - RUN_TESTS=false -fi - -TAG=$1 -IMAGE=cobbler:$TAG - -# Launch container and install cobbler -docker run -d --privileged -v /sys/fs/cgroup:/sys/fs/cgroup:ro --name cobbler -v "$PWD/deb-build:/usr/src/cobbler/deb-build" "$IMAGE" /lib/systemd/systemd --system - -docker exec -it cobbler bash -c 'dpkg -i deb-build/DEBS/all/cobbler*.deb' -docker exec -it cobbler bash -c 'a2enmod proxy proxy_http && a2enconf cobbler' -docker exec -it cobbler bash -c 'systemctl daemon-reload && systemctl restart apache2 cobblerd' -docker exec -it cobbler bash -c 'sleep 3 && cobbler --version' - -if $RUN_TESTS -then - # Most of these requirement are already satisfied in the Dockerfiles! - docker exec -it cobbler bash -c 'pip3 install coverage distro future setuptools sphinx requests future' - docker exec -it cobbler bash -c 'pip3 install pyyaml netaddr Cheetah3 pymongo distro librepo' - docker exec -it cobbler bash -c 'pip3 install dnspython pyflakes pycodestyle pytest pytest-cov codecov' - docker exec -it cobbler bash -c 'pytest-3' -fi - -# Entering the running container -docker exec -ti cobbler bash diff --git a/docker/rpms/Fedora_34/Fedora34.dockerfile b/docker/rpms/Fedora_34/Fedora34.dockerfile index ac131baf64..f6a3e4a607 100644 --- a/docker/rpms/Fedora_34/Fedora34.dockerfile +++ b/docker/rpms/Fedora_34/Fedora34.dockerfile @@ -38,6 +38,7 @@ RUN yum install -y \ python3-ldap \ python3-librepo \ python3-pymongo \ + python3-gunicorn \ python3-schema \ createrepo_c \ dnf-plugins-core \ diff --git a/docker/rpms/build-and-install-rpms.sh b/docker/rpms/build-and-install-rpms.sh index ec6b702ec9..d9ae77cd40 100755 --- a/docker/rpms/build-and-install-rpms.sh +++ b/docker/rpms/build-and-install-rpms.sh @@ -3,6 +3,7 @@ set -eo pipefail +SKIP_BUILD=true RUN_TESTS=false RUN_SYSTEM_TESTS=false EXECUTOR=docker @@ -22,6 +23,10 @@ if [ "${1}" == "--with-podman" ]; then shift fi +if [ "${1}" == "--skip-build" ]; then + SKIP_BUILD=false + shift +fi TAG=$1 DOCKERFILE=$2 @@ -30,12 +35,20 @@ IMAGE=cobbler:$TAG # Build container echo "==> Build container ..." -$EXECUTOR build -t "$IMAGE" -f "$DOCKERFILE" . +if [[ "$EXECUTOR" == "podman" ]] +then + podman build --format docker -t "$IMAGE" -f "$DOCKERFILE" . +else + docker build -t "$IMAGE" -f "$DOCKERFILE" . +fi -# Build RPMs -echo "==> Build RPMs ..." -mkdir -p rpm-build -$EXECUTOR run -t -v "$PWD/rpm-build:/usr/src/cobbler/rpm-build" "$IMAGE" +if $SKIP_BUILD +then + # Build RPMs + echo "==> Build RPMs ..." 
+ mkdir -p rpm-build + $EXECUTOR run -t -v "$PWD/rpm-build:/usr/src/cobbler/rpm-build" "$IMAGE" +fi # Launch container and install cobbler echo "==> Start container ..." diff --git a/docker/rpms/install-rpms.sh b/docker/rpms/install-rpms.sh deleted file mode 100755 index de8a0db66a..0000000000 --- a/docker/rpms/install-rpms.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# Utility script to run Docker container without building the RPMs, -# just install them. So make sure they are in rpm-build dir! - -if [ "$1" == "--with-tests" ] -then - RUN_TESTS=true - shift -else - RUN_TESTS=false -fi - -TAG=$1 -IMAGE=cobbler:$TAG - -# Launch container and install cobbler -echo "==> Start privileged container with systemd ..." -docker run -d --privileged -v /sys/fs/cgroup:/sys/fs/cgroup:ro --name cobbler -v "$PWD/rpm-build:/usr/src/cobbler/rpm-build" "$IMAGE" /usr/lib/systemd/systemd --system -echo "==> Install fresh RPMs ..." -docker exec -it cobbler bash -c 'rpm -Uvh rpm-build/cobbler-*.noarch.rpm' - -echo "==> Wait 3 sec. and show Cobbler version ..." -docker exec -it cobbler bash -c 'sleep 3 && cobbler version' - -if $RUN_TESTS -then - echo "==> Running tests ..." - docker exec -it cobbler bash -c 'pip3 install coverage distro future setuptools sphinx requests future' - docker exec -it cobbler bash -c 'pip3 install pyyaml netaddr Cheetah3 pymongo distro' - docker exec -it cobbler bash -c 'pip3 install dnspython pyflakes pycodestyle pytest pytest-cov codecov' - docker exec -it cobbler bash -c 'pytest' -fi - -# Clean up -echo "==> Stop Cobbler container ..." -docker stop cobbler -echo "==> Delete Cobbler container ..." -docker rm cobbler diff --git a/misc/anamon b/misc/anamon old mode 100644 new mode 100755 index 411abc4f1d..240030d6ec --- a/misc/anamon +++ b/misc/anamon @@ -30,6 +30,7 @@ import time import re import base64 import shlex + try: from xmlrpc.client import Server except ImportError: @@ -41,6 +42,7 @@ except ImportError: if not hasattr(shlex, "split"): shlex.split = lambda s: s.split(" ") + class WatchedFile: def __init__(self, fn, alias): self.fn = fn @@ -50,18 +52,18 @@ class WatchedFile: def reset(self): self.where = 0 self.last_size = 0 - self.lfrag='' - self.re_list={} - self.seen_line={} + self.lfrag = "" + self.re_list = {} + self.seen_line = {} def exists(self): return os.access(self.fn, os.F_OK) - def lookfor(self,pattern): - self.re_list[pattern] = re.compile(pattern,re.MULTILINE) + def lookfor(self, pattern): + self.re_list[pattern] = re.compile(pattern, re.MULTILINE) self.seen_line[pattern] = 0 - def seen(self,pattern): + def seen(self, pattern): if pattern in self.seen_line: return self.seen_line[pattern] else: @@ -77,7 +79,7 @@ class WatchedFile: else: return 0 - def uploadWrapper(self, blocksize = 262144): + def uploadWrapper(self, blocksize=262144): """upload a file in chunks using the uploadFile call""" retries = 3 fo = open(self.fn, "rb") @@ -97,7 +99,10 @@ class WatchedFile: del contents tries = 0 while tries <= retries: - debug("upload_log_data('%s', '%s', %s, %s, ...)\n" % (name, self.alias, sz, offset)) + debug( + "upload_log_data('%s', '%s', %s, %s, ...)\n" + % (name, self.alias, sz, offset) + ) if session.upload_log_data(name, self.alias, sz, offset, data.decode()): break else: @@ -117,20 +122,20 @@ class WatchedFile: except: raise -class MountWatcher: - def __init__(self,mp): +class MountWatcher: + def __init__(self, mp): self.mountpoint = mp self.zero() def zero(self): - self.line='' + self.line = "" self.time = time.time() def update(self): found = 0 - if 
os.path.exists('/proc/mounts'): - fd = open('/proc/mounts') + if os.path.exists("/proc/mounts"): + fd = open("/proc/mounts") while 1: line = fd.readline() if not line: @@ -153,6 +158,7 @@ class MountWatcher: else: return 0 + def anamon_loop(): alog = WatchedFile("/tmp/anaconda.log", "anaconda.log") alog.lookfor("step installpackages$") @@ -181,8 +187,12 @@ def anamon_loop(): # Monitor for bootloader configuration changes bootloader_cfgs = list() - bootloader_cfgs.append(WatchedFile("/mnt/sysimage/boot/grub/grub.conf", "grub.conf")) - bootloader_cfgs.append(WatchedFile("/mnt/sysimage/boot/efi/efi/redhat/elilo.conf", "elilo.conf")) + bootloader_cfgs.append( + WatchedFile("/mnt/sysimage/boot/grub/grub.conf", "grub.conf") + ) + bootloader_cfgs.append( + WatchedFile("/mnt/sysimage/boot/efi/efi/redhat/elilo.conf", "elilo.conf") + ) bootloader_cfgs.append(WatchedFile("/mnt/sysimage/etc/zipl.conf", "zipl.conf")) # Were we asked to watch specific files? @@ -200,7 +210,20 @@ def anamon_loop(): # Use the default watchlist and waitlist else: - watchlist = [alog, slog, dump, scrlog, mod, llog, kcfg, storage_log, prgm_log, vnc_log, xlog, kspre] + watchlist = [ + alog, + slog, + dump, + scrlog, + mod, + llog, + kcfg, + storage_log, + prgm_log, + vnc_log, + xlog, + kspre, + ] waitlist.extend(package_logs) waitlist.extend(bootloader_cfgs) @@ -211,7 +234,9 @@ def anamon_loop(): # Not all log files are available at the start, we'll loop through the # waitlist to determine when each file can be added to the watchlist for watch in waitlist: - if alog.seen("step installpackages$") or (sysimage.stable() and watch.exists()): + if alog.seen("step installpackages$") or ( + sysimage.stable() and watch.exists() + ): debug("Adding %s to watch list\n" % watch.alias) watchlist.append(watch) waitlist.remove(watch) @@ -224,12 +249,13 @@ def anamon_loop(): if exit: break + # Establish some defaults name = "" server = "" port = "80" daemon = 1 -debug = lambda x,**y: None +debug = lambda x, **y: None watchfiles = [] exit = False @@ -237,25 +263,25 @@ exit = False n = 0 while n < len(sys.argv): arg = sys.argv[n] - if arg == '--name': - n = n+1 + if arg == "--name": + n = n + 1 name = sys.argv[n] - elif arg == '--watchfile': - n = n+1 + elif arg == "--watchfile": + n = n + 1 watchfiles.extend(shlex.split(sys.argv[n])) - elif arg == '--exit': + elif arg == "--exit": exit = True - elif arg == '--server': - n = n+1 + elif arg == "--server": + n = n + 1 server = sys.argv[n] - elif arg == '--port': - n = n+1 + elif arg == "--port": + n = n + 1 port = sys.argv[n] - elif arg == '--debug': - debug = lambda x,**y: sys.stderr.write(x % y) - elif arg == '--fg': + elif arg == "--debug": + debug = lambda x, **y: sys.stderr.write(x % y) + elif arg == "--fg": daemon = 0 - n = n+1 + n = n + 1 # Create an xmlrpc session handle session = Server("http://%s:%s/cobbler_api" % (server, port)) @@ -265,9 +291,9 @@ if daemon: if not os.fork(): # Redirect the standard I/O file descriptors to the specified file. 
DEVNULL = getattr(os, "devnull", "/dev/null") - os.open(DEVNULL, os.O_RDWR) # standard input (0) - os.dup2(0, 1) # Duplicate standard input to standard output (1) - os.dup2(0, 2) # Duplicate standard input to standard error (2) + os.open(DEVNULL, os.O_RDWR) # standard input (0) + os.dup2(0, 1) # Duplicate standard input to standard output (1) + os.dup2(0, 2) # Duplicate standard input to standard error (2) anamon_loop() sys.exit(1) diff --git a/setup.py b/setup.py index a1d947a43f..6dd5a861f9 100644 --- a/setup.py +++ b/setup.py @@ -548,6 +548,7 @@ def run(self): "dnspython", "file-magic", "schema", + "gunicorn", ], extras_require={ "lint": ["pyflakes", "pycodestyle"],
comic__grand-challenge.org-2049
Incorrect values in Archive Item List
The Archive Item list view displays the correct archive items, but the `archive_item.values` rendered for each item are duplicated.
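The behaviour reported above is what a Django list view shows when its queryset joins on the related `values` table, for example while searching or ordering over the `values__...` fields: every matching value repeats the `ArchiveItem` row, so the values listed per item appear more than once. The sketch below illustrates one possible guard under that assumption only; the helper name `archive_item_queryset`, the `.distinct()` call and the `Prefetch` usage are illustrative and are not necessarily the fix that was applied in the project.

```python
# Illustrative sketch only -- shows one way to stop a join on the related
# "values" table from duplicating ArchiveItem rows (and therefore their
# values) in a list view.  The model names mirror the project's code, but
# the helper itself is hypothetical.
from django.db.models import Prefetch

from grandchallenge.archives.models import Archive, ArchiveItem
from grandchallenge.components.models import ComponentInterfaceValue


def archive_item_queryset(archive: Archive):
    """Return the items of ``archive`` with their values prefetched once."""
    return (
        ArchiveItem.objects.filter(archive=archive)
        # Searching or sorting across values__* fields joins the
        # ComponentInterfaceValue table, which can repeat each item row
        # once per matching value and makes the values look duplicated.
        .distinct()
        # Fetch the related values in a single extra query instead of via
        # the duplicating join; select_related avoids a query per interface.
        .prefetch_related(
            Prefetch(
                "values",
                queryset=ComponentInterfaceValue.objects.select_related(
                    "interface"
                ),
            )
        )
    )
```

Under the same assumption, an alternative would be to keep the row queryset free of `values__...` joins for display and apply those joins only when filtering search results.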
[ { "content": "from celery import chain, chord, group\nfrom django.contrib.auth.mixins import PermissionRequiredMixin\nfrom django.contrib.messages.views import SuccessMessageMixin\nfrom django.core.exceptions import (\n NON_FIELD_ERRORS,\n PermissionDenied,\n ValidationError,\n)\nfrom django.core.files import File\nfrom django.db.transaction import on_commit\nfrom django.forms.utils import ErrorList\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import get_object_or_404\nfrom django.utils.functional import cached_property\nfrom django.utils.html import format_html\nfrom django.utils.text import get_valid_filename\nfrom django.utils.timezone import now\nfrom django.views.generic import (\n CreateView,\n DetailView,\n FormView,\n ListView,\n UpdateView,\n)\nfrom django_filters.rest_framework import DjangoFilterBackend\nfrom guardian.mixins import (\n LoginRequiredMixin,\n PermissionListMixin,\n PermissionRequiredMixin as ObjectPermissionRequiredMixin,\n)\nfrom rest_framework.permissions import DjangoObjectPermissions\nfrom rest_framework.settings import api_settings\nfrom rest_framework.viewsets import ReadOnlyModelViewSet\nfrom rest_framework_guardian.filters import ObjectPermissionsFilter\n\nfrom grandchallenge.algorithms.tasks import (\n add_images_to_component_interface_value,\n)\nfrom grandchallenge.archives.filters import ArchiveFilter\nfrom grandchallenge.archives.forms import (\n AddCasesForm,\n ArchiveCasesToReaderStudyForm,\n ArchiveForm,\n ArchiveItemForm,\n ArchivePermissionRequestUpdateForm,\n UploadersForm,\n UsersForm,\n)\nfrom grandchallenge.archives.models import (\n Archive,\n ArchiveItem,\n ArchivePermissionRequest,\n)\nfrom grandchallenge.archives.serializers import ArchiveSerializer\nfrom grandchallenge.archives.tasks import (\n add_images_to_archive,\n update_archive_item_values,\n)\nfrom grandchallenge.cases.models import (\n Image,\n RawImageFile,\n RawImageUploadSession,\n)\nfrom grandchallenge.cases.tasks import build_images\nfrom grandchallenge.components.models import (\n ComponentInterface,\n ComponentInterfaceValue,\n InterfaceKind,\n)\nfrom grandchallenge.core.filters import FilterMixin\nfrom grandchallenge.core.forms import UserFormKwargsMixin\nfrom grandchallenge.core.renderers import PaginatedCSVRenderer\nfrom grandchallenge.core.templatetags.random_encode import random_encode\nfrom grandchallenge.core.views import PermissionRequestUpdate\nfrom grandchallenge.datatables.views import Column, PaginatedTableListView\nfrom grandchallenge.groups.forms import EditorsForm\nfrom grandchallenge.groups.views import UserGroupUpdateMixin\nfrom grandchallenge.reader_studies.models import ReaderStudy\nfrom grandchallenge.subdomains.utils import reverse\n\n\nclass ArchiveList(FilterMixin, PermissionListMixin, ListView):\n model = Archive\n permission_required = (\n f\"{model._meta.app_label}.view_{model._meta.model_name}\"\n )\n ordering = \"-created\"\n filter_class = ArchiveFilter\n paginate_by = 40\n\n def get_context_data(self, *args, **kwargs):\n context = super().get_context_data(*args, **kwargs)\n\n context.update(\n {\n \"jumbotron_title\": \"Archives\",\n \"jumbotron_description\": format_html(\n (\n \"An archive can be used to collect set of medical \"\n \"images, which can later be used in a reader study, \"\n \"challenge or algorithm. 
Please <a href='{}'>contact \"\n \"us</a> if you would like to set up your own archive.\"\n ),\n random_encode(\"mailto:[email protected]\"),\n ),\n }\n )\n\n return context\n\n\nclass ArchiveCreate(\n PermissionRequiredMixin, UserFormKwargsMixin, CreateView,\n):\n model = Archive\n form_class = ArchiveForm\n permission_required = (\n f\"{model._meta.app_label}.add_{model._meta.model_name}\"\n )\n\n def form_valid(self, form):\n response = super().form_valid(form=form)\n self.object.add_editor(self.request.user)\n return response\n\n\nclass ArchiveDetail(\n LoginRequiredMixin, ObjectPermissionRequiredMixin, DetailView\n):\n model = Archive\n permission_required = (\n f\"{model._meta.app_label}.use_{model._meta.model_name}\"\n )\n raise_exception = True\n\n def on_permission_check_fail(self, request, response, obj=None):\n response = self.get(request)\n return response\n\n def check_permissions(self, request):\n try:\n return super().check_permissions(request)\n except PermissionDenied:\n return HttpResponseRedirect(\n reverse(\n \"archives:permission-request-create\",\n kwargs={\"slug\": self.object.slug},\n )\n )\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n user_remove_form = UsersForm()\n user_remove_form.fields[\"action\"].initial = UsersForm.REMOVE\n\n uploader_remove_form = UploadersForm()\n uploader_remove_form.fields[\"action\"].initial = UploadersForm.REMOVE\n\n editor_remove_form = EditorsForm()\n editor_remove_form.fields[\"action\"].initial = EditorsForm.REMOVE\n\n limit = 1000\n\n context.update(\n {\n \"user_remove_form\": user_remove_form,\n \"uploader_remove_form\": uploader_remove_form,\n \"editor_remove_form\": editor_remove_form,\n \"now\": now().isoformat(),\n \"limit\": limit,\n \"offsets\": range(\n 0,\n Image.objects.filter(\n componentinterfacevalue__archive_items__archive=context[\n \"object\"\n ]\n ).count(),\n limit,\n ),\n }\n )\n\n pending_permission_requests = ArchivePermissionRequest.objects.filter(\n archive=context[\"object\"], status=ArchivePermissionRequest.PENDING,\n ).count()\n context.update(\n {\"pending_permission_requests\": pending_permission_requests}\n )\n\n return context\n\n\nclass ArchiveUpdate(\n LoginRequiredMixin,\n UserFormKwargsMixin,\n ObjectPermissionRequiredMixin,\n UpdateView,\n):\n model = Archive\n form_class = ArchiveForm\n permission_required = (\n f\"{model._meta.app_label}.change_{model._meta.model_name}\"\n )\n raise_exception = True\n\n\nclass ArchiveGroupUpdateMixin(UserGroupUpdateMixin):\n template_name = \"archives/archive_user_groups_form.html\"\n permission_required = (\n f\"{Archive._meta.app_label}.change_{Archive._meta.model_name}\"\n )\n\n @property\n def obj(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n\nclass ArchiveEditorsUpdate(ArchiveGroupUpdateMixin):\n form_class = EditorsForm\n success_message = \"Editors successfully updated\"\n\n\nclass ArchiveUploadersUpdate(ArchiveGroupUpdateMixin):\n form_class = UploadersForm\n success_message = \"Uploaders successfully updated\"\n\n\nclass ArchiveUsersUpdate(ArchiveGroupUpdateMixin):\n form_class = UsersForm\n success_message = \"Users successfully updated\"\n\n\nclass ArchivePermissionRequestCreate(\n LoginRequiredMixin, SuccessMessageMixin, CreateView\n):\n model = ArchivePermissionRequest\n fields = ()\n\n @property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_success_url(self):\n return self.archive.get_absolute_url()\n\n def 
get_success_message(self, cleaned_data):\n return self.object.status_to_string()\n\n def form_valid(self, form):\n form.instance.user = self.request.user\n form.instance.archive = self.archive\n try:\n redirect = super().form_valid(form)\n return redirect\n\n except ValidationError as e:\n form._errors[NON_FIELD_ERRORS] = ErrorList(e.messages)\n return super().form_invalid(form)\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n permission_request = ArchivePermissionRequest.objects.filter(\n archive=self.archive, user=self.request.user\n ).first()\n context.update(\n {\n \"permission_request\": permission_request,\n \"archive\": self.archive,\n }\n )\n return context\n\n\nclass ArchivePermissionRequestList(ObjectPermissionRequiredMixin, ListView):\n model = ArchivePermissionRequest\n permission_required = (\n f\"{Archive._meta.app_label}.change_{Archive._meta.model_name}\"\n )\n raise_exception = True\n\n @property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def get_queryset(self):\n queryset = super().get_queryset()\n queryset = (\n queryset.filter(archive=self.archive)\n .exclude(status=ArchivePermissionRequest.ACCEPTED)\n .select_related(\"user__user_profile\", \"user__verification\")\n )\n return queryset\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n\nclass ArchivePermissionRequestUpdate(PermissionRequestUpdate):\n model = ArchivePermissionRequest\n form_class = ArchivePermissionRequestUpdateForm\n base_model = Archive\n redirect_namespace = \"archives\"\n user_check_attrs = [\"is_user\", \"is_uploader\", \"is_editor\"]\n permission_required = (\n f\"{Archive._meta.app_label}.change_{Archive._meta.model_name}\"\n )\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.base_object})\n return context\n\n\nclass ArchiveUploadSessionCreate(\n LoginRequiredMixin,\n UserFormKwargsMixin,\n ObjectPermissionRequiredMixin,\n CreateView,\n):\n model = RawImageUploadSession\n form_class = AddCasesForm\n template_name = \"archives/archive_upload_session_create.html\"\n permission_required = (\n f\"{Archive._meta.app_label}.upload_{Archive._meta.model_name}\"\n )\n raise_exception = True\n\n def get_form_kwargs(self):\n kwargs = super().get_form_kwargs()\n kwargs.update(\n {\n \"linked_task\": add_images_to_archive.signature(\n kwargs={\"archive_pk\": self.archive.pk}, immutable=True\n )\n }\n )\n return kwargs\n\n @cached_property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def form_valid(self, form):\n form.instance.creator = self.request.user\n return super().form_valid(form)\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n\nclass ArchiveEditArchiveItem(\n LoginRequiredMixin,\n UserFormKwargsMixin,\n ObjectPermissionRequiredMixin,\n FormView,\n):\n form_class = ArchiveItemForm\n template_name = \"archives/archive_item_form.html\"\n permission_required = (\n f\"{Archive._meta.app_label}.upload_{Archive._meta.model_name}\"\n )\n raise_exception = True\n\n def get_permission_object(self):\n return self.archive\n\n @cached_property\n def archive(self):\n return 
get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n @cached_property\n def archive_item(self):\n return get_object_or_404(ArchiveItem, pk=self.kwargs[\"id\"])\n\n def get_form_kwargs(self):\n kwargs = super().get_form_kwargs()\n kwargs.update(\n {\"archive\": self.archive, \"archive_item\": self.archive_item}\n )\n return kwargs\n\n def get_context_data(self, *args, **kwargs):\n context = super().get_context_data(*args, **kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n def form_valid(self, form): # noqa: C901\n def create_upload(image_files):\n raw_files = []\n upload_session = RawImageUploadSession.objects.create(\n creator=self.request.user\n )\n for image_file in image_files:\n raw_files.append(\n RawImageFile(\n upload_session=upload_session,\n filename=image_file.name,\n staged_file_id=image_file.uuid,\n )\n )\n RawImageFile.objects.bulk_create(list(raw_files))\n return upload_session.pk\n\n upload_pks = {}\n civ_pks_to_remove = set()\n civ_pks_to_add = set()\n\n for slug, value in form.cleaned_data.items():\n if value is None:\n continue\n\n ci = ComponentInterface.objects.get(slug=slug)\n civ = self.archive_item.values.filter(interface=ci).first()\n\n if civ:\n if civ.value == value:\n continue\n civ_pks_to_remove.add(civ.pk)\n\n if ci.kind in InterfaceKind.interface_type_image():\n civ = ComponentInterfaceValue.objects.create(interface=ci)\n civ_pks_to_add.add(civ.pk)\n upload_pks[civ.pk] = create_upload(value)\n elif ci.kind in InterfaceKind.interface_type_file():\n civ = ComponentInterfaceValue(interface=ci)\n name = get_valid_filename(value[0].name)\n with value[0].open() as f:\n civ.file = File(f, name=name)\n civ.full_clean()\n civ.save()\n civ_pks_to_add.add(civ.pk)\n else:\n civ = ci.create_instance(value=value)\n civ_pks_to_add.add(civ.pk)\n\n tasks = update_archive_item_values.signature(\n kwargs={\n \"archive_item_pk\": self.archive_item.pk,\n \"civ_pks_to_add\": list(civ_pks_to_add),\n \"civ_pks_to_remove\": list(civ_pks_to_remove),\n },\n immutable=True,\n )\n\n if len(upload_pks) > 0:\n image_tasks = group(\n chain(\n build_images.signature(\n kwargs={\"upload_session_pk\": upload_pk}\n ),\n add_images_to_component_interface_value.signature(\n kwargs={\n \"component_interface_value_pk\": civ_pk,\n \"upload_session_pk\": upload_pk,\n },\n immutable=True,\n ),\n )\n for civ_pk, upload_pk in upload_pks.items()\n )\n tasks = chord(image_tasks, tasks)\n\n on_commit(tasks.apply_async)\n\n return HttpResponseRedirect(\n reverse(\n \"archives:items-list\", kwargs={\"slug\": self.kwargs[\"slug\"]},\n )\n )\n\n\nclass ArchiveItemsList(\n LoginRequiredMixin, ObjectPermissionRequiredMixin, PaginatedTableListView,\n):\n model = ArchiveItem\n permission_required = (\n f\"{Archive._meta.app_label}.use_{Archive._meta.model_name}\"\n )\n raise_exception = True\n template_name = \"archives/archive_items_list.html\"\n row_template = \"archives/archive_items_row.html\"\n search_fields = [\n \"pk\",\n \"values__interface__title\",\n \"values__value\",\n \"values__image__name\",\n \"values__file\",\n ]\n columns = [\n Column(title=\"Values\", sort_field=\"values\"),\n Column(title=\"Edit\", sort_field=\"pk\"),\n ]\n\n @cached_property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n def get_queryset(self):\n qs 
= super().get_queryset()\n return qs.filter(archive=self.archive).prefetch_related(\"values\")\n\n\nclass ArchiveCasesList(\n LoginRequiredMixin, ObjectPermissionRequiredMixin, PaginatedTableListView,\n):\n model = Image\n permission_required = (\n f\"{Archive._meta.app_label}.use_{Archive._meta.model_name}\"\n )\n raise_exception = True\n template_name = \"archives/archive_cases_list.html\"\n row_template = \"archives/archive_cases_row.html\"\n search_fields = [\n \"pk\",\n \"name\",\n ]\n columns = [\n Column(title=\"Name\", sort_field=\"name\"),\n Column(title=\"Created\", sort_field=\"created\"),\n Column(title=\"Creator\", sort_field=\"origin__creator__username\"),\n Column(title=\"View\", sort_field=\"pk\"),\n Column(\n title=\"Algorithm Results\",\n sort_field=\"pk\",\n optional_condition=lambda o: any(\n civ.algorithms_jobs_as_input.exists()\n for civ in o.componentinterfacevalue_set.all()\n ),\n ),\n Column(title=\"Download\", sort_field=\"pk\"),\n ]\n\n @cached_property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n def get_queryset(self):\n qs = super().get_queryset()\n return (\n qs.filter(\n componentinterfacevalue__archive_items__archive=self.archive\n )\n .prefetch_related(\n \"files\",\n \"componentinterfacevalue_set__algorithms_jobs_as_input__algorithm_image__algorithm\",\n )\n .select_related(\n \"origin__creator__user_profile\",\n \"origin__creator__verification\",\n )\n )\n\n\nclass ArchiveCasesToReaderStudyUpdate(\n LoginRequiredMixin,\n ObjectPermissionRequiredMixin,\n SuccessMessageMixin,\n FormView,\n):\n form_class = ArchiveCasesToReaderStudyForm\n permission_required = (\n f\"{Archive._meta.app_label}.use_{Archive._meta.model_name}\"\n )\n raise_exception = True\n template_name = \"archives/archive_cases_to_reader_study_form.html\"\n\n @cached_property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n def get_form_kwargs(self):\n kwargs = super().get_form_kwargs()\n kwargs.update({\"user\": self.request.user, \"archive\": self.archive})\n return kwargs\n\n def form_valid(self, form):\n reader_study: ReaderStudy = form.cleaned_data[\"reader_study\"]\n images = form.cleaned_data[\"images\"]\n\n reader_study.images.add(*images)\n\n self.success_url = reader_study.get_absolute_url()\n self.success_message = f\"Added {len(images)} cases to {reader_study}.\"\n\n return super().form_valid(form)\n\n\nclass ArchiveViewSet(ReadOnlyModelViewSet):\n serializer_class = ArchiveSerializer\n queryset = Archive.objects.all()\n permission_classes = (DjangoObjectPermissions,)\n filter_backends = (\n DjangoFilterBackend,\n ObjectPermissionsFilter,\n )\n filterset_fields = (\"slug\",)\n renderer_classes = (\n *api_settings.DEFAULT_RENDERER_CLASSES,\n PaginatedCSVRenderer,\n )\n", "path": "app/grandchallenge/archives/views.py" } ]
[ { "content": "from celery import chain, chord, group\nfrom django.contrib.auth.mixins import PermissionRequiredMixin\nfrom django.contrib.messages.views import SuccessMessageMixin\nfrom django.core.exceptions import (\n NON_FIELD_ERRORS,\n PermissionDenied,\n ValidationError,\n)\nfrom django.core.files import File\nfrom django.db.transaction import on_commit\nfrom django.forms.utils import ErrorList\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import get_object_or_404\nfrom django.utils.functional import cached_property\nfrom django.utils.html import format_html\nfrom django.utils.text import get_valid_filename\nfrom django.utils.timezone import now\nfrom django.views.generic import (\n CreateView,\n DetailView,\n FormView,\n ListView,\n UpdateView,\n)\nfrom django_filters.rest_framework import DjangoFilterBackend\nfrom guardian.mixins import (\n LoginRequiredMixin,\n PermissionListMixin,\n PermissionRequiredMixin as ObjectPermissionRequiredMixin,\n)\nfrom rest_framework.permissions import DjangoObjectPermissions\nfrom rest_framework.settings import api_settings\nfrom rest_framework.viewsets import ReadOnlyModelViewSet\nfrom rest_framework_guardian.filters import ObjectPermissionsFilter\n\nfrom grandchallenge.algorithms.tasks import (\n add_images_to_component_interface_value,\n)\nfrom grandchallenge.archives.filters import ArchiveFilter\nfrom grandchallenge.archives.forms import (\n AddCasesForm,\n ArchiveCasesToReaderStudyForm,\n ArchiveForm,\n ArchiveItemForm,\n ArchivePermissionRequestUpdateForm,\n UploadersForm,\n UsersForm,\n)\nfrom grandchallenge.archives.models import (\n Archive,\n ArchiveItem,\n ArchivePermissionRequest,\n)\nfrom grandchallenge.archives.serializers import ArchiveSerializer\nfrom grandchallenge.archives.tasks import (\n add_images_to_archive,\n update_archive_item_values,\n)\nfrom grandchallenge.cases.models import (\n Image,\n RawImageFile,\n RawImageUploadSession,\n)\nfrom grandchallenge.cases.tasks import build_images\nfrom grandchallenge.components.models import (\n ComponentInterface,\n ComponentInterfaceValue,\n InterfaceKind,\n)\nfrom grandchallenge.core.filters import FilterMixin\nfrom grandchallenge.core.forms import UserFormKwargsMixin\nfrom grandchallenge.core.renderers import PaginatedCSVRenderer\nfrom grandchallenge.core.templatetags.random_encode import random_encode\nfrom grandchallenge.core.views import PermissionRequestUpdate\nfrom grandchallenge.datatables.views import Column, PaginatedTableListView\nfrom grandchallenge.groups.forms import EditorsForm\nfrom grandchallenge.groups.views import UserGroupUpdateMixin\nfrom grandchallenge.reader_studies.models import ReaderStudy\nfrom grandchallenge.subdomains.utils import reverse\n\n\nclass ArchiveList(FilterMixin, PermissionListMixin, ListView):\n model = Archive\n permission_required = (\n f\"{model._meta.app_label}.view_{model._meta.model_name}\"\n )\n ordering = \"-created\"\n filter_class = ArchiveFilter\n paginate_by = 40\n\n def get_context_data(self, *args, **kwargs):\n context = super().get_context_data(*args, **kwargs)\n\n context.update(\n {\n \"jumbotron_title\": \"Archives\",\n \"jumbotron_description\": format_html(\n (\n \"An archive can be used to collect set of medical \"\n \"images, which can later be used in a reader study, \"\n \"challenge or algorithm. 
Please <a href='{}'>contact \"\n \"us</a> if you would like to set up your own archive.\"\n ),\n random_encode(\"mailto:[email protected]\"),\n ),\n }\n )\n\n return context\n\n\nclass ArchiveCreate(\n PermissionRequiredMixin, UserFormKwargsMixin, CreateView,\n):\n model = Archive\n form_class = ArchiveForm\n permission_required = (\n f\"{model._meta.app_label}.add_{model._meta.model_name}\"\n )\n\n def form_valid(self, form):\n response = super().form_valid(form=form)\n self.object.add_editor(self.request.user)\n return response\n\n\nclass ArchiveDetail(\n LoginRequiredMixin, ObjectPermissionRequiredMixin, DetailView\n):\n model = Archive\n permission_required = (\n f\"{model._meta.app_label}.use_{model._meta.model_name}\"\n )\n raise_exception = True\n\n def on_permission_check_fail(self, request, response, obj=None):\n response = self.get(request)\n return response\n\n def check_permissions(self, request):\n try:\n return super().check_permissions(request)\n except PermissionDenied:\n return HttpResponseRedirect(\n reverse(\n \"archives:permission-request-create\",\n kwargs={\"slug\": self.object.slug},\n )\n )\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n\n user_remove_form = UsersForm()\n user_remove_form.fields[\"action\"].initial = UsersForm.REMOVE\n\n uploader_remove_form = UploadersForm()\n uploader_remove_form.fields[\"action\"].initial = UploadersForm.REMOVE\n\n editor_remove_form = EditorsForm()\n editor_remove_form.fields[\"action\"].initial = EditorsForm.REMOVE\n\n limit = 1000\n\n context.update(\n {\n \"user_remove_form\": user_remove_form,\n \"uploader_remove_form\": uploader_remove_form,\n \"editor_remove_form\": editor_remove_form,\n \"now\": now().isoformat(),\n \"limit\": limit,\n \"offsets\": range(\n 0,\n Image.objects.filter(\n componentinterfacevalue__archive_items__archive=context[\n \"object\"\n ]\n ).count(),\n limit,\n ),\n }\n )\n\n pending_permission_requests = ArchivePermissionRequest.objects.filter(\n archive=context[\"object\"], status=ArchivePermissionRequest.PENDING,\n ).count()\n context.update(\n {\"pending_permission_requests\": pending_permission_requests}\n )\n\n return context\n\n\nclass ArchiveUpdate(\n LoginRequiredMixin,\n UserFormKwargsMixin,\n ObjectPermissionRequiredMixin,\n UpdateView,\n):\n model = Archive\n form_class = ArchiveForm\n permission_required = (\n f\"{model._meta.app_label}.change_{model._meta.model_name}\"\n )\n raise_exception = True\n\n\nclass ArchiveGroupUpdateMixin(UserGroupUpdateMixin):\n template_name = \"archives/archive_user_groups_form.html\"\n permission_required = (\n f\"{Archive._meta.app_label}.change_{Archive._meta.model_name}\"\n )\n\n @property\n def obj(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n\nclass ArchiveEditorsUpdate(ArchiveGroupUpdateMixin):\n form_class = EditorsForm\n success_message = \"Editors successfully updated\"\n\n\nclass ArchiveUploadersUpdate(ArchiveGroupUpdateMixin):\n form_class = UploadersForm\n success_message = \"Uploaders successfully updated\"\n\n\nclass ArchiveUsersUpdate(ArchiveGroupUpdateMixin):\n form_class = UsersForm\n success_message = \"Users successfully updated\"\n\n\nclass ArchivePermissionRequestCreate(\n LoginRequiredMixin, SuccessMessageMixin, CreateView\n):\n model = ArchivePermissionRequest\n fields = ()\n\n @property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_success_url(self):\n return self.archive.get_absolute_url()\n\n def 
get_success_message(self, cleaned_data):\n return self.object.status_to_string()\n\n def form_valid(self, form):\n form.instance.user = self.request.user\n form.instance.archive = self.archive\n try:\n redirect = super().form_valid(form)\n return redirect\n\n except ValidationError as e:\n form._errors[NON_FIELD_ERRORS] = ErrorList(e.messages)\n return super().form_invalid(form)\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n permission_request = ArchivePermissionRequest.objects.filter(\n archive=self.archive, user=self.request.user\n ).first()\n context.update(\n {\n \"permission_request\": permission_request,\n \"archive\": self.archive,\n }\n )\n return context\n\n\nclass ArchivePermissionRequestList(ObjectPermissionRequiredMixin, ListView):\n model = ArchivePermissionRequest\n permission_required = (\n f\"{Archive._meta.app_label}.change_{Archive._meta.model_name}\"\n )\n raise_exception = True\n\n @property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def get_queryset(self):\n queryset = super().get_queryset()\n queryset = (\n queryset.filter(archive=self.archive)\n .exclude(status=ArchivePermissionRequest.ACCEPTED)\n .select_related(\"user__user_profile\", \"user__verification\")\n )\n return queryset\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n\nclass ArchivePermissionRequestUpdate(PermissionRequestUpdate):\n model = ArchivePermissionRequest\n form_class = ArchivePermissionRequestUpdateForm\n base_model = Archive\n redirect_namespace = \"archives\"\n user_check_attrs = [\"is_user\", \"is_uploader\", \"is_editor\"]\n permission_required = (\n f\"{Archive._meta.app_label}.change_{Archive._meta.model_name}\"\n )\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.base_object})\n return context\n\n\nclass ArchiveUploadSessionCreate(\n LoginRequiredMixin,\n UserFormKwargsMixin,\n ObjectPermissionRequiredMixin,\n CreateView,\n):\n model = RawImageUploadSession\n form_class = AddCasesForm\n template_name = \"archives/archive_upload_session_create.html\"\n permission_required = (\n f\"{Archive._meta.app_label}.upload_{Archive._meta.model_name}\"\n )\n raise_exception = True\n\n def get_form_kwargs(self):\n kwargs = super().get_form_kwargs()\n kwargs.update(\n {\n \"linked_task\": add_images_to_archive.signature(\n kwargs={\"archive_pk\": self.archive.pk}, immutable=True\n )\n }\n )\n return kwargs\n\n @cached_property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def form_valid(self, form):\n form.instance.creator = self.request.user\n return super().form_valid(form)\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n\nclass ArchiveEditArchiveItem(\n LoginRequiredMixin,\n UserFormKwargsMixin,\n ObjectPermissionRequiredMixin,\n FormView,\n):\n form_class = ArchiveItemForm\n template_name = \"archives/archive_item_form.html\"\n permission_required = (\n f\"{Archive._meta.app_label}.upload_{Archive._meta.model_name}\"\n )\n raise_exception = True\n\n def get_permission_object(self):\n return self.archive\n\n @cached_property\n def archive(self):\n return 
get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n @cached_property\n def archive_item(self):\n return get_object_or_404(ArchiveItem, pk=self.kwargs[\"id\"])\n\n def get_form_kwargs(self):\n kwargs = super().get_form_kwargs()\n kwargs.update(\n {\"archive\": self.archive, \"archive_item\": self.archive_item}\n )\n return kwargs\n\n def get_context_data(self, *args, **kwargs):\n context = super().get_context_data(*args, **kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n def form_valid(self, form): # noqa: C901\n def create_upload(image_files):\n raw_files = []\n upload_session = RawImageUploadSession.objects.create(\n creator=self.request.user\n )\n for image_file in image_files:\n raw_files.append(\n RawImageFile(\n upload_session=upload_session,\n filename=image_file.name,\n staged_file_id=image_file.uuid,\n )\n )\n RawImageFile.objects.bulk_create(list(raw_files))\n return upload_session.pk\n\n upload_pks = {}\n civ_pks_to_remove = set()\n civ_pks_to_add = set()\n\n for slug, value in form.cleaned_data.items():\n if value is None:\n continue\n\n ci = ComponentInterface.objects.get(slug=slug)\n civ = self.archive_item.values.filter(interface=ci).first()\n\n if civ:\n if civ.value == value:\n continue\n civ_pks_to_remove.add(civ.pk)\n\n if ci.kind in InterfaceKind.interface_type_image():\n civ = ComponentInterfaceValue.objects.create(interface=ci)\n civ_pks_to_add.add(civ.pk)\n upload_pks[civ.pk] = create_upload(value)\n elif ci.kind in InterfaceKind.interface_type_file():\n civ = ComponentInterfaceValue(interface=ci)\n name = get_valid_filename(value[0].name)\n with value[0].open() as f:\n civ.file = File(f, name=name)\n civ.full_clean()\n civ.save()\n civ_pks_to_add.add(civ.pk)\n else:\n civ = ci.create_instance(value=value)\n civ_pks_to_add.add(civ.pk)\n\n tasks = update_archive_item_values.signature(\n kwargs={\n \"archive_item_pk\": self.archive_item.pk,\n \"civ_pks_to_add\": list(civ_pks_to_add),\n \"civ_pks_to_remove\": list(civ_pks_to_remove),\n },\n immutable=True,\n )\n\n if len(upload_pks) > 0:\n image_tasks = group(\n chain(\n build_images.signature(\n kwargs={\"upload_session_pk\": upload_pk}\n ),\n add_images_to_component_interface_value.signature(\n kwargs={\n \"component_interface_value_pk\": civ_pk,\n \"upload_session_pk\": upload_pk,\n },\n immutable=True,\n ),\n )\n for civ_pk, upload_pk in upload_pks.items()\n )\n tasks = chord(image_tasks, tasks)\n\n on_commit(tasks.apply_async)\n\n return HttpResponseRedirect(\n reverse(\n \"archives:items-list\", kwargs={\"slug\": self.kwargs[\"slug\"]},\n )\n )\n\n\nclass ArchiveItemsList(\n LoginRequiredMixin, ObjectPermissionRequiredMixin, PaginatedTableListView,\n):\n model = ArchiveItem\n permission_required = (\n f\"{Archive._meta.app_label}.use_{Archive._meta.model_name}\"\n )\n raise_exception = True\n template_name = \"archives/archive_items_list.html\"\n row_template = \"archives/archive_items_row.html\"\n search_fields = [\n \"pk\",\n \"values__interface__title\",\n \"values__value\",\n \"values__image__name\",\n \"values__file\",\n ]\n columns = [\n Column(title=\"Values\", sort_field=\"created\"),\n Column(title=\"Edit\", sort_field=\"pk\"),\n ]\n\n @cached_property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n def get_queryset(self):\n 
qs = super().get_queryset()\n return qs.filter(archive=self.archive).prefetch_related(\"values\")\n\n\nclass ArchiveCasesList(\n LoginRequiredMixin, ObjectPermissionRequiredMixin, PaginatedTableListView,\n):\n model = Image\n permission_required = (\n f\"{Archive._meta.app_label}.use_{Archive._meta.model_name}\"\n )\n raise_exception = True\n template_name = \"archives/archive_cases_list.html\"\n row_template = \"archives/archive_cases_row.html\"\n search_fields = [\n \"pk\",\n \"name\",\n ]\n columns = [\n Column(title=\"Name\", sort_field=\"name\"),\n Column(title=\"Created\", sort_field=\"created\"),\n Column(title=\"Creator\", sort_field=\"origin__creator__username\"),\n Column(title=\"View\", sort_field=\"pk\"),\n Column(\n title=\"Algorithm Results\",\n sort_field=\"pk\",\n optional_condition=lambda o: any(\n civ.algorithms_jobs_as_input.exists()\n for civ in o.componentinterfacevalue_set.all()\n ),\n ),\n Column(title=\"Download\", sort_field=\"pk\"),\n ]\n\n @cached_property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n def get_queryset(self):\n qs = super().get_queryset()\n return (\n qs.filter(\n componentinterfacevalue__archive_items__archive=self.archive\n )\n .prefetch_related(\n \"files\",\n \"componentinterfacevalue_set__algorithms_jobs_as_input__algorithm_image__algorithm\",\n )\n .select_related(\n \"origin__creator__user_profile\",\n \"origin__creator__verification\",\n )\n )\n\n\nclass ArchiveCasesToReaderStudyUpdate(\n LoginRequiredMixin,\n ObjectPermissionRequiredMixin,\n SuccessMessageMixin,\n FormView,\n):\n form_class = ArchiveCasesToReaderStudyForm\n permission_required = (\n f\"{Archive._meta.app_label}.use_{Archive._meta.model_name}\"\n )\n raise_exception = True\n template_name = \"archives/archive_cases_to_reader_study_form.html\"\n\n @cached_property\n def archive(self):\n return get_object_or_404(Archive, slug=self.kwargs[\"slug\"])\n\n def get_permission_object(self):\n return self.archive\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context.update({\"archive\": self.archive})\n return context\n\n def get_form_kwargs(self):\n kwargs = super().get_form_kwargs()\n kwargs.update({\"user\": self.request.user, \"archive\": self.archive})\n return kwargs\n\n def form_valid(self, form):\n reader_study: ReaderStudy = form.cleaned_data[\"reader_study\"]\n images = form.cleaned_data[\"images\"]\n\n reader_study.images.add(*images)\n\n self.success_url = reader_study.get_absolute_url()\n self.success_message = f\"Added {len(images)} cases to {reader_study}.\"\n\n return super().form_valid(form)\n\n\nclass ArchiveViewSet(ReadOnlyModelViewSet):\n serializer_class = ArchiveSerializer\n queryset = Archive.objects.all()\n permission_classes = (DjangoObjectPermissions,)\n filter_backends = (\n DjangoFilterBackend,\n ObjectPermissionsFilter,\n )\n filterset_fields = (\"slug\",)\n renderer_classes = (\n *api_settings.DEFAULT_RENDERER_CLASSES,\n PaginatedCSVRenderer,\n )\n", "path": "app/grandchallenge/archives/views.py" } ]
diff --git a/app/grandchallenge/archives/views.py b/app/grandchallenge/archives/views.py index 5e987c2769..aee105b4eb 100644 --- a/app/grandchallenge/archives/views.py +++ b/app/grandchallenge/archives/views.py @@ -498,7 +498,7 @@ class ArchiveItemsList( "values__file", ] columns = [ - Column(title="Values", sort_field="values"), + Column(title="Values", sort_field="created"), Column(title="Edit", sort_field="pk"), ]
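The patch above changes only the Values column's sort_field, moving it from the to-many `values` relation to the single-valued `created` field. One plausible reason (an assumption, not stated in this record) is that ordering through a to-many relation yields one joined row per related value, which breaks a paginated table; the tiny stand-alone sketch below illustrates that duplication with hypothetical data, without touching Django or the grandchallenge models.

```python
# Hypothetical data (not the grandchallenge schema): sorting rows through a
# to-many relation repeats the parent item once per related value, while
# sorting by a single-valued column such as `created` keeps one row per item.
items = {"item-1": ["civ-a", "civ-b"], "item-2": ["civ-c"]}

by_values = sorted((civ, item) for item, civs in items.items() for civ in civs)
print(by_values)       # [('civ-a', 'item-1'), ('civ-b', 'item-1'), ('civ-c', 'item-2')]
print(sorted(items))   # ['item-1', 'item-2'] -- exactly one row per item
```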
ipython__ipython-2186
oct2py v >= 0.3.1 doesn't need h5py anymore. The octave magic docs/examples should be updated to reflect this.
[ { "content": "# -*- coding: utf-8 -*-\n\"\"\"\n===========\noctavemagic\n===========\n\nMagics for interacting with Octave via oct2py.\n\n.. note::\n\n The ``oct2py`` module needs to be installed separately, and in turn depends\n on ``h5py``. Both can be obtained using ``easy_install`` or ``pip``.\n\nUsage\n=====\n\n``%octave``\n\n{OCTAVE_DOC}\n\n``%octave_push``\n\n{OCTAVE_PUSH_DOC}\n\n``%octave_pull``\n\n{OCTAVE_PULL_DOC}\n\n\"\"\"\n\n#-----------------------------------------------------------------------------\n# Copyright (C) 2012 The IPython Development Team\n#\n# Distributed under the terms of the BSD License. The full license is in\n# the file COPYING, distributed as part of this software.\n#-----------------------------------------------------------------------------\n\nimport tempfile\nfrom glob import glob\nfrom shutil import rmtree\n\nimport numpy as np\nimport oct2py\nfrom xml.dom import minidom\n\nfrom IPython.core.displaypub import publish_display_data\nfrom IPython.core.magic import (Magics, magics_class, line_magic,\n line_cell_magic)\nfrom IPython.testing.skipdoctest import skip_doctest\nfrom IPython.core.magic_arguments import (\n argument, magic_arguments, parse_argstring\n)\nfrom IPython.utils.py3compat import unicode_to_str\n\nclass OctaveMagicError(oct2py.Oct2PyError):\n pass\n\n_mimetypes = {'png' : 'image/png',\n 'svg' : 'image/svg+xml',\n 'jpg' : 'image/jpeg',\n 'jpeg': 'image/jpeg'}\n\n@magics_class\nclass OctaveMagics(Magics):\n \"\"\"A set of magics useful for interactive work with Octave via oct2py.\n \"\"\"\n def __init__(self, shell):\n \"\"\"\n Parameters\n ----------\n shell : IPython shell\n\n \"\"\"\n super(OctaveMagics, self).__init__(shell)\n self._oct = oct2py.Oct2Py()\n self._plot_format = 'png'\n\n # Allow publish_display_data to be overridden for\n # testing purposes.\n self._publish_display_data = publish_display_data\n\n\n def _fix_gnuplot_svg_size(self, image, size=None):\n \"\"\"\n GnuPlot SVGs do not have height/width attributes. 
Set\n these to be the same as the viewBox, so that the browser\n scales the image correctly.\n\n Parameters\n ----------\n image : str\n SVG data.\n size : tuple of int\n Image width, height.\n\n \"\"\"\n (svg,) = minidom.parseString(image).getElementsByTagName('svg')\n viewbox = svg.getAttribute('viewBox').split(' ')\n\n if size is not None:\n width, height = size\n else:\n width, height = viewbox[2:]\n\n svg.setAttribute('width', '%dpx' % width)\n svg.setAttribute('height', '%dpx' % height)\n return svg.toxml()\n\n\n @skip_doctest\n @line_magic\n def octave_push(self, line):\n '''\n Line-level magic that pushes a variable to Octave.\n\n `line` should be made up of whitespace separated variable names in the\n IPython namespace::\n\n In [7]: import numpy as np\n\n In [8]: X = np.arange(5)\n\n In [9]: X.mean()\n Out[9]: 2.0\n\n In [10]: %octave_push X\n\n In [11]: %octave mean(X)\n Out[11]: 2.0\n\n '''\n inputs = line.split(' ')\n for input in inputs:\n input = unicode_to_str(input)\n self._oct.put(input, self.shell.user_ns[input])\n\n\n @skip_doctest\n @line_magic\n def octave_pull(self, line):\n '''\n Line-level magic that pulls a variable from Octave.\n\n In [18]: _ = %octave x = [1 2; 3 4]; y = 'hello'\n\n In [19]: %octave_pull x y\n\n In [20]: x\n Out[20]:\n array([[ 1., 2.],\n [ 3., 4.]])\n\n In [21]: y\n Out[21]: 'hello'\n\n '''\n outputs = line.split(' ')\n for output in outputs:\n output = unicode_to_str(output)\n self.shell.push({output: self._oct.get(output)})\n\n\n @skip_doctest\n @magic_arguments()\n @argument(\n '-i', '--input', action='append',\n help='Names of input variables to be pushed to Octave. Multiple names '\n 'can be passed, separated by commas with no whitespace.'\n )\n @argument(\n '-o', '--output', action='append',\n help='Names of variables to be pulled from Octave after executing cell '\n 'body. Multiple names can be passed, separated by commas with no '\n 'whitespace.'\n )\n @argument(\n '-s', '--size', action='store',\n help='Pixel size of plots, \"width,height\". 
Default is \"-s 400,250\".'\n )\n @argument(\n '-f', '--format', action='store',\n help='Plot format (png, svg or jpg).'\n )\n\n @argument(\n 'code',\n nargs='*',\n )\n @line_cell_magic\n def octave(self, line, cell=None):\n '''\n Execute code in Octave, and pull some of the results back into the\n Python namespace.\n\n In [9]: %octave X = [1 2; 3 4]; mean(X)\n Out[9]: array([[ 2., 3.]])\n\n As a cell, this will run a block of Octave code, without returning any\n value::\n\n In [10]: %%octave\n ....: p = [-2, -1, 0, 1, 2]\n ....: polyout(p, 'x')\n\n -2*x^4 - 1*x^3 + 0*x^2 + 1*x^1 + 2\n\n In the notebook, plots are published as the output of the cell, e.g.\n\n %octave plot([1 2 3], [4 5 6])\n\n will create a line plot.\n\n Objects can be passed back and forth between Octave and IPython via the\n -i and -o flags in line::\n\n In [14]: Z = np.array([1, 4, 5, 10])\n\n In [15]: %octave -i Z mean(Z)\n Out[15]: array([ 5.])\n\n\n In [16]: %octave -o W W = Z * mean(Z)\n Out[16]: array([ 5., 20., 25., 50.])\n\n In [17]: W\n Out[17]: array([ 5., 20., 25., 50.])\n\n The size and format of output plots can be specified::\n\n In [18]: %%octave -s 600,800 -f svg\n ...: plot([1, 2, 3]);\n\n '''\n args = parse_argstring(self.octave, line)\n\n # arguments 'code' in line are prepended to the cell lines\n if cell is None:\n code = ''\n return_output = True\n line_mode = True\n else:\n code = cell\n return_output = False\n line_mode = False\n\n code = ' '.join(args.code) + code\n\n if args.input:\n for input in ','.join(args.input).split(','):\n input = unicode_to_str(input)\n self._oct.put(input, self.shell.user_ns[input])\n\n # generate plots in a temporary directory\n plot_dir = tempfile.mkdtemp()\n if args.size is not None:\n size = args.size\n else:\n size = '400,240'\n\n if args.format is not None:\n plot_format = args.format\n else:\n plot_format = 'png'\n\n pre_call = '''\n global __ipy_figures = [];\n page_screen_output(0);\n\n function fig_create(src, event)\n global __ipy_figures;\n __ipy_figures(size(__ipy_figures) + 1) = src;\n set(src, \"visible\", \"off\");\n end\n\n set(0, 'DefaultFigureCreateFcn', @fig_create);\n\n close all;\n clear ans;\n\n # ___<end_pre_call>___ #\n '''\n\n post_call = '''\n # ___<start_post_call>___ #\n\n # Save output of the last execution\n if exist(\"ans\") == 1\n _ = ans;\n else\n _ = nan;\n end\n\n for f = __ipy_figures\n outfile = sprintf('%(plot_dir)s/__ipy_oct_fig_%%03d.png', f);\n try\n print(f, outfile, '-d%(plot_format)s', '-tight', '-S%(size)s');\n end\n end\n\n ''' % locals()\n\n code = ' '.join((pre_call, code, post_call))\n try:\n text_output = self._oct.run(code, verbose=False)\n except (oct2py.Oct2PyError) as exception:\n msg = exception.message\n msg = msg.split('# ___<end_pre_call>___ #')[1]\n msg = msg.split('# ___<start_post_call>___ #')[0]\n raise OctaveMagicError('Octave could not complete execution. 
'\n 'Traceback (currently broken in oct2py): %s'\n % msg)\n\n key = 'OctaveMagic.Octave'\n display_data = []\n\n # Publish text output\n if text_output:\n display_data.append((key, {'text/plain': text_output}))\n\n # Publish images\n images = [open(imgfile, 'rb').read() for imgfile in \\\n glob(\"%s/*\" % plot_dir)]\n rmtree(plot_dir)\n\n plot_mime_type = _mimetypes.get(plot_format, 'image/png')\n width, height = [int(s) for s in size.split(',')]\n for image in images:\n if plot_format == 'svg':\n image = self._fix_gnuplot_svg_size(image, size=(width, height))\n display_data.append((key, {plot_mime_type: image}))\n\n if args.output:\n for output in ','.join(args.output).split(','):\n output = unicode_to_str(output)\n self.shell.push({output: self._oct.get(output)})\n\n for source, data in display_data:\n self._publish_display_data(source, data)\n\n if return_output:\n ans = self._oct.get('_')\n\n # Unfortunately, Octave doesn't have a \"None\" object,\n # so we can't return any NaN outputs\n if np.isscalar(ans) and np.isnan(ans):\n ans = None\n\n return ans\n\n\n__doc__ = __doc__.format(\n OCTAVE_DOC = ' '*8 + OctaveMagics.octave.__doc__,\n OCTAVE_PUSH_DOC = ' '*8 + OctaveMagics.octave_push.__doc__,\n OCTAVE_PULL_DOC = ' '*8 + OctaveMagics.octave_pull.__doc__\n )\n\n\n_loaded = False\ndef load_ipython_extension(ip):\n \"\"\"Load the extension in IPython.\"\"\"\n global _loaded\n if not _loaded:\n ip.register_magics(OctaveMagics)\n _loaded = True\n", "path": "IPython/extensions/octavemagic.py" } ]
[ { "content": "# -*- coding: utf-8 -*-\n\"\"\"\n===========\noctavemagic\n===========\n\nMagics for interacting with Octave via oct2py.\n\n.. note::\n\n The ``oct2py`` module needs to be installed separately and\n can be obtained using ``easy_install`` or ``pip``.\n\nUsage\n=====\n\n``%octave``\n\n{OCTAVE_DOC}\n\n``%octave_push``\n\n{OCTAVE_PUSH_DOC}\n\n``%octave_pull``\n\n{OCTAVE_PULL_DOC}\n\n\"\"\"\n\n#-----------------------------------------------------------------------------\n# Copyright (C) 2012 The IPython Development Team\n#\n# Distributed under the terms of the BSD License. The full license is in\n# the file COPYING, distributed as part of this software.\n#-----------------------------------------------------------------------------\n\nimport tempfile\nfrom glob import glob\nfrom shutil import rmtree\n\nimport numpy as np\nimport oct2py\nfrom xml.dom import minidom\n\nfrom IPython.core.displaypub import publish_display_data\nfrom IPython.core.magic import (Magics, magics_class, line_magic,\n line_cell_magic)\nfrom IPython.testing.skipdoctest import skip_doctest\nfrom IPython.core.magic_arguments import (\n argument, magic_arguments, parse_argstring\n)\nfrom IPython.utils.py3compat import unicode_to_str\n\nclass OctaveMagicError(oct2py.Oct2PyError):\n pass\n\n_mimetypes = {'png' : 'image/png',\n 'svg' : 'image/svg+xml',\n 'jpg' : 'image/jpeg',\n 'jpeg': 'image/jpeg'}\n\n@magics_class\nclass OctaveMagics(Magics):\n \"\"\"A set of magics useful for interactive work with Octave via oct2py.\n \"\"\"\n def __init__(self, shell):\n \"\"\"\n Parameters\n ----------\n shell : IPython shell\n\n \"\"\"\n super(OctaveMagics, self).__init__(shell)\n self._oct = oct2py.Oct2Py()\n self._plot_format = 'png'\n\n # Allow publish_display_data to be overridden for\n # testing purposes.\n self._publish_display_data = publish_display_data\n\n\n def _fix_gnuplot_svg_size(self, image, size=None):\n \"\"\"\n GnuPlot SVGs do not have height/width attributes. 
Set\n these to be the same as the viewBox, so that the browser\n scales the image correctly.\n\n Parameters\n ----------\n image : str\n SVG data.\n size : tuple of int\n Image width, height.\n\n \"\"\"\n (svg,) = minidom.parseString(image).getElementsByTagName('svg')\n viewbox = svg.getAttribute('viewBox').split(' ')\n\n if size is not None:\n width, height = size\n else:\n width, height = viewbox[2:]\n\n svg.setAttribute('width', '%dpx' % width)\n svg.setAttribute('height', '%dpx' % height)\n return svg.toxml()\n\n\n @skip_doctest\n @line_magic\n def octave_push(self, line):\n '''\n Line-level magic that pushes a variable to Octave.\n\n `line` should be made up of whitespace separated variable names in the\n IPython namespace::\n\n In [7]: import numpy as np\n\n In [8]: X = np.arange(5)\n\n In [9]: X.mean()\n Out[9]: 2.0\n\n In [10]: %octave_push X\n\n In [11]: %octave mean(X)\n Out[11]: 2.0\n\n '''\n inputs = line.split(' ')\n for input in inputs:\n input = unicode_to_str(input)\n self._oct.put(input, self.shell.user_ns[input])\n\n\n @skip_doctest\n @line_magic\n def octave_pull(self, line):\n '''\n Line-level magic that pulls a variable from Octave.\n\n In [18]: _ = %octave x = [1 2; 3 4]; y = 'hello'\n\n In [19]: %octave_pull x y\n\n In [20]: x\n Out[20]:\n array([[ 1., 2.],\n [ 3., 4.]])\n\n In [21]: y\n Out[21]: 'hello'\n\n '''\n outputs = line.split(' ')\n for output in outputs:\n output = unicode_to_str(output)\n self.shell.push({output: self._oct.get(output)})\n\n\n @skip_doctest\n @magic_arguments()\n @argument(\n '-i', '--input', action='append',\n help='Names of input variables to be pushed to Octave. Multiple names '\n 'can be passed, separated by commas with no whitespace.'\n )\n @argument(\n '-o', '--output', action='append',\n help='Names of variables to be pulled from Octave after executing cell '\n 'body. Multiple names can be passed, separated by commas with no '\n 'whitespace.'\n )\n @argument(\n '-s', '--size', action='store',\n help='Pixel size of plots, \"width,height\". 
Default is \"-s 400,250\".'\n )\n @argument(\n '-f', '--format', action='store',\n help='Plot format (png, svg or jpg).'\n )\n\n @argument(\n 'code',\n nargs='*',\n )\n @line_cell_magic\n def octave(self, line, cell=None):\n '''\n Execute code in Octave, and pull some of the results back into the\n Python namespace.\n\n In [9]: %octave X = [1 2; 3 4]; mean(X)\n Out[9]: array([[ 2., 3.]])\n\n As a cell, this will run a block of Octave code, without returning any\n value::\n\n In [10]: %%octave\n ....: p = [-2, -1, 0, 1, 2]\n ....: polyout(p, 'x')\n\n -2*x^4 - 1*x^3 + 0*x^2 + 1*x^1 + 2\n\n In the notebook, plots are published as the output of the cell, e.g.\n\n %octave plot([1 2 3], [4 5 6])\n\n will create a line plot.\n\n Objects can be passed back and forth between Octave and IPython via the\n -i and -o flags in line::\n\n In [14]: Z = np.array([1, 4, 5, 10])\n\n In [15]: %octave -i Z mean(Z)\n Out[15]: array([ 5.])\n\n\n In [16]: %octave -o W W = Z * mean(Z)\n Out[16]: array([ 5., 20., 25., 50.])\n\n In [17]: W\n Out[17]: array([ 5., 20., 25., 50.])\n\n The size and format of output plots can be specified::\n\n In [18]: %%octave -s 600,800 -f svg\n ...: plot([1, 2, 3]);\n\n '''\n args = parse_argstring(self.octave, line)\n\n # arguments 'code' in line are prepended to the cell lines\n if cell is None:\n code = ''\n return_output = True\n line_mode = True\n else:\n code = cell\n return_output = False\n line_mode = False\n\n code = ' '.join(args.code) + code\n\n if args.input:\n for input in ','.join(args.input).split(','):\n input = unicode_to_str(input)\n self._oct.put(input, self.shell.user_ns[input])\n\n # generate plots in a temporary directory\n plot_dir = tempfile.mkdtemp()\n if args.size is not None:\n size = args.size\n else:\n size = '400,240'\n\n if args.format is not None:\n plot_format = args.format\n else:\n plot_format = 'png'\n\n pre_call = '''\n global __ipy_figures = [];\n page_screen_output(0);\n\n function fig_create(src, event)\n global __ipy_figures;\n __ipy_figures(size(__ipy_figures) + 1) = src;\n set(src, \"visible\", \"off\");\n end\n\n set(0, 'DefaultFigureCreateFcn', @fig_create);\n\n close all;\n clear ans;\n\n # ___<end_pre_call>___ #\n '''\n\n post_call = '''\n # ___<start_post_call>___ #\n\n # Save output of the last execution\n if exist(\"ans\") == 1\n _ = ans;\n else\n _ = nan;\n end\n\n for f = __ipy_figures\n outfile = sprintf('%(plot_dir)s/__ipy_oct_fig_%%03d.png', f);\n try\n print(f, outfile, '-d%(plot_format)s', '-tight', '-S%(size)s');\n end\n end\n\n ''' % locals()\n\n code = ' '.join((pre_call, code, post_call))\n try:\n text_output = self._oct.run(code, verbose=False)\n except (oct2py.Oct2PyError) as exception:\n msg = exception.message\n msg = msg.split('# ___<end_pre_call>___ #')[1]\n msg = msg.split('# ___<start_post_call>___ #')[0]\n raise OctaveMagicError('Octave could not complete execution. 
'\n 'Traceback (currently broken in oct2py): %s'\n % msg)\n\n key = 'OctaveMagic.Octave'\n display_data = []\n\n # Publish text output\n if text_output:\n display_data.append((key, {'text/plain': text_output}))\n\n # Publish images\n images = [open(imgfile, 'rb').read() for imgfile in \\\n glob(\"%s/*\" % plot_dir)]\n rmtree(plot_dir)\n\n plot_mime_type = _mimetypes.get(plot_format, 'image/png')\n width, height = [int(s) for s in size.split(',')]\n for image in images:\n if plot_format == 'svg':\n image = self._fix_gnuplot_svg_size(image, size=(width, height))\n display_data.append((key, {plot_mime_type: image}))\n\n if args.output:\n for output in ','.join(args.output).split(','):\n output = unicode_to_str(output)\n self.shell.push({output: self._oct.get(output)})\n\n for source, data in display_data:\n self._publish_display_data(source, data)\n\n if return_output:\n ans = self._oct.get('_')\n\n # Unfortunately, Octave doesn't have a \"None\" object,\n # so we can't return any NaN outputs\n if np.isscalar(ans) and np.isnan(ans):\n ans = None\n\n return ans\n\n\n__doc__ = __doc__.format(\n OCTAVE_DOC = ' '*8 + OctaveMagics.octave.__doc__,\n OCTAVE_PUSH_DOC = ' '*8 + OctaveMagics.octave_push.__doc__,\n OCTAVE_PULL_DOC = ' '*8 + OctaveMagics.octave_pull.__doc__\n )\n\n\n_loaded = False\ndef load_ipython_extension(ip):\n \"\"\"Load the extension in IPython.\"\"\"\n global _loaded\n if not _loaded:\n ip.register_magics(OctaveMagics)\n _loaded = True\n", "path": "IPython/extensions/octavemagic.py" } ]
diff --git a/IPython/extensions/octavemagic.py b/IPython/extensions/octavemagic.py index f46e6e935d1..a0114bf6f9e 100644 --- a/IPython/extensions/octavemagic.py +++ b/IPython/extensions/octavemagic.py @@ -8,8 +8,8 @@ .. note:: - The ``oct2py`` module needs to be installed separately, and in turn depends - on ``h5py``. Both can be obtained using ``easy_install`` or ``pip``. + The ``oct2py`` module needs to be installed separately and + can be obtained using ``easy_install`` or ``pip``. Usage ===== diff --git a/docs/examples/notebooks/octavemagic_extension.ipynb b/docs/examples/notebooks/octavemagic_extension.ipynb index ece25c47a97..9779833dc66 100644 --- a/docs/examples/notebooks/octavemagic_extension.ipynb +++ b/docs/examples/notebooks/octavemagic_extension.ipynb @@ -27,7 +27,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The `octavemagic` extension provides the ability to interact with Octave. It depends on the `oct2py` and `h5py` packages,\n", + "The `octavemagic` extension provides the ability to interact with Octave. It depends on the `oct2py` package,\n", "which may be installed using `easy_install`.\n", "\n", "To enable the extension, load it as follows:" diff --git a/docs/source/whatsnew/version0.13.txt b/docs/source/whatsnew/version0.13.txt index 896d6219360..32aa1bc2d7c 100644 --- a/docs/source/whatsnew/version0.13.txt +++ b/docs/source/whatsnew/version0.13.txt @@ -304,7 +304,7 @@ extremely useful. The following extensions are provided: or whole blocks of Octave code, capture both output and figures inline (just like matplotlib plots), and have variables automatically converted between the two languages. To use this extension, you must have Octave - installed as well as the oct2py_ and h5py_ packages. The examples + installed as well as the oct2py_ package. The examples directory in the source distribution ships with a full notebook demonstrating these capabilities: @@ -316,7 +316,6 @@ extremely useful. The following extensions are provided: .. _octave: http://www.gnu.org/software/octave .. _oct2py: http://pypi.python.org/pypi/oct2py -.. _h5py: http://code.google.com/p/h5py **R magics** (extension :ref:`rmagic <extensions_rmagic>`) This extension provides several magics that support calling code written in
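A short usage sketch to go with the record above (an illustration only, not part of the patch): after the change, the docs state that only `oct2py` is required. The session below is adapted from the docstrings in the patched file and assumes GNU Octave and oct2py >= 0.3.1 are installed, with no h5py present.

```python
# In an IPython session (assumes GNU Octave and oct2py >= 0.3.1 are installed;
# h5py is not needed, matching the updated note above).
import numpy as np

%load_ext octavemagic          # registers %octave, %octave_push, %octave_pull

X = np.arange(5)
%octave_push X                 # copy X into the Octave workspace
%octave mean(X)                # evaluated by Octave -> 2.0
```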
open-telemetry__opentelemetry-python-3442
Allow use of "/" in Metrics Instrument Names, and with a 255 char limit
As per the recent spec changes:
* Increase max instrument name length from 63 to 255: https://github.com/open-telemetry/opentelemetry-specification/pull/3648
* Instrument names can have the "/" character: https://github.com/open-telemetry/opentelemetry-specification/pull/3684
Then update the spec compliance matrix: https://github.com/open-telemetry/opentelemetry-specification/blob/main/spec-compliance-matrix.md
[ { "content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# pylint: disable=too-many-ancestors\n\n\nfrom abc import ABC, abstractmethod\nfrom dataclasses import dataclass\nfrom logging import getLogger\nfrom re import compile as re_compile\nfrom typing import (\n Callable,\n Dict,\n Generator,\n Generic,\n Iterable,\n Optional,\n Sequence,\n TypeVar,\n Union,\n)\n\n# pylint: disable=unused-import; needed for typing and sphinx\nfrom opentelemetry import metrics\nfrom opentelemetry.metrics._internal.observation import Observation\nfrom opentelemetry.util.types import Attributes\n\n_logger = getLogger(__name__)\n\n_name_regex = re_compile(r\"[a-zA-Z][-_.a-zA-Z0-9]{0,62}\")\n_unit_regex = re_compile(r\"[\\x00-\\x7F]{0,63}\")\n\n\n@dataclass(frozen=True)\nclass CallbackOptions:\n \"\"\"Options for the callback\n\n Args:\n timeout_millis: Timeout for the callback's execution. If the callback does asynchronous\n work (e.g. HTTP requests), it should respect this timeout.\n \"\"\"\n\n timeout_millis: float = 10_000\n\n\nInstrumentT = TypeVar(\"InstrumentT\", bound=\"Instrument\")\nCallbackT = Union[\n Callable[[CallbackOptions], Iterable[Observation]],\n Generator[Iterable[Observation], CallbackOptions, None],\n]\n\n\nclass Instrument(ABC):\n \"\"\"Abstract class that serves as base for all instruments.\"\"\"\n\n @abstractmethod\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n pass\n\n @staticmethod\n def _check_name_unit_description(\n name: str, unit: str, description: str\n ) -> Dict[str, Optional[str]]:\n \"\"\"\n Checks the following instrument name, unit and description for\n compliance with the spec.\n\n Returns a dict with keys \"name\", \"unit\" and \"description\", the\n corresponding values will be the checked strings or `None` if the value\n is invalid. 
If valid, the checked strings should be used instead of the\n original values.\n \"\"\"\n\n result: Dict[str, Optional[str]] = {}\n\n if _name_regex.fullmatch(name) is not None:\n result[\"name\"] = name\n else:\n result[\"name\"] = None\n\n if unit is None:\n unit = \"\"\n if _unit_regex.fullmatch(unit) is not None:\n result[\"unit\"] = unit\n else:\n result[\"unit\"] = None\n\n if description is None:\n result[\"description\"] = \"\"\n else:\n result[\"description\"] = description\n\n return result\n\n\nclass _ProxyInstrument(ABC, Generic[InstrumentT]):\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n self._name = name\n self._unit = unit\n self._description = description\n self._real_instrument: Optional[InstrumentT] = None\n\n def on_meter_set(self, meter: \"metrics.Meter\") -> None:\n \"\"\"Called when a real meter is set on the creating _ProxyMeter\"\"\"\n\n # We don't need any locking on proxy instruments because it's OK if some\n # measurements get dropped while a real backing instrument is being\n # created.\n self._real_instrument = self._create_real_instrument(meter)\n\n @abstractmethod\n def _create_real_instrument(self, meter: \"metrics.Meter\") -> InstrumentT:\n \"\"\"Create an instance of the real instrument. Implement this.\"\"\"\n\n\nclass _ProxyAsynchronousInstrument(_ProxyInstrument[InstrumentT]):\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, unit, description)\n self._callbacks = callbacks\n\n\nclass Synchronous(Instrument):\n \"\"\"Base class for all synchronous instruments\"\"\"\n\n\nclass Asynchronous(Instrument):\n \"\"\"Base class for all asynchronous instruments\"\"\"\n\n @abstractmethod\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, unit=unit, description=description)\n\n\nclass Counter(Synchronous):\n \"\"\"A Counter is a synchronous `Instrument` which supports non-negative increments.\"\"\"\n\n @abstractmethod\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n pass\n\n\nclass NoOpCounter(Counter):\n \"\"\"No-op implementation of `Counter`.\"\"\"\n\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, unit=unit, description=description)\n\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n return super().add(amount, attributes=attributes)\n\n\nclass _ProxyCounter(_ProxyInstrument[Counter], Counter):\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n if self._real_instrument:\n self._real_instrument.add(amount, attributes)\n\n def _create_real_instrument(self, meter: \"metrics.Meter\") -> Counter:\n return meter.create_counter(self._name, self._unit, self._description)\n\n\nclass UpDownCounter(Synchronous):\n \"\"\"An UpDownCounter is a synchronous `Instrument` which supports increments and decrements.\"\"\"\n\n @abstractmethod\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n pass\n\n\nclass NoOpUpDownCounter(UpDownCounter):\n \"\"\"No-op implementation of `UpDownCounter`.\"\"\"\n\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n 
super().__init__(name, unit=unit, description=description)\n\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n return super().add(amount, attributes=attributes)\n\n\nclass _ProxyUpDownCounter(_ProxyInstrument[UpDownCounter], UpDownCounter):\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n if self._real_instrument:\n self._real_instrument.add(amount, attributes)\n\n def _create_real_instrument(self, meter: \"metrics.Meter\") -> UpDownCounter:\n return meter.create_up_down_counter(\n self._name, self._unit, self._description\n )\n\n\nclass ObservableCounter(Asynchronous):\n \"\"\"An ObservableCounter is an asynchronous `Instrument` which reports monotonically\n increasing value(s) when the instrument is being observed.\n \"\"\"\n\n\nclass NoOpObservableCounter(ObservableCounter):\n \"\"\"No-op implementation of `ObservableCounter`.\"\"\"\n\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, callbacks, unit=unit, description=description)\n\n\nclass _ProxyObservableCounter(\n _ProxyAsynchronousInstrument[ObservableCounter], ObservableCounter\n):\n def _create_real_instrument(\n self, meter: \"metrics.Meter\"\n ) -> ObservableCounter:\n return meter.create_observable_counter(\n self._name, self._callbacks, self._unit, self._description\n )\n\n\nclass ObservableUpDownCounter(Asynchronous):\n \"\"\"An ObservableUpDownCounter is an asynchronous `Instrument` which reports additive value(s) (e.g.\n the process heap size - it makes sense to report the heap size from multiple processes and sum them\n up, so we get the total heap usage) when the instrument is being observed.\n \"\"\"\n\n\nclass NoOpObservableUpDownCounter(ObservableUpDownCounter):\n \"\"\"No-op implementation of `ObservableUpDownCounter`.\"\"\"\n\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, callbacks, unit=unit, description=description)\n\n\nclass _ProxyObservableUpDownCounter(\n _ProxyAsynchronousInstrument[ObservableUpDownCounter],\n ObservableUpDownCounter,\n):\n def _create_real_instrument(\n self, meter: \"metrics.Meter\"\n ) -> ObservableUpDownCounter:\n return meter.create_observable_up_down_counter(\n self._name, self._callbacks, self._unit, self._description\n )\n\n\nclass Histogram(Synchronous):\n \"\"\"Histogram is a synchronous `Instrument` which can be used to report arbitrary values\n that are likely to be statistically meaningful. 
It is intended for statistics such as\n histograms, summaries, and percentile.\n \"\"\"\n\n @abstractmethod\n def record(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n pass\n\n\nclass NoOpHistogram(Histogram):\n \"\"\"No-op implementation of `Histogram`.\"\"\"\n\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, unit=unit, description=description)\n\n def record(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n return super().record(amount, attributes=attributes)\n\n\nclass _ProxyHistogram(_ProxyInstrument[Histogram], Histogram):\n def record(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n if self._real_instrument:\n self._real_instrument.record(amount, attributes)\n\n def _create_real_instrument(self, meter: \"metrics.Meter\") -> Histogram:\n return meter.create_histogram(\n self._name, self._unit, self._description\n )\n\n\nclass ObservableGauge(Asynchronous):\n \"\"\"Asynchronous Gauge is an asynchronous `Instrument` which reports non-additive value(s) (e.g.\n the room temperature - it makes no sense to report the temperature value from multiple rooms\n and sum them up) when the instrument is being observed.\n \"\"\"\n\n\nclass NoOpObservableGauge(ObservableGauge):\n \"\"\"No-op implementation of `ObservableGauge`.\"\"\"\n\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, callbacks, unit=unit, description=description)\n\n\nclass _ProxyObservableGauge(\n _ProxyAsynchronousInstrument[ObservableGauge],\n ObservableGauge,\n):\n def _create_real_instrument(\n self, meter: \"metrics.Meter\"\n ) -> ObservableGauge:\n return meter.create_observable_gauge(\n self._name, self._callbacks, self._unit, self._description\n )\n", "path": "opentelemetry-api/src/opentelemetry/metrics/_internal/instrument.py" } ]
[ { "content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# pylint: disable=too-many-ancestors\n\n\nfrom abc import ABC, abstractmethod\nfrom dataclasses import dataclass\nfrom logging import getLogger\nfrom re import compile as re_compile\nfrom typing import (\n Callable,\n Dict,\n Generator,\n Generic,\n Iterable,\n Optional,\n Sequence,\n TypeVar,\n Union,\n)\n\n# pylint: disable=unused-import; needed for typing and sphinx\nfrom opentelemetry import metrics\nfrom opentelemetry.metrics._internal.observation import Observation\nfrom opentelemetry.util.types import Attributes\n\n_logger = getLogger(__name__)\n\n_name_regex = re_compile(r\"[a-zA-Z][-_./a-zA-Z0-9]{0,254}\")\n_unit_regex = re_compile(r\"[\\x00-\\x7F]{0,63}\")\n\n\n@dataclass(frozen=True)\nclass CallbackOptions:\n \"\"\"Options for the callback\n\n Args:\n timeout_millis: Timeout for the callback's execution. If the callback does asynchronous\n work (e.g. HTTP requests), it should respect this timeout.\n \"\"\"\n\n timeout_millis: float = 10_000\n\n\nInstrumentT = TypeVar(\"InstrumentT\", bound=\"Instrument\")\nCallbackT = Union[\n Callable[[CallbackOptions], Iterable[Observation]],\n Generator[Iterable[Observation], CallbackOptions, None],\n]\n\n\nclass Instrument(ABC):\n \"\"\"Abstract class that serves as base for all instruments.\"\"\"\n\n @abstractmethod\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n pass\n\n @staticmethod\n def _check_name_unit_description(\n name: str, unit: str, description: str\n ) -> Dict[str, Optional[str]]:\n \"\"\"\n Checks the following instrument name, unit and description for\n compliance with the spec.\n\n Returns a dict with keys \"name\", \"unit\" and \"description\", the\n corresponding values will be the checked strings or `None` if the value\n is invalid. 
If valid, the checked strings should be used instead of the\n original values.\n \"\"\"\n\n result: Dict[str, Optional[str]] = {}\n\n if _name_regex.fullmatch(name) is not None:\n result[\"name\"] = name\n else:\n result[\"name\"] = None\n\n if unit is None:\n unit = \"\"\n if _unit_regex.fullmatch(unit) is not None:\n result[\"unit\"] = unit\n else:\n result[\"unit\"] = None\n\n if description is None:\n result[\"description\"] = \"\"\n else:\n result[\"description\"] = description\n\n return result\n\n\nclass _ProxyInstrument(ABC, Generic[InstrumentT]):\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n self._name = name\n self._unit = unit\n self._description = description\n self._real_instrument: Optional[InstrumentT] = None\n\n def on_meter_set(self, meter: \"metrics.Meter\") -> None:\n \"\"\"Called when a real meter is set on the creating _ProxyMeter\"\"\"\n\n # We don't need any locking on proxy instruments because it's OK if some\n # measurements get dropped while a real backing instrument is being\n # created.\n self._real_instrument = self._create_real_instrument(meter)\n\n @abstractmethod\n def _create_real_instrument(self, meter: \"metrics.Meter\") -> InstrumentT:\n \"\"\"Create an instance of the real instrument. Implement this.\"\"\"\n\n\nclass _ProxyAsynchronousInstrument(_ProxyInstrument[InstrumentT]):\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, unit, description)\n self._callbacks = callbacks\n\n\nclass Synchronous(Instrument):\n \"\"\"Base class for all synchronous instruments\"\"\"\n\n\nclass Asynchronous(Instrument):\n \"\"\"Base class for all asynchronous instruments\"\"\"\n\n @abstractmethod\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, unit=unit, description=description)\n\n\nclass Counter(Synchronous):\n \"\"\"A Counter is a synchronous `Instrument` which supports non-negative increments.\"\"\"\n\n @abstractmethod\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n pass\n\n\nclass NoOpCounter(Counter):\n \"\"\"No-op implementation of `Counter`.\"\"\"\n\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, unit=unit, description=description)\n\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n return super().add(amount, attributes=attributes)\n\n\nclass _ProxyCounter(_ProxyInstrument[Counter], Counter):\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n if self._real_instrument:\n self._real_instrument.add(amount, attributes)\n\n def _create_real_instrument(self, meter: \"metrics.Meter\") -> Counter:\n return meter.create_counter(self._name, self._unit, self._description)\n\n\nclass UpDownCounter(Synchronous):\n \"\"\"An UpDownCounter is a synchronous `Instrument` which supports increments and decrements.\"\"\"\n\n @abstractmethod\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n pass\n\n\nclass NoOpUpDownCounter(UpDownCounter):\n \"\"\"No-op implementation of `UpDownCounter`.\"\"\"\n\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n 
super().__init__(name, unit=unit, description=description)\n\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n return super().add(amount, attributes=attributes)\n\n\nclass _ProxyUpDownCounter(_ProxyInstrument[UpDownCounter], UpDownCounter):\n def add(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n if self._real_instrument:\n self._real_instrument.add(amount, attributes)\n\n def _create_real_instrument(self, meter: \"metrics.Meter\") -> UpDownCounter:\n return meter.create_up_down_counter(\n self._name, self._unit, self._description\n )\n\n\nclass ObservableCounter(Asynchronous):\n \"\"\"An ObservableCounter is an asynchronous `Instrument` which reports monotonically\n increasing value(s) when the instrument is being observed.\n \"\"\"\n\n\nclass NoOpObservableCounter(ObservableCounter):\n \"\"\"No-op implementation of `ObservableCounter`.\"\"\"\n\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, callbacks, unit=unit, description=description)\n\n\nclass _ProxyObservableCounter(\n _ProxyAsynchronousInstrument[ObservableCounter], ObservableCounter\n):\n def _create_real_instrument(\n self, meter: \"metrics.Meter\"\n ) -> ObservableCounter:\n return meter.create_observable_counter(\n self._name, self._callbacks, self._unit, self._description\n )\n\n\nclass ObservableUpDownCounter(Asynchronous):\n \"\"\"An ObservableUpDownCounter is an asynchronous `Instrument` which reports additive value(s) (e.g.\n the process heap size - it makes sense to report the heap size from multiple processes and sum them\n up, so we get the total heap usage) when the instrument is being observed.\n \"\"\"\n\n\nclass NoOpObservableUpDownCounter(ObservableUpDownCounter):\n \"\"\"No-op implementation of `ObservableUpDownCounter`.\"\"\"\n\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, callbacks, unit=unit, description=description)\n\n\nclass _ProxyObservableUpDownCounter(\n _ProxyAsynchronousInstrument[ObservableUpDownCounter],\n ObservableUpDownCounter,\n):\n def _create_real_instrument(\n self, meter: \"metrics.Meter\"\n ) -> ObservableUpDownCounter:\n return meter.create_observable_up_down_counter(\n self._name, self._callbacks, self._unit, self._description\n )\n\n\nclass Histogram(Synchronous):\n \"\"\"Histogram is a synchronous `Instrument` which can be used to report arbitrary values\n that are likely to be statistically meaningful. 
It is intended for statistics such as\n histograms, summaries, and percentile.\n \"\"\"\n\n @abstractmethod\n def record(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n pass\n\n\nclass NoOpHistogram(Histogram):\n \"\"\"No-op implementation of `Histogram`.\"\"\"\n\n def __init__(\n self,\n name: str,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, unit=unit, description=description)\n\n def record(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n return super().record(amount, attributes=attributes)\n\n\nclass _ProxyHistogram(_ProxyInstrument[Histogram], Histogram):\n def record(\n self,\n amount: Union[int, float],\n attributes: Optional[Attributes] = None,\n ) -> None:\n if self._real_instrument:\n self._real_instrument.record(amount, attributes)\n\n def _create_real_instrument(self, meter: \"metrics.Meter\") -> Histogram:\n return meter.create_histogram(\n self._name, self._unit, self._description\n )\n\n\nclass ObservableGauge(Asynchronous):\n \"\"\"Asynchronous Gauge is an asynchronous `Instrument` which reports non-additive value(s) (e.g.\n the room temperature - it makes no sense to report the temperature value from multiple rooms\n and sum them up) when the instrument is being observed.\n \"\"\"\n\n\nclass NoOpObservableGauge(ObservableGauge):\n \"\"\"No-op implementation of `ObservableGauge`.\"\"\"\n\n def __init__(\n self,\n name: str,\n callbacks: Optional[Sequence[CallbackT]] = None,\n unit: str = \"\",\n description: str = \"\",\n ) -> None:\n super().__init__(name, callbacks, unit=unit, description=description)\n\n\nclass _ProxyObservableGauge(\n _ProxyAsynchronousInstrument[ObservableGauge],\n ObservableGauge,\n):\n def _create_real_instrument(\n self, meter: \"metrics.Meter\"\n ) -> ObservableGauge:\n return meter.create_observable_gauge(\n self._name, self._callbacks, self._unit, self._description\n )\n", "path": "opentelemetry-api/src/opentelemetry/metrics/_internal/instrument.py" } ]
diff --git a/CHANGELOG.md b/CHANGELOG.md index 50db7d0bd03..bf7db9c04ff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ([#3423](https://github.com/open-telemetry/opentelemetry-python/pull/3423)) - Make `opentelemetry_metrics_exporter` entrypoint support pull exporters ([#3428](https://github.com/open-telemetry/opentelemetry-python/pull/3428)) +- Allow instrument names to have '/' and up to 255 characters + ([#3442](https://github.com/open-telemetry/opentelemetry-python/pull/3442)) ## Version 1.20.0/0.41b0 (2023-09-04) diff --git a/opentelemetry-api/src/opentelemetry/metrics/_internal/instrument.py b/opentelemetry-api/src/opentelemetry/metrics/_internal/instrument.py index fec2879ef6c..54b2fb7597e 100644 --- a/opentelemetry-api/src/opentelemetry/metrics/_internal/instrument.py +++ b/opentelemetry-api/src/opentelemetry/metrics/_internal/instrument.py @@ -38,7 +38,7 @@ _logger = getLogger(__name__) -_name_regex = re_compile(r"[a-zA-Z][-_.a-zA-Z0-9]{0,62}") +_name_regex = re_compile(r"[a-zA-Z][-_./a-zA-Z0-9]{0,254}") _unit_regex = re_compile(r"[\x00-\x7F]{0,63}") diff --git a/opentelemetry-api/tests/metrics/test_instruments.py b/opentelemetry-api/tests/metrics/test_instruments.py index 4a3d3d448b3..e66460de354 100644 --- a/opentelemetry-api/tests/metrics/test_instruments.py +++ b/opentelemetry-api/tests/metrics/test_instruments.py @@ -564,14 +564,13 @@ def test_observable_up_down_counter_callback(self): ) def test_name_check(self): - instrument = ChildInstrument("name") self.assertEqual( instrument._check_name_unit_description( - "a" * 63, "unit", "description" + "a" * 255, "unit", "description" )["name"], - "a" * 63, + "a" * 255, ) self.assertEqual( instrument._check_name_unit_description( @@ -591,12 +590,24 @@ def test_name_check(self): )["name"], "a_", ) + self.assertEqual( + instrument._check_name_unit_description( + "a/", "unit", "description" + )["name"], + "a/", + ) - self.assertIsNone( + # the old max length + self.assertIsNotNone( instrument._check_name_unit_description( "a" * 64, "unit", "description" )["name"] ) + self.assertIsNone( + instrument._check_name_unit_description( + "a" * 256, "unit", "description" + )["name"] + ) self.assertIsNone( instrument._check_name_unit_description( "Ñ", "unit", "description"
ivy-llc__ivy-18982
imag
[ { "content": "# local\nimport ivy\nfrom ivy.functional.frontends.jax.func_wrapper import (\n to_ivy_arrays_and_back,\n)\nfrom ivy.func_wrapper import with_unsupported_dtypes\nfrom ivy.functional.frontends.jax.numpy import promote_types_of_jax_inputs\nfrom ivy.functional.frontends.numpy.manipulation_routines import trim_zeros\n\n\n# sign\n@to_ivy_arrays_and_back\ndef sign(x, /):\n return ivy.sign(x, out=None)\n\n\n@to_ivy_arrays_and_back\ndef absolute(x, /):\n return ivy.abs(x)\n\n\nabs = absolute\n\n\n@to_ivy_arrays_and_back\ndef add(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.add(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef angle(z, deg=False):\n return ivy.angle(z, deg=deg)\n\n\n@to_ivy_arrays_and_back\ndef diff(a, n=1, axis=-1, prepend=None, append=None):\n return ivy.diff(a, n=n, axis=axis, prepend=prepend, append=append, out=None)\n\n\n@to_ivy_arrays_and_back\ndef ediff1d(ary, to_end=None, to_begin=None):\n diffs = ivy.diff(ary)\n diffs_dtype = diffs.dtype\n if to_begin is not None:\n if not isinstance(to_begin, (list, tuple)):\n to_begin = [to_begin]\n to_begin = ivy.array(to_begin, dtype=diffs_dtype)\n diffs = ivy.concat((to_begin, diffs))\n if to_end is not None:\n if not isinstance(to_end, (list, tuple)):\n to_end = [to_end]\n to_end = ivy.array(to_end, dtype=diffs_dtype)\n diffs = ivy.concat((diffs, to_end))\n return diffs\n\n\n@to_ivy_arrays_and_back\ndef arctan(x, /):\n return ivy.atan(x)\n\n\n@to_ivy_arrays_and_back\ndef arctan2(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.atan2(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef convolve(a, v, mode=\"full\", *, precision=None):\n a, v = promote_types_of_jax_inputs(a, v)\n if ivy.get_num_dims(a) != 1:\n raise ValueError(\"convolve() only support 1-dimensional inputs.\")\n if len(a) == 0 or len(v) == 0:\n raise ValueError(\n f\"convolve: inputs cannot be empty, got shapes {a.shape} and {v.shape}.\"\n )\n if len(a) < len(v):\n a, v = v, a\n v = ivy.flip(v)\n\n out_order = slice(None)\n\n if mode == \"valid\":\n padding = [(0, 0)]\n elif mode == \"same\":\n padding = [(v.shape[0] // 2, v.shape[0] - v.shape[0] // 2 - 1)]\n elif mode == \"full\":\n padding = [(v.shape[0] - 1, v.shape[0] - 1)]\n\n result = ivy.conv_general_dilated(\n a[None, None, :],\n v[:, None, None],\n (1,),\n padding,\n dims=1,\n data_format=\"channel_first\",\n )\n return result[0, 0, out_order]\n\n\n@to_ivy_arrays_and_back\ndef cos(x, /):\n return ivy.cos(x)\n\n\n@to_ivy_arrays_and_back\ndef cosh(x, /):\n return ivy.cosh(x)\n\n\n@to_ivy_arrays_and_back\ndef dot(a, b, *, precision=None):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.matmul(a, b)\n\n\n@to_ivy_arrays_and_back\ndef floor(x, /):\n return ivy.floor(x)\n\n\n@to_ivy_arrays_and_back\ndef mod(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.remainder(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef modf(x, /, out=None):\n y1 = ivy.where(x >= 0, ivy.floor(x), ivy.ceil(x)) # integral part\n y2 = x - y1 # fractional part\n dtype_str = str(x.dtype)\n if \"float\" in dtype_str:\n return y2, y1\n # floats return as they were. 
u/ints (8, 16, 32) return as float32, 64 as float64.\n dtype_size = x.itemsize * 8\n if \"int8\" in dtype_str or \"int16\" in dtype_str:\n dtype_size = 32\n ret_type = \"float{}\".format(dtype_size)\n return y2.astype(ret_type), y1.astype(ret_type)\n\n\n@to_ivy_arrays_and_back\ndef divmod(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return tuple([ivy.floor_divide(x1, x2), ivy.remainder(x1, x2)])\n\n\n@to_ivy_arrays_and_back\ndef sinh(x, /):\n return ivy.sinh(x)\n\n\n@to_ivy_arrays_and_back\ndef sin(x, /):\n return ivy.sin(x)\n\n\n@to_ivy_arrays_and_back\ndef tan(x, /):\n return ivy.tan(x)\n\n\n@to_ivy_arrays_and_back\ndef tanh(x, /):\n return ivy.tanh(x)\n\n\n@to_ivy_arrays_and_back\ndef arccos(x, /):\n return ivy.acos(x)\n\n\n@to_ivy_arrays_and_back\ndef arccosh(x, /):\n return ivy.acosh(x)\n\n\n@to_ivy_arrays_and_back\ndef arcsin(x, /):\n return ivy.asin(x)\n\n\n@to_ivy_arrays_and_back\ndef arcsinh(x, /):\n return ivy.asinh(x)\n\n\n@to_ivy_arrays_and_back\ndef power(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.pow(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef trunc(x):\n return ivy.trunc(x)\n\n\n@to_ivy_arrays_and_back\ndef ceil(x, /):\n return ivy.ceil(x)\n\n\n@to_ivy_arrays_and_back\ndef float_power(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.float_power(x1, x2).astype(x1.dtype, copy=False)\n\n\n@to_ivy_arrays_and_back\ndef deg2rad(x, /):\n return ivy.deg2rad(x)\n\n\n@to_ivy_arrays_and_back\ndef radians(x, /):\n return ivy.deg2rad(x)\n\n\n@to_ivy_arrays_and_back\ndef exp2(x, /):\n return ivy.exp2(x)\n\n\n@to_ivy_arrays_and_back\ndef gcd(x1, x2):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.gcd(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef i0(x):\n return ivy.i0(x)\n\n\n@to_ivy_arrays_and_back\ndef kron(a, b):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.kron(a, b)\n\n\n@to_ivy_arrays_and_back\ndef lcm(x1, x2):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.lcm(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef logaddexp2(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.logaddexp2(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef trapz(y, x=None, dx=1.0, axis=-1, out=None):\n return ivy.trapz(y, x=x, dx=dx, axis=axis, out=out)\n\n\n@to_ivy_arrays_and_back\ndef sqrt(x, /):\n return ivy.sqrt(x)\n\n\n@to_ivy_arrays_and_back\ndef square(x, /):\n return ivy.square(x)\n\n\n@to_ivy_arrays_and_back\ndef arctanh(x, /):\n return ivy.atanh(x)\n\n\n@to_ivy_arrays_and_back\ndef multiply(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.multiply(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef matmul(a, b, *, precision=None):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.matmul(a, b)\n\n\n@to_ivy_arrays_and_back\ndef log10(x, /):\n return ivy.log10(x)\n\n\n@to_ivy_arrays_and_back\ndef logaddexp(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.logaddexp(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef degrees(x, /):\n return ivy.rad2deg(x)\n\n\n@to_ivy_arrays_and_back\ndef negative(\n x,\n /,\n):\n return ivy.negative(x)\n\n\n@to_ivy_arrays_and_back\ndef positive(\n x,\n /,\n):\n return ivy.positive(x)\n\n\n@to_ivy_arrays_and_back\ndef rad2deg(\n x,\n /,\n):\n return ivy.rad2deg(x)\n\n\n@to_ivy_arrays_and_back\ndef tensordot(a, b, axes=2):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.tensordot(a, b, axes=axes)\n\n\n@to_ivy_arrays_and_back\ndef divide(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n if ivy.dtype(x1) in [\"int64\", 
\"uint64\"]:\n x1 = ivy.astype(x1, ivy.float64)\n elif ivy.is_int_dtype(x1):\n x1 = ivy.astype(x1, ivy.float32)\n\n return ivy.divide(x1, x2).astype(x1.dtype)\n\n\ntrue_divide = divide\n\n\n@to_ivy_arrays_and_back\ndef exp(\n x,\n /,\n):\n return ivy.exp(x)\n\n\n@to_ivy_arrays_and_back\ndef expm1(\n x,\n /,\n):\n return ivy.expm1(x)\n\n\n@to_ivy_arrays_and_back\ndef fmax(x1, x2):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n ret = ivy.where(\n ivy.bitwise_or(ivy.greater(x1, x2), ivy.isnan(x2)),\n x1,\n x2,\n )\n return ret\n\n\n@to_ivy_arrays_and_back\ndef fmin(x1, x2):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n ret = ivy.where(\n ivy.bitwise_or(ivy.less(x1, x2), ivy.isnan(x2)),\n x1,\n x2,\n )\n print(\"jax-frontend\", ret)\n return ret\n\n\n@with_unsupported_dtypes(\n {\"0.4.13 and below\": (\"uint16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef fabs(x, /):\n return ivy.abs(x)\n\n\n@to_ivy_arrays_and_back\ndef fmod(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.fmod(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef maximum(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.maximum(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef minimum(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.minimum(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef heaviside(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.heaviside(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef log(x, /):\n return ivy.log(x)\n\n\n@to_ivy_arrays_and_back\ndef log1p(x, /):\n return ivy.log1p(x)\n\n\n@to_ivy_arrays_and_back\ndef copysign(x1, x2, /):\n return ivy.copysign(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef sinc(x, /):\n return ivy.sinc(x)\n\n\n@with_unsupported_dtypes(\n {\n \"0.4.13 and below\": (\n \"bfloat16\",\n \"float16\",\n )\n },\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef nextafter(x1, x2, /):\n return ivy.nextafter(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef remainder(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.remainder(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef trace(a, offset=0, axis1=0, axis2=1, out=None):\n return ivy.trace(a, offset=offset, axis1=axis1, axis2=axis2, out=out)\n\n\n@to_ivy_arrays_and_back\ndef log2(x, /):\n return ivy.log2(x)\n\n\n@to_ivy_arrays_and_back\ndef vdot(a, b):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.multiply(a, b).sum()\n\n\n@with_unsupported_dtypes(\n {\"0.4.13 and below\": (\"bfloat16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef cbrt(x, /):\n all_positive = ivy.pow(ivy.abs(x), 1.0 / 3.0)\n return ivy.where(ivy.less(x, 0.0), ivy.negative(all_positive), all_positive)\n\n\n@to_ivy_arrays_and_back\ndef nan_to_num(x, copy=True, nan=0.0, posinf=None, neginf=None):\n return ivy.nan_to_num(x, copy=copy, nan=nan, posinf=posinf, neginf=neginf)\n\n\n@to_ivy_arrays_and_back\ndef fix(x, out=None):\n return ivy.fix(x, out=out)\n\n\n@to_ivy_arrays_and_back\ndef real(val, /):\n return ivy.real(val)\n\n\n@to_ivy_arrays_and_back\ndef hypot(x1, x2, /):\n return ivy.hypot(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef floor_divide(x1, x2, /, out=None):\n return ivy.floor_divide(x1, x2, out=out)\n\n\n@to_ivy_arrays_and_back\ndef inner(a, b):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.inner(a, b)\n\n\n@to_ivy_arrays_and_back\ndef outer(a, b, out=None):\n return ivy.outer(a, b, out=out)\n\n\n@to_ivy_arrays_and_back\ndef reciprocal(x, /):\n return ivy.reciprocal(x)\n\n\n@to_ivy_arrays_and_back\ndef conj(x, /):\n return ivy.conj(x)\n\n\n@to_ivy_arrays_and_back\ndef 
subtract(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.subtract(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef around(a, decimals=0, out=None):\n ret_dtype = a.dtype\n return ivy.round(a, decimals=decimals, out=out).astype(ret_dtype, copy=False)\n\n\n@to_ivy_arrays_and_back\ndef round(a, decimals=0, out=None):\n return ivy.round(a, decimals=decimals, out=out)\n\n\n@to_ivy_arrays_and_back\ndef frexp(x, /):\n return ivy.frexp(x)\n\n\n@to_ivy_arrays_and_back\ndef ldexp(x1, x2, /):\n return ivy.ldexp(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef poly(seq_of_zeros):\n seq_of_zeros = ivy.atleast_1d(seq_of_zeros)\n sh = seq_of_zeros.shape\n if len(sh) == 2 and sh[0] == sh[1] and sh[0] != 0:\n seq_of_zeros = ivy.eigvals(seq_of_zeros)\n if seq_of_zeros.ndim != 1:\n raise ValueError(\"input must be 1d or non-empty square 2d array.\")\n dt = seq_of_zeros.dtype\n if len(seq_of_zeros) == 0:\n return ivy.ones((), dtype=dt)\n a = ivy.ones((1,), dtype=dt)\n for k in range(len(seq_of_zeros)):\n a = convolve(\n a, ivy.asarray([ivy.array(1), -seq_of_zeros[k]], dtype=dt), mode=\"full\"\n )\n return a\n\n\n@to_ivy_arrays_and_back\ndef polyadd(a1, a2):\n d = max(a1.size, a2.size)\n a1 = ivy.pad(a1, (d - a1.size, 0), mode=\"constant\")\n a2 = ivy.pad(a2, (d - a2.size, 0), mode=\"constant\")\n return a1 + a2\n\n\n@with_unsupported_dtypes(\n {\"0.4.13 and below\": (\"float16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef polyder(p, m=1):\n if m < 0:\n raise ValueError(\"Order of derivative must be positive.\")\n\n if m == 0:\n return p\n p_dtype = p.dtype\n coeff = ivy.prod(\n ivy.expand_dims(ivy.arange(m, len(p), dtype=p_dtype))\n - ivy.expand_dims(ivy.arange(m, dtype=p_dtype), axis=1),\n axis=0,\n )\n return (p[:-m] * coeff[::-1]).astype(p_dtype)\n\n\n@with_unsupported_dtypes(\n {\"0.4.13 and below\": (\"float16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef polyint(p, m=1, k=None):\n p = ivy.asarray(p)\n m = int(m)\n if m == 0:\n return p\n if k is None:\n k_arr = ivy.zeros((m,), dtype=p.dtype)\n elif isinstance(k, (int, float)):\n k_arr = ivy.full((m,), k, dtype=p.dtype)\n elif ivy.asarray(k).shape == (1,):\n k_arr = ivy.full((m,), ivy.asarray(k)[0], dtype=p.dtype)\n elif ivy.asarray(k).shape == (m,):\n k_arr = ivy.asarray(k, dtype=p.dtype)\n else:\n raise ValueError(\"k must be a scalar or a rank-1 array of length 1 or m.\")\n grid = (\n ivy.arange(p.size + m, dtype=p.dtype)[ivy.newaxis]\n - ivy.arange(m, dtype=p.dtype)[:, ivy.newaxis]\n )\n coeff = ivy.maximum(1, grid).prod(axis=0)[::-1]\n return ivy.divide(ivy.concat((p, k_arr)), coeff).astype(p.dtype)\n\n\n@with_unsupported_dtypes(\n {\"0.3.14 and below\": (\"float16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef polydiv(u, v, *, trim_leading_zeros=False):\n u, v_arr = ivy.promote_types_of_inputs(u, v)\n n = v_arr.shape[0] - 1\n m = u.shape[0] - 1\n scale = 1.0 / v_arr[0]\n q = ivy.zeros((max(m - n + 1, 1),), dtype=u.dtype)\n r = ivy.copy_array(u)\n for k in range(0, m - n + 1):\n d = scale * r[k]\n q[k] = d\n r[k : k + n + 1] = r[k : k + n + 1] - (d * v_arr)\n # if trim_leading_zeros:\n # r = trim_zeros_tol(r, trim='f')\n # TODO: need to control tolerance of this function to handle the argument\n return q, r\n\n\n@to_ivy_arrays_and_back\ndef polysub(a1, a2):\n n = max(a1.size, a2.size) - 1\n a1 = ivy.pad(a1, (0, n - a1.size + 1), mode=\"constant\")\n a2 = ivy.pad(a2, (0, n - a2.size + 1), mode=\"constant\")\n return a1 - a2\n\n\n@to_ivy_arrays_and_back\ndef polymul(a1, a2, *, trim_leading_zeros=False):\n a1, a2 = 
ivy.atleast_1d(a1), ivy.atleast_1d(a2)\n if trim_leading_zeros and (len(a1) > 1 or len(a2) > 1):\n a1, a2 = trim_zeros(a1, trim=\"f\"), trim_zeros(a2, trim=\"f\")\n if len(a1) == 0:\n a1 = ivy.asarray([0], dtype=a1.dtype)\n if len(a2) == 0:\n a2 = ivy.asarray([0], dtype=a2.dtype)\n return convolve(a1, a2, mode=\"full\")\n\n\n@to_ivy_arrays_and_back\ndef signbit(x, /):\n x = ivy.array(x)\n return ivy.signbit(x)\n\n\n@to_ivy_arrays_and_back\ndef product(\n a,\n *,\n axis=None,\n dtype=None,\n keepdims=False,\n initial=None,\n where=None,\n promote_integers=True,\n out=None,\n):\n if ivy.is_array(where):\n a = ivy.where(where, a, ivy.default(out, ivy.ones_like(a)), out=out)\n if promote_integers:\n if dtype is None:\n dtype = a.dtype\n if initial is not None:\n if axis is not None:\n s = ivy.to_list(ivy.shape(a, as_array=True))\n s[axis] = 1\n header = ivy.full(ivy.Shape(tuple(s)), initial)\n a = ivy.concat([header, a], axis=axis)\n else:\n a[0] *= initial\n return ivy.prod(a, axis=axis, dtype=dtype, keepdims=keepdims, out=out)\n\n\n@to_ivy_arrays_and_back\ndef conjugate(x, /):\n return ivy.conj(x)\n", "path": "ivy/functional/frontends/jax/numpy/mathematical_functions.py" } ]
[ { "content": "# local\nimport ivy\nfrom ivy.functional.frontends.jax.func_wrapper import (\n to_ivy_arrays_and_back,\n)\nfrom ivy.func_wrapper import with_unsupported_dtypes\nfrom ivy.functional.frontends.jax.numpy import promote_types_of_jax_inputs\nfrom ivy.functional.frontends.numpy.manipulation_routines import trim_zeros\n\n\n# sign\n@to_ivy_arrays_and_back\ndef sign(x, /):\n return ivy.sign(x, out=None)\n\n\n@to_ivy_arrays_and_back\ndef absolute(x, /):\n return ivy.abs(x)\n\n\nabs = absolute\n\n\n@to_ivy_arrays_and_back\ndef add(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.add(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef imag(val, /):\n return ivy.imag(val)\n\n\n@to_ivy_arrays_and_back\ndef angle(z, deg=False):\n return ivy.angle(z, deg=deg)\n\n\n@to_ivy_arrays_and_back\ndef diff(a, n=1, axis=-1, prepend=None, append=None):\n return ivy.diff(a, n=n, axis=axis, prepend=prepend, append=append, out=None)\n\n\n@to_ivy_arrays_and_back\ndef ediff1d(ary, to_end=None, to_begin=None):\n diffs = ivy.diff(ary)\n diffs_dtype = diffs.dtype\n if to_begin is not None:\n if not isinstance(to_begin, (list, tuple)):\n to_begin = [to_begin]\n to_begin = ivy.array(to_begin, dtype=diffs_dtype)\n diffs = ivy.concat((to_begin, diffs))\n if to_end is not None:\n if not isinstance(to_end, (list, tuple)):\n to_end = [to_end]\n to_end = ivy.array(to_end, dtype=diffs_dtype)\n diffs = ivy.concat((diffs, to_end))\n return diffs\n\n\n@to_ivy_arrays_and_back\ndef arctan(x, /):\n return ivy.atan(x)\n\n\n@to_ivy_arrays_and_back\ndef arctan2(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.atan2(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef convolve(a, v, mode=\"full\", *, precision=None):\n a, v = promote_types_of_jax_inputs(a, v)\n if ivy.get_num_dims(a) != 1:\n raise ValueError(\"convolve() only support 1-dimensional inputs.\")\n if len(a) == 0 or len(v) == 0:\n raise ValueError(\n f\"convolve: inputs cannot be empty, got shapes {a.shape} and {v.shape}.\"\n )\n if len(a) < len(v):\n a, v = v, a\n v = ivy.flip(v)\n\n out_order = slice(None)\n\n if mode == \"valid\":\n padding = [(0, 0)]\n elif mode == \"same\":\n padding = [(v.shape[0] // 2, v.shape[0] - v.shape[0] // 2 - 1)]\n elif mode == \"full\":\n padding = [(v.shape[0] - 1, v.shape[0] - 1)]\n\n result = ivy.conv_general_dilated(\n a[None, None, :],\n v[:, None, None],\n (1,),\n padding,\n dims=1,\n data_format=\"channel_first\",\n )\n return result[0, 0, out_order]\n\n\n@to_ivy_arrays_and_back\ndef cos(x, /):\n return ivy.cos(x)\n\n\n@to_ivy_arrays_and_back\ndef cosh(x, /):\n return ivy.cosh(x)\n\n\n@to_ivy_arrays_and_back\ndef dot(a, b, *, precision=None):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.matmul(a, b)\n\n\n@to_ivy_arrays_and_back\ndef floor(x, /):\n return ivy.floor(x)\n\n\n@to_ivy_arrays_and_back\ndef mod(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.remainder(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef modf(x, /, out=None):\n y1 = ivy.where(x >= 0, ivy.floor(x), ivy.ceil(x)) # integral part\n y2 = x - y1 # fractional part\n dtype_str = str(x.dtype)\n if \"float\" in dtype_str:\n return y2, y1\n # floats return as they were. 
u/ints (8, 16, 32) return as float32, 64 as float64.\n dtype_size = x.itemsize * 8\n if \"int8\" in dtype_str or \"int16\" in dtype_str:\n dtype_size = 32\n ret_type = \"float{}\".format(dtype_size)\n return y2.astype(ret_type), y1.astype(ret_type)\n\n\n@to_ivy_arrays_and_back\ndef divmod(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return tuple([ivy.floor_divide(x1, x2), ivy.remainder(x1, x2)])\n\n\n@to_ivy_arrays_and_back\ndef sinh(x, /):\n return ivy.sinh(x)\n\n\n@to_ivy_arrays_and_back\ndef sin(x, /):\n return ivy.sin(x)\n\n\n@to_ivy_arrays_and_back\ndef tan(x, /):\n return ivy.tan(x)\n\n\n@to_ivy_arrays_and_back\ndef tanh(x, /):\n return ivy.tanh(x)\n\n\n@to_ivy_arrays_and_back\ndef arccos(x, /):\n return ivy.acos(x)\n\n\n@to_ivy_arrays_and_back\ndef arccosh(x, /):\n return ivy.acosh(x)\n\n\n@to_ivy_arrays_and_back\ndef arcsin(x, /):\n return ivy.asin(x)\n\n\n@to_ivy_arrays_and_back\ndef arcsinh(x, /):\n return ivy.asinh(x)\n\n\n@to_ivy_arrays_and_back\ndef power(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.pow(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef trunc(x):\n return ivy.trunc(x)\n\n\n@to_ivy_arrays_and_back\ndef ceil(x, /):\n return ivy.ceil(x)\n\n\n@to_ivy_arrays_and_back\ndef float_power(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.float_power(x1, x2).astype(x1.dtype, copy=False)\n\n\n@to_ivy_arrays_and_back\ndef deg2rad(x, /):\n return ivy.deg2rad(x)\n\n\n@to_ivy_arrays_and_back\ndef radians(x, /):\n return ivy.deg2rad(x)\n\n\n@to_ivy_arrays_and_back\ndef exp2(x, /):\n return ivy.exp2(x)\n\n\n@to_ivy_arrays_and_back\ndef gcd(x1, x2):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.gcd(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef i0(x):\n return ivy.i0(x)\n\n\n@to_ivy_arrays_and_back\ndef kron(a, b):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.kron(a, b)\n\n\n@to_ivy_arrays_and_back\ndef lcm(x1, x2):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.lcm(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef logaddexp2(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.logaddexp2(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef trapz(y, x=None, dx=1.0, axis=-1, out=None):\n return ivy.trapz(y, x=x, dx=dx, axis=axis, out=out)\n\n\n@to_ivy_arrays_and_back\ndef sqrt(x, /):\n return ivy.sqrt(x)\n\n\n@to_ivy_arrays_and_back\ndef square(x, /):\n return ivy.square(x)\n\n\n@to_ivy_arrays_and_back\ndef arctanh(x, /):\n return ivy.atanh(x)\n\n\n@to_ivy_arrays_and_back\ndef multiply(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.multiply(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef matmul(a, b, *, precision=None):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.matmul(a, b)\n\n\n@to_ivy_arrays_and_back\ndef log10(x, /):\n return ivy.log10(x)\n\n\n@to_ivy_arrays_and_back\ndef logaddexp(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.logaddexp(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef degrees(x, /):\n return ivy.rad2deg(x)\n\n\n@to_ivy_arrays_and_back\ndef negative(\n x,\n /,\n):\n return ivy.negative(x)\n\n\n@to_ivy_arrays_and_back\ndef positive(\n x,\n /,\n):\n return ivy.positive(x)\n\n\n@to_ivy_arrays_and_back\ndef rad2deg(\n x,\n /,\n):\n return ivy.rad2deg(x)\n\n\n@to_ivy_arrays_and_back\ndef tensordot(a, b, axes=2):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.tensordot(a, b, axes=axes)\n\n\n@to_ivy_arrays_and_back\ndef divide(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n if ivy.dtype(x1) in [\"int64\", 
\"uint64\"]:\n x1 = ivy.astype(x1, ivy.float64)\n elif ivy.is_int_dtype(x1):\n x1 = ivy.astype(x1, ivy.float32)\n\n return ivy.divide(x1, x2).astype(x1.dtype)\n\n\ntrue_divide = divide\n\n\n@to_ivy_arrays_and_back\ndef exp(\n x,\n /,\n):\n return ivy.exp(x)\n\n\n@to_ivy_arrays_and_back\ndef expm1(\n x,\n /,\n):\n return ivy.expm1(x)\n\n\n@to_ivy_arrays_and_back\ndef fmax(x1, x2):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n ret = ivy.where(\n ivy.bitwise_or(ivy.greater(x1, x2), ivy.isnan(x2)),\n x1,\n x2,\n )\n return ret\n\n\n@to_ivy_arrays_and_back\ndef fmin(x1, x2):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n ret = ivy.where(\n ivy.bitwise_or(ivy.less(x1, x2), ivy.isnan(x2)),\n x1,\n x2,\n )\n print(\"jax-frontend\", ret)\n return ret\n\n\n@with_unsupported_dtypes(\n {\"0.4.13 and below\": (\"uint16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef fabs(x, /):\n return ivy.abs(x)\n\n\n@to_ivy_arrays_and_back\ndef fmod(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.fmod(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef maximum(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.maximum(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef minimum(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.minimum(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef heaviside(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.heaviside(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef log(x, /):\n return ivy.log(x)\n\n\n@to_ivy_arrays_and_back\ndef log1p(x, /):\n return ivy.log1p(x)\n\n\n@to_ivy_arrays_and_back\ndef copysign(x1, x2, /):\n return ivy.copysign(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef sinc(x, /):\n return ivy.sinc(x)\n\n\n@with_unsupported_dtypes(\n {\n \"0.4.13 and below\": (\n \"bfloat16\",\n \"float16\",\n )\n },\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef nextafter(x1, x2, /):\n return ivy.nextafter(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef remainder(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.remainder(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef trace(a, offset=0, axis1=0, axis2=1, out=None):\n return ivy.trace(a, offset=offset, axis1=axis1, axis2=axis2, out=out)\n\n\n@to_ivy_arrays_and_back\ndef log2(x, /):\n return ivy.log2(x)\n\n\n@to_ivy_arrays_and_back\ndef vdot(a, b):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.multiply(a, b).sum()\n\n\n@with_unsupported_dtypes(\n {\"0.4.13 and below\": (\"bfloat16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef cbrt(x, /):\n all_positive = ivy.pow(ivy.abs(x), 1.0 / 3.0)\n return ivy.where(ivy.less(x, 0.0), ivy.negative(all_positive), all_positive)\n\n\n@to_ivy_arrays_and_back\ndef nan_to_num(x, copy=True, nan=0.0, posinf=None, neginf=None):\n return ivy.nan_to_num(x, copy=copy, nan=nan, posinf=posinf, neginf=neginf)\n\n\n@to_ivy_arrays_and_back\ndef fix(x, out=None):\n return ivy.fix(x, out=out)\n\n\n@to_ivy_arrays_and_back\ndef real(val, /):\n return ivy.real(val)\n\n\n@to_ivy_arrays_and_back\ndef hypot(x1, x2, /):\n return ivy.hypot(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef floor_divide(x1, x2, /, out=None):\n return ivy.floor_divide(x1, x2, out=out)\n\n\n@to_ivy_arrays_and_back\ndef inner(a, b):\n a, b = promote_types_of_jax_inputs(a, b)\n return ivy.inner(a, b)\n\n\n@to_ivy_arrays_and_back\ndef outer(a, b, out=None):\n return ivy.outer(a, b, out=out)\n\n\n@to_ivy_arrays_and_back\ndef reciprocal(x, /):\n return ivy.reciprocal(x)\n\n\n@to_ivy_arrays_and_back\ndef conj(x, /):\n return ivy.conj(x)\n\n\n@to_ivy_arrays_and_back\ndef 
subtract(x1, x2, /):\n x1, x2 = promote_types_of_jax_inputs(x1, x2)\n return ivy.subtract(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef around(a, decimals=0, out=None):\n ret_dtype = a.dtype\n return ivy.round(a, decimals=decimals, out=out).astype(ret_dtype, copy=False)\n\n\n@to_ivy_arrays_and_back\ndef round(a, decimals=0, out=None):\n return ivy.round(a, decimals=decimals, out=out)\n\n\n@to_ivy_arrays_and_back\ndef frexp(x, /):\n return ivy.frexp(x)\n\n\n@to_ivy_arrays_and_back\ndef ldexp(x1, x2, /):\n return ivy.ldexp(x1, x2)\n\n\n@to_ivy_arrays_and_back\ndef poly(seq_of_zeros):\n seq_of_zeros = ivy.atleast_1d(seq_of_zeros)\n sh = seq_of_zeros.shape\n if len(sh) == 2 and sh[0] == sh[1] and sh[0] != 0:\n seq_of_zeros = ivy.eigvals(seq_of_zeros)\n if seq_of_zeros.ndim != 1:\n raise ValueError(\"input must be 1d or non-empty square 2d array.\")\n dt = seq_of_zeros.dtype\n if len(seq_of_zeros) == 0:\n return ivy.ones((), dtype=dt)\n a = ivy.ones((1,), dtype=dt)\n for k in range(len(seq_of_zeros)):\n a = convolve(\n a, ivy.asarray([ivy.array(1), -seq_of_zeros[k]], dtype=dt), mode=\"full\"\n )\n return a\n\n\n@to_ivy_arrays_and_back\ndef polyadd(a1, a2):\n d = max(a1.size, a2.size)\n a1 = ivy.pad(a1, (d - a1.size, 0), mode=\"constant\")\n a2 = ivy.pad(a2, (d - a2.size, 0), mode=\"constant\")\n return a1 + a2\n\n\n@with_unsupported_dtypes(\n {\"0.4.13 and below\": (\"float16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef polyder(p, m=1):\n if m < 0:\n raise ValueError(\"Order of derivative must be positive.\")\n\n if m == 0:\n return p\n p_dtype = p.dtype\n coeff = ivy.prod(\n ivy.expand_dims(ivy.arange(m, len(p), dtype=p_dtype))\n - ivy.expand_dims(ivy.arange(m, dtype=p_dtype), axis=1),\n axis=0,\n )\n return (p[:-m] * coeff[::-1]).astype(p_dtype)\n\n\n@with_unsupported_dtypes(\n {\"0.4.13 and below\": (\"float16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef polyint(p, m=1, k=None):\n p = ivy.asarray(p)\n m = int(m)\n if m == 0:\n return p\n if k is None:\n k_arr = ivy.zeros((m,), dtype=p.dtype)\n elif isinstance(k, (int, float)):\n k_arr = ivy.full((m,), k, dtype=p.dtype)\n elif ivy.asarray(k).shape == (1,):\n k_arr = ivy.full((m,), ivy.asarray(k)[0], dtype=p.dtype)\n elif ivy.asarray(k).shape == (m,):\n k_arr = ivy.asarray(k, dtype=p.dtype)\n else:\n raise ValueError(\"k must be a scalar or a rank-1 array of length 1 or m.\")\n grid = (\n ivy.arange(p.size + m, dtype=p.dtype)[ivy.newaxis]\n - ivy.arange(m, dtype=p.dtype)[:, ivy.newaxis]\n )\n coeff = ivy.maximum(1, grid).prod(axis=0)[::-1]\n return ivy.divide(ivy.concat((p, k_arr)), coeff).astype(p.dtype)\n\n\n@with_unsupported_dtypes(\n {\"0.3.14 and below\": (\"float16\",)},\n \"jax\",\n)\n@to_ivy_arrays_and_back\ndef polydiv(u, v, *, trim_leading_zeros=False):\n u, v_arr = ivy.promote_types_of_inputs(u, v)\n n = v_arr.shape[0] - 1\n m = u.shape[0] - 1\n scale = 1.0 / v_arr[0]\n q = ivy.zeros((max(m - n + 1, 1),), dtype=u.dtype)\n r = ivy.copy_array(u)\n for k in range(0, m - n + 1):\n d = scale * r[k]\n q[k] = d\n r[k : k + n + 1] = r[k : k + n + 1] - (d * v_arr)\n # if trim_leading_zeros:\n # r = trim_zeros_tol(r, trim='f')\n # TODO: need to control tolerance of this function to handle the argument\n return q, r\n\n\n@to_ivy_arrays_and_back\ndef polysub(a1, a2):\n n = max(a1.size, a2.size) - 1\n a1 = ivy.pad(a1, (0, n - a1.size + 1), mode=\"constant\")\n a2 = ivy.pad(a2, (0, n - a2.size + 1), mode=\"constant\")\n return a1 - a2\n\n\n@to_ivy_arrays_and_back\ndef polymul(a1, a2, *, trim_leading_zeros=False):\n a1, a2 = 
ivy.atleast_1d(a1), ivy.atleast_1d(a2)\n if trim_leading_zeros and (len(a1) > 1 or len(a2) > 1):\n a1, a2 = trim_zeros(a1, trim=\"f\"), trim_zeros(a2, trim=\"f\")\n if len(a1) == 0:\n a1 = ivy.asarray([0], dtype=a1.dtype)\n if len(a2) == 0:\n a2 = ivy.asarray([0], dtype=a2.dtype)\n return convolve(a1, a2, mode=\"full\")\n\n\n@to_ivy_arrays_and_back\ndef signbit(x, /):\n x = ivy.array(x)\n return ivy.signbit(x)\n\n\n@to_ivy_arrays_and_back\ndef product(\n a,\n *,\n axis=None,\n dtype=None,\n keepdims=False,\n initial=None,\n where=None,\n promote_integers=True,\n out=None,\n):\n if ivy.is_array(where):\n a = ivy.where(where, a, ivy.default(out, ivy.ones_like(a)), out=out)\n if promote_integers:\n if dtype is None:\n dtype = a.dtype\n if initial is not None:\n if axis is not None:\n s = ivy.to_list(ivy.shape(a, as_array=True))\n s[axis] = 1\n header = ivy.full(ivy.Shape(tuple(s)), initial)\n a = ivy.concat([header, a], axis=axis)\n else:\n a[0] *= initial\n return ivy.prod(a, axis=axis, dtype=dtype, keepdims=keepdims, out=out)\n\n\n@to_ivy_arrays_and_back\ndef conjugate(x, /):\n return ivy.conj(x)\n", "path": "ivy/functional/frontends/jax/numpy/mathematical_functions.py" } ]
diff --git a/ivy/functional/frontends/jax/numpy/mathematical_functions.py b/ivy/functional/frontends/jax/numpy/mathematical_functions.py index ff37565745cd0..62d588b079ea5 100644 --- a/ivy/functional/frontends/jax/numpy/mathematical_functions.py +++ b/ivy/functional/frontends/jax/numpy/mathematical_functions.py @@ -28,6 +28,11 @@ def add(x1, x2, /): return ivy.add(x1, x2) +@to_ivy_arrays_and_back +def imag(val, /): + return ivy.imag(val) + + @to_ivy_arrays_and_back def angle(z, deg=False): return ivy.angle(z, deg=deg) diff --git a/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy/test_math.py b/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy/test_math.py index bad3697419fa9..0b614963c55a7 100644 --- a/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy/test_math.py +++ b/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy/test_math.py @@ -2515,6 +2515,37 @@ def test_jax_conj( ) +# imag +@handle_frontend_test( + fn_tree="jax.numpy.imag", + dtype_and_x=helpers.dtype_and_values( + available_dtypes=helpers.get_dtypes("float_and_complex"), + min_value=-20, + max_value=20, + ), + test_with_out=st.just(False), +) +def test_jax_imag( + *, + dtype_and_x, + test_flags, + on_device, + fn_tree, + frontend, +): + input_dtype, x = dtype_and_x + helpers.test_frontend_function( + input_dtypes=input_dtype, + test_flags=test_flags, + frontend=frontend, + fn_tree=fn_tree, + on_device=on_device, + rtol=1e-5, + atol=1e-5, + val=x[0], + ) + + # subtract @handle_frontend_test( fn_tree="jax.numpy.subtract", @@ -2960,7 +2991,8 @@ def test_jax_signbit( # input_dtypes, x, axis, dtype, where = dtype_x_axis_dtype_where # if ivy.current_backend_str() == "torch": # assume(not test_flags.as_variable[0]) -# where, input_dtypes, test_flags = np_frontend_helpers.handle_where_and_array_bools( +# where, input_dtypes, test_flags = np_frontend_helpers. +# handle_where_and_array_bools( # where=where, # input_dtype=input_dtypes, # test_flags=test_flags,
mlcommons__GaNDLF-315
Add an easy way to verify installation

**Is your feature request related to a problem? Please describe.**
Currently, we are asking users to run specific commands to verify installation, which can be cumbersome.

**Describe the solution you'd like**
It would be great if this could be put in a script (and extended/updated as needed).

**Describe alternatives you've considered**
N.A.

**Additional context**
N.A.
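For concreteness, a minimal sketch of the kind of verification script the request describes is shown below. It only assumes the package is importable as `GANDLF` and exposes a `__version__` attribute, which the PR diff later in this record confirms; everything else here is illustrative rather than the project's actual script.

```python
#!/usr/bin/env python
# Minimal sketch of an installation check (illustrative only; the PR in this
# record adds the real script as gandlf_verifyInstall).
import sys

try:
    import GANDLF as gf  # package import name, per the PR diff below
except ImportError as err:
    sys.exit("GaNDLF is not installed correctly: {}".format(err))

print("GaNDLF installed version:", gf.__version__)
print("GaNDLF is ready.")
```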
[ { "content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\nwith open(\"README.md\") as readme_file:\n readme = readme_file.read()\n\n\ndef git_submodule_update():\n ## submodule update\n os.system(\"git submodule update --init --recursive\")\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n git_submodule_update()\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n git_submodule_update()\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n git_submodule_update()\n\n\n# read version.py\nimport sys, re\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (filepath, error))\n\nrequirements = [\n \"black\",\n \"numpy==1.21.0\",\n \"scipy\",\n \"SimpleITK==2.1.0\",\n \"torch>=1.7\",\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.57\",\n \"pandas\",\n \"pylint\",\n \"scikit-learn==0.23.1\",\n \"pickle5==0.0.11\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"openslide-python\",\n \"scikit-image\",\n \"matplotlib\",\n \"requests>=2.25.0\",\n \"pyvips\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics\",\n \"OpenPatchMiner==0.1.6\",\n \"pydicom\",\n]\n\nsetup(\n name=\"GANDLF\",\n version=__version__,\n author=\"Jose Agraz, Vinayak Ahluwalia, Bhakti Baheti, Spyridon Bakas, Ujjwal Baid, Megh Bhalerao, Brandon Edwards, Karol Gotkowski, Caleb Grenko, Orhun Güley, Ibrahim Ethem Hamamci, Sarthak Pati, Micah Sheller, Juliia Skobleva, Siddhesh Thakur, Spiros Thermos\", # alphabetical order\n author_email=\"[email protected]\",\n python_requires=\">=3.6\",\n packages=find_packages(),\n cmdclass={ # this ensures git_submodule_update is called during install\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: BSD License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"BSD-3-Clause License\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging\",\n zip_safe=False,\n)\n\n## windows vips installation\nif os.name == \"nt\": # proceed for windows\n from pathlib import Path\n\n # download and extract if main dll is absent\n if not Path(\"./vips/vips-dev-8.10/bin/libvips-42.dll\").exists():\n 
print(\"Downloading and extracting VIPS for Windows\")\n url = \"https://github.com/libvips/libvips/releases/download/v8.10.2/vips-dev-w64-all-8.10.2.zip\"\n zip_to_extract = \"./vips.zip\"\n import urllib.request, zipfile\n\n urllib.request.urlretrieve(url, zip_to_extract)\n z = zipfile.ZipFile(zip_to_extract)\n z.extractall(\"./vips\")\n z.close()\n os.remove(zip_to_extract)\n", "path": "setup.py" } ]
[ { "content": "#!/usr/bin/env python\n\n\"\"\"The setup script.\"\"\"\n\n\nimport os\nfrom setuptools import setup, find_packages\nfrom setuptools.command.install import install\nfrom setuptools.command.develop import develop\nfrom setuptools.command.egg_info import egg_info\n\nwith open(\"README.md\") as readme_file:\n readme = readme_file.read()\n\n\ndef git_submodule_update():\n ## submodule update\n os.system(\"git submodule update --init --recursive\")\n\n\nclass CustomInstallCommand(install):\n def run(self):\n install.run(self)\n git_submodule_update()\n\n\nclass CustomDevelopCommand(develop):\n def run(self):\n develop.run(self)\n git_submodule_update()\n\n\nclass CustomEggInfoCommand(egg_info):\n def run(self):\n egg_info.run(self)\n git_submodule_update()\n\n\n# read version.py\nimport sys, re\n\ntry:\n filepath = \"GANDLF/version.py\"\n version_file = open(filepath)\n (__version__,) = re.findall('__version__ = \"(.*)\"', version_file.read())\n\nexcept Exception as error:\n __version__ = \"0.0.1\"\n sys.stderr.write(\"Warning: Could not open '%s' due %s\\n\" % (filepath, error))\n\nrequirements = [\n \"black\",\n \"numpy==1.21.0\",\n \"scipy\",\n \"SimpleITK==2.1.0\",\n \"torch>=1.7\",\n \"torchvision\",\n \"tqdm\",\n \"torchio==0.18.57\",\n \"pandas\",\n \"pylint\",\n \"scikit-learn==0.23.1\",\n \"pickle5==0.0.11\",\n \"setuptools\",\n \"seaborn\",\n \"pyyaml\",\n \"openslide-python\",\n \"scikit-image\",\n \"matplotlib\",\n \"requests>=2.25.0\",\n \"pyvips\",\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n \"psutil\",\n \"medcam\",\n \"opencv-python\",\n \"torchmetrics\",\n \"OpenPatchMiner==0.1.6\",\n \"pydicom\",\n]\n\nsetup(\n name=\"GANDLF\",\n version=__version__,\n author=\"Jose Agraz, Vinayak Ahluwalia, Bhakti Baheti, Spyridon Bakas, Ujjwal Baid, Megh Bhalerao, Brandon Edwards, Karol Gotkowski, Caleb Grenko, Orhun Güley, Ibrahim Ethem Hamamci, Sarthak Pati, Micah Sheller, Juliia Skobleva, Siddhesh Thakur, Spiros Thermos\", # alphabetical order\n author_email=\"[email protected]\",\n python_requires=\">=3.6\",\n packages=find_packages(),\n cmdclass={ # this ensures git_submodule_update is called during install\n \"install\": CustomInstallCommand,\n \"develop\": CustomDevelopCommand,\n \"egg_info\": CustomEggInfoCommand,\n },\n scripts=[\n \"gandlf_run\",\n \"gandlf_constructCSV\",\n \"gandlf_collectStats\",\n \"gandlf_patchMiner\",\n \"gandlf_preprocess\",\n \"gandlf_anonymizer\",\n \"gandlf_verifyInstall\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: BSD License\",\n \"Natural Language :: English\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Topic :: Scientific/Engineering :: Medical Science Apps\",\n ],\n description=(\n \"PyTorch-based framework that handles segmentation/regression/classification using various DL architectures for medical imaging.\"\n ),\n install_requires=requirements,\n license=\"BSD-3-Clause License\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n keywords=\"semantic, segmentation, regression, classification, data-augmentation, medical-imaging\",\n zip_safe=False,\n)\n\n## windows vips installation\nif os.name == \"nt\": # proceed for windows\n from pathlib import Path\n\n # download and extract if main dll is absent\n if not 
Path(\"./vips/vips-dev-8.10/bin/libvips-42.dll\").exists():\n print(\"Downloading and extracting VIPS for Windows\")\n url = \"https://github.com/libvips/libvips/releases/download/v8.10.2/vips-dev-w64-all-8.10.2.zip\"\n zip_to_extract = \"./vips.zip\"\n import urllib.request, zipfile\n\n urllib.request.urlretrieve(url, zip_to_extract)\n z = zipfile.ZipFile(zip_to_extract)\n z.extractall(\"./vips\")\n z.close()\n os.remove(zip_to_extract)\n", "path": "setup.py" } ]
diff --git a/docs/faq.md b/docs/faq.md index 33379a03f..4ee02a1fd 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -8,6 +8,7 @@ This page contains answers to frequently asked questions about GaNDLF. - [Table of Contents](#table-of-contents) - [Why do I get the error `pkg_resources.DistributionNotFound: The 'GANDLF' distribution was not found`?](#why-do-i-get-the-error-pkg_resourcesdistributionnotfound-the-gandlf-distribution-was-not-found) - [Where do I start?](#where-do-i-start) + - [Why is GaNDLF not working?](#why-is-gandlf-not-working) - [Which parts of a GaNDLF configuration are customizable?](#which-parts-of-a-gandlf-configuration-are-customizable) - [Can I run GaNDLF on a high performance computing (HPC) cluster?](#can-i-run-gandlf-on-a-high-performance-computing-hpc-cluster) - [How can I track the per-epoch training performance?](#how-can-i-track-the-per-epoch-training-performance) @@ -25,6 +26,12 @@ The [usage](https://cbica.github.io/GaNDLF/usage) guide is fairly comprehensive [Back To Top &uarr;](#table-of-contents) +### Why is GaNDLF not working? + +Verify that [the installation](https://cbica.github.io/GaNDLF/setup) has been done correctly by running `python ./gandlf_verifyInstall` after activating the correct virtual environment. If you are still having issues, please feel free to [post a support request](https://github.com/CBICA/GaNDLF/issues/new?assignees=&labels=&template=--questions-help-support.md&title=), and we will do our best to address it ASAP. + +[Back To Top &uarr;](#table-of-contents) + ### Which parts of a GaNDLF configuration are customizable? Virtually all of it! For more details, please see the [usage](https://cbica.github.io/GaNDLF/usage) guide and our extensive [samples](https://github.com/CBICA/GaNDLF/tree/master/samples). All available options are documented in the [config_all_options.yaml file](https://github.com/CBICA/GaNDLF/blob/master/samples/config_all_options.yaml). diff --git a/docs/setup.md b/docs/setup.md index 05d3a5a76..c0de9e895 100644 --- a/docs/setup.md +++ b/docs/setup.md @@ -35,5 +35,5 @@ pip install -e . # conda install -c conda-forge gandlf -y ## verify installation -python -c "import GANDLF as gf;print(gf.__version__)" +python ./gandlf_verifyInstall ``` diff --git a/gandlf_verifyInstall b/gandlf_verifyInstall new file mode 100644 index 000000000..2f2abf8b0 --- /dev/null +++ b/gandlf_verifyInstall @@ -0,0 +1,46 @@ +#!usr/bin/env python +# -*- coding: utf-8 -*- + +import os, argparse +from datetime import date + + +# main function +if __name__ == "__main__": + copyrightMessage = ( + "Contact: [email protected]\n\n" + + "This program is NOT FDA/CE approved and NOT intended for clinical use.\nCopyright (c) " + + str(date.today().year) + + " University of Pennsylvania. All rights reserved." + ) + + parser = argparse.ArgumentParser( + prog="GANDLF_VerifyInstall", + formatter_class=argparse.RawTextHelpFormatter, + description="Verify GaNDLF installation.\n\n" + copyrightMessage, + ) + + try: + import GANDLF as gf + + print("GaNDLF installed version:", gf.__version__) + except: + raise Exception( + "GaNDLF not properly installed, please see https://cbica.github.io/GaNDLF/setup" + ) + + try: + import GANDLF.anonymize.dicomanonymizer as anon + + if anon: + print("GANDLF's git submodules were successfully imported.") + except: + try: + os.system("git submodule update --init --recursive") + except: + print("Git was not found, please try again.") + os.system("pip install -e .") + + args = parser.parse_args() + + print("GaNDLF is ready. 
See https://cbica.github.io/GaNDLF/usage") diff --git a/setup.py b/setup.py index 2d8e8279b..41e13ad30 100644 --- a/setup.py +++ b/setup.py @@ -99,6 +99,7 @@ def run(self): "gandlf_patchMiner", "gandlf_preprocess", "gandlf_anonymizer", + "gandlf_verifyInstall", ], classifiers=[ "Development Status :: 3 - Alpha",
rasterio__rasterio-2093
WarpedVRT context exit doesn't set the dataset as closed

It's me again with a WarpedVRT bug (I'm sorry). Basically I wanted to know the state of the WarpedVRT dataset after I exited the context manager, and it seems that the WarpedVRT is not set to `closed`, but if I try to call `vrt.read()` rasterio will error with `RasterioIOError: Dataset is closed: WarpedVRT(tests/fixtures/cog_gcps.tif)`

```python
with rasterio.open("tests/fixtures/cog.tif") as src:
    with WarpedVRT(src) as vrt:
        assert not src.closed
        assert not vrt.closed  # <open WarpedVRT name='WarpedVRT(tests/fixtures/cog.tif)' mode='r'>

    assert vrt.closed  # <--- AssertionError | <open WarpedVRT name='WarpedVRT(tests/fixtures/cog.tif)' mode='r'>

assert src.closed
assert vrt.closed  # <-- still not closed here either
```

System:
- macOS
- rasterio: '1.2b4'
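The behaviour the report expects is the standard context-manager contract: leaving the `with` block should flip the dataset's closed flag, not just stop the warper. The after_files shown for this record drop WarpedVRT's own `close()` override (which only called `stop()`), so closing falls through to the parent dataset class. A minimal sketch of the expected contract, using a hypothetical stand-in class rather than rasterio's real implementation:

```python
# Minimal sketch of the expected context-manager contract, using a hypothetical
# stand-in class (not rasterio's actual WarpedVRT).
class FakeWarpedVRT:
    def __init__(self):
        self.closed = False

    def close(self):
        # real code would release the warped dataset here (stop()), then
        # record that the object can no longer be read from
        self.closed = True

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        self.close()


with FakeWarpedVRT() as vrt:
    assert not vrt.closed
assert vrt.closed  # what the issue expects after leaving the context
```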
[ { "content": "\"\"\"rasterio.vrt: a module concerned with GDAL VRTs\"\"\"\n\nimport xml.etree.ElementTree as ET\n\nimport rasterio\nfrom rasterio._warp import WarpedVRTReaderBase\nfrom rasterio.dtypes import _gdal_typename\nfrom rasterio.enums import MaskFlags\nfrom rasterio.env import env_ctx_if_needed\nfrom rasterio.path import parse_path\nfrom rasterio.transform import TransformMethodsMixin\nfrom rasterio.windows import WindowMethodsMixin\n\n\nclass WarpedVRT(WarpedVRTReaderBase, WindowMethodsMixin,\n TransformMethodsMixin):\n \"\"\"A virtual warped dataset.\n\n Abstracts the details of raster warping and allows access to data\n that is reprojected when read.\n\n This class is backed by an in-memory GDAL VRTWarpedDataset VRT file.\n\n Parameters\n ----------\n src_dataset : dataset object\n The warp source.\n src_crs : CRS or str, optional\n Overrides the coordinate reference system of `src_dataset`.\n src_transfrom : Affine, optional\n Overrides the transform of `src_dataset`.\n src_nodata : float, optional\n Overrides the nodata value of `src_dataset`, which is the\n default.\n crs : CRS or str, optional\n The coordinate reference system at the end of the warp\n operation. Default: the crs of `src_dataset`. dst_crs is\n a deprecated alias for this parameter.\n transform : Affine, optional\n The transform for the virtual dataset. Default: will be\n computed from the attributes of `src_dataset`. dst_transform\n is a deprecated alias for this parameter.\n height, width: int, optional\n The dimensions of the virtual dataset. Defaults: will be\n computed from the attributes of `src_dataset`. dst_height\n and dst_width are deprecated alias for these parameters.\n nodata : float, optional\n Nodata value for the virtual dataset. Default: the nodata\n value of `src_dataset` or 0.0. dst_nodata is a deprecated\n alias for this parameter.\n resampling : Resampling, optional\n Warp resampling algorithm. Default: `Resampling.nearest`.\n tolerance : float, optional\n The maximum error tolerance in input pixels when\n approximating the warp transformation. Default: 0.125,\n or one-eigth of a pixel.\n src_alpha : int, optional\n Index of a source band to use as an alpha band for warping.\n add_alpha : bool, optional\n Whether to add an alpha masking band to the virtual dataset.\n Default: False. This option will cause deletion of the VRT\n nodata value.\n init_dest_nodata : bool, optional\n Whether or not to initialize output to `nodata`. Default:\n True.\n warp_mem_limit : int, optional\n The warp operation's memory limit in MB. The default (0)\n means 64 MB with GDAL 2.2.\n dtype : str, optional\n The working data type for warp operation and output.\n warp_extras : dict\n GDAL extra warp options. See\n https://gdal.org/doxygen/structGDALWarpOptions.html.\n\n Attributes\n ----------\n src_dataset : dataset\n The dataset object to be virtually warped.\n resampling : int\n One of the values from rasterio.enums.Resampling. The default is\n `Resampling.nearest`.\n tolerance : float\n The maximum error tolerance in input pixels when approximating\n the warp transformation. The default is 0.125.\n src_nodata: int or float, optional\n The source nodata value. Pixels with this value will not be\n used for interpolation. 
If not set, it will be default to the\n nodata value of the source image, if available.\n dst_nodata: int or float, optional\n The nodata value used to initialize the destination; it will\n remain in all areas not covered by the reprojected source.\n Defaults to the value of src_nodata, or 0 (gdal default).\n working_dtype : str, optional\n The working data type for warp operation and output.\n warp_extras : dict\n GDAL extra warp options. See\n https://gdal.org/doxygen/structGDALWarpOptions.html.\n\n Examples\n --------\n\n >>> with rasterio.open('tests/data/RGB.byte.tif') as src:\n ... with WarpedVRT(src, crs='EPSG:3857') as vrt:\n ... data = vrt.read()\n\n \"\"\"\n\n def __repr__(self):\n return \"<{} WarpedVRT name='{}' mode='{}'>\".format(\n self.closed and 'closed' or 'open', self.name, self.mode)\n\n def __enter__(self):\n self._env = env_ctx_if_needed()\n self._env.__enter__()\n self.start()\n return self\n\n def __exit__(self, *args, **kwargs):\n self._env.__exit__()\n self.close()\n\n def __del__(self):\n self.close()\n\n def close(self):\n self.stop()\n\n\ndef _boundless_vrt_doc(\n src_dataset, nodata=None, background=None, hidenodata=False,\n width=None, height=None, transform=None, masked=False):\n \"\"\"Make a VRT XML document.\n\n Parameters\n ----------\n src_dataset : Dataset\n The dataset to wrap.\n background : int or float, optional\n The background fill value for the boundless VRT.\n masked : bool\n If True, the src_dataset is replaced by its valid data mask.\n\n Returns\n -------\n str\n An XML text string.\n \"\"\"\n\n nodata = nodata or src_dataset.nodata\n width = width or src_dataset.width\n height = height or src_dataset.height\n transform = transform or src_dataset.transform\n\n vrtdataset = ET.Element('VRTDataset')\n vrtdataset.attrib['rasterYSize'] = str(height)\n vrtdataset.attrib['rasterXSize'] = str(width)\n srs = ET.SubElement(vrtdataset, 'SRS')\n srs.text = src_dataset.crs.wkt if src_dataset.crs else \"\"\n geotransform = ET.SubElement(vrtdataset, 'GeoTransform')\n geotransform.text = ','.join([str(v) for v in transform.to_gdal()])\n\n for bidx, ci, block_shape, dtype in zip(src_dataset.indexes, src_dataset.colorinterp, src_dataset.block_shapes, src_dataset.dtypes):\n vrtrasterband = ET.SubElement(vrtdataset, 'VRTRasterBand')\n vrtrasterband.attrib['dataType'] = _gdal_typename(dtype)\n vrtrasterband.attrib['band'] = str(bidx)\n\n if background is not None or nodata is not None:\n nodatavalue = ET.SubElement(vrtrasterband, 'NoDataValue')\n nodatavalue.text = str(background or nodata)\n\n if hidenodata:\n hidenodatavalue = ET.SubElement(vrtrasterband, 'HideNoDataValue')\n hidenodatavalue.text = \"1\"\n\n colorinterp = ET.SubElement(vrtrasterband, 'ColorInterp')\n colorinterp.text = ci.name.capitalize()\n\n complexsource = ET.SubElement(vrtrasterband, 'ComplexSource')\n sourcefilename = ET.SubElement(complexsource, 'SourceFilename')\n sourcefilename.attrib['relativeToVRT'] = \"0\"\n sourcefilename.attrib[\"shared\"] = \"0\"\n sourcefilename.text = parse_path(src_dataset.name).as_vsi()\n sourceband = ET.SubElement(complexsource, 'SourceBand')\n sourceband.text = str(bidx)\n sourceproperties = ET.SubElement(complexsource, 'SourceProperties')\n sourceproperties.attrib['RasterXSize'] = str(width)\n sourceproperties.attrib['RasterYSize'] = str(height)\n sourceproperties.attrib['dataType'] = _gdal_typename(dtype)\n sourceproperties.attrib['BlockYSize'] = str(block_shape[0])\n sourceproperties.attrib['BlockXSize'] = str(block_shape[1])\n srcrect = 
ET.SubElement(complexsource, 'SrcRect')\n srcrect.attrib['xOff'] = '0'\n srcrect.attrib['yOff'] = '0'\n srcrect.attrib['xSize'] = str(src_dataset.width)\n srcrect.attrib['ySize'] = str(src_dataset.height)\n dstrect = ET.SubElement(complexsource, 'DstRect')\n dstrect.attrib['xOff'] = str((src_dataset.transform.xoff - transform.xoff) / transform.a)\n dstrect.attrib['yOff'] = str((src_dataset.transform.yoff - transform.yoff) / transform.e)\n dstrect.attrib['xSize'] = str(src_dataset.width * src_dataset.transform.a / transform.a)\n dstrect.attrib['ySize'] = str(src_dataset.height * src_dataset.transform.e / transform.e)\n\n if src_dataset.nodata is not None:\n nodata_elem = ET.SubElement(complexsource, 'NODATA')\n nodata_elem.text = str(src_dataset.nodata)\n\n if src_dataset.options is not None:\n openoptions = ET.SubElement(complexsource, 'OpenOptions')\n for ookey, oovalue in src_dataset.options.items():\n ooi = ET.SubElement(openoptions, 'OOI')\n ooi.attrib['key'] = str(ookey)\n ooi.text = str(oovalue)\n\n # Effectively replaces all values of the source dataset with\n # 255. Due to GDAL optimizations, the source dataset will not\n # be read, so we get a performance improvement.\n if masked:\n scaleratio = ET.SubElement(complexsource, 'ScaleRatio')\n scaleratio.text = '0'\n scaleoffset = ET.SubElement(complexsource, 'ScaleOffset')\n scaleoffset.text = '255'\n\n if all(MaskFlags.per_dataset in flags for flags in src_dataset.mask_flag_enums):\n maskband = ET.SubElement(vrtdataset, 'MaskBand')\n vrtrasterband = ET.SubElement(maskband, 'VRTRasterBand')\n vrtrasterband.attrib['dataType'] = 'Byte'\n\n simplesource = ET.SubElement(vrtrasterband, 'SimpleSource')\n sourcefilename = ET.SubElement(simplesource, 'SourceFilename')\n sourcefilename.attrib['relativeToVRT'] = \"0\"\n sourcefilename.attrib[\"shared\"] = \"0\"\n sourcefilename.text = parse_path(src_dataset.name).as_vsi()\n\n sourceband = ET.SubElement(simplesource, 'SourceBand')\n sourceband.text = 'mask,1'\n sourceproperties = ET.SubElement(simplesource, 'SourceProperties')\n sourceproperties.attrib['RasterXSize'] = str(width)\n sourceproperties.attrib['RasterYSize'] = str(height)\n sourceproperties.attrib['dataType'] = 'Byte'\n sourceproperties.attrib['BlockYSize'] = str(block_shape[0])\n sourceproperties.attrib['BlockXSize'] = str(block_shape[1])\n srcrect = ET.SubElement(simplesource, 'SrcRect')\n srcrect.attrib['xOff'] = '0'\n srcrect.attrib['yOff'] = '0'\n srcrect.attrib['xSize'] = str(src_dataset.width)\n srcrect.attrib['ySize'] = str(src_dataset.height)\n dstrect = ET.SubElement(simplesource, 'DstRect')\n dstrect.attrib['xOff'] = str((src_dataset.transform.xoff - transform.xoff) / transform.a)\n dstrect.attrib['yOff'] = str((src_dataset.transform.yoff - transform.yoff) / transform.e)\n dstrect.attrib['xSize'] = str(src_dataset.width)\n dstrect.attrib['ySize'] = str(src_dataset.height)\n\n return ET.tostring(vrtdataset).decode('ascii')\n", "path": "rasterio/vrt.py" } ]
[ { "content": "\"\"\"rasterio.vrt: a module concerned with GDAL VRTs\"\"\"\n\nimport xml.etree.ElementTree as ET\n\nimport rasterio\nfrom rasterio._warp import WarpedVRTReaderBase\nfrom rasterio.dtypes import _gdal_typename\nfrom rasterio.enums import MaskFlags\nfrom rasterio.env import env_ctx_if_needed\nfrom rasterio.path import parse_path\nfrom rasterio.transform import TransformMethodsMixin\nfrom rasterio.windows import WindowMethodsMixin\n\n\nclass WarpedVRT(WarpedVRTReaderBase, WindowMethodsMixin,\n TransformMethodsMixin):\n \"\"\"A virtual warped dataset.\n\n Abstracts the details of raster warping and allows access to data\n that is reprojected when read.\n\n This class is backed by an in-memory GDAL VRTWarpedDataset VRT file.\n\n Parameters\n ----------\n src_dataset : dataset object\n The warp source.\n src_crs : CRS or str, optional\n Overrides the coordinate reference system of `src_dataset`.\n src_transfrom : Affine, optional\n Overrides the transform of `src_dataset`.\n src_nodata : float, optional\n Overrides the nodata value of `src_dataset`, which is the\n default.\n crs : CRS or str, optional\n The coordinate reference system at the end of the warp\n operation. Default: the crs of `src_dataset`. dst_crs is\n a deprecated alias for this parameter.\n transform : Affine, optional\n The transform for the virtual dataset. Default: will be\n computed from the attributes of `src_dataset`. dst_transform\n is a deprecated alias for this parameter.\n height, width: int, optional\n The dimensions of the virtual dataset. Defaults: will be\n computed from the attributes of `src_dataset`. dst_height\n and dst_width are deprecated alias for these parameters.\n nodata : float, optional\n Nodata value for the virtual dataset. Default: the nodata\n value of `src_dataset` or 0.0. dst_nodata is a deprecated\n alias for this parameter.\n resampling : Resampling, optional\n Warp resampling algorithm. Default: `Resampling.nearest`.\n tolerance : float, optional\n The maximum error tolerance in input pixels when\n approximating the warp transformation. Default: 0.125,\n or one-eigth of a pixel.\n src_alpha : int, optional\n Index of a source band to use as an alpha band for warping.\n add_alpha : bool, optional\n Whether to add an alpha masking band to the virtual dataset.\n Default: False. This option will cause deletion of the VRT\n nodata value.\n init_dest_nodata : bool, optional\n Whether or not to initialize output to `nodata`. Default:\n True.\n warp_mem_limit : int, optional\n The warp operation's memory limit in MB. The default (0)\n means 64 MB with GDAL 2.2.\n dtype : str, optional\n The working data type for warp operation and output.\n warp_extras : dict\n GDAL extra warp options. See\n https://gdal.org/doxygen/structGDALWarpOptions.html.\n\n Attributes\n ----------\n src_dataset : dataset\n The dataset object to be virtually warped.\n resampling : int\n One of the values from rasterio.enums.Resampling. The default is\n `Resampling.nearest`.\n tolerance : float\n The maximum error tolerance in input pixels when approximating\n the warp transformation. The default is 0.125.\n src_nodata: int or float, optional\n The source nodata value. Pixels with this value will not be\n used for interpolation. 
If not set, it will be default to the\n nodata value of the source image, if available.\n dst_nodata: int or float, optional\n The nodata value used to initialize the destination; it will\n remain in all areas not covered by the reprojected source.\n Defaults to the value of src_nodata, or 0 (gdal default).\n working_dtype : str, optional\n The working data type for warp operation and output.\n warp_extras : dict\n GDAL extra warp options. See\n https://gdal.org/doxygen/structGDALWarpOptions.html.\n\n Examples\n --------\n\n >>> with rasterio.open('tests/data/RGB.byte.tif') as src:\n ... with WarpedVRT(src, crs='EPSG:3857') as vrt:\n ... data = vrt.read()\n\n \"\"\"\n\n def __repr__(self):\n return \"<{} WarpedVRT name='{}' mode='{}'>\".format(\n self.closed and 'closed' or 'open', self.name, self.mode)\n\n def __enter__(self):\n self._env = env_ctx_if_needed()\n self._env.__enter__()\n self.start()\n return self\n\n def __exit__(self, *args, **kwargs):\n self._env.__exit__()\n self.close()\n\n def __del__(self):\n self.close()\n\n\ndef _boundless_vrt_doc(\n src_dataset, nodata=None, background=None, hidenodata=False,\n width=None, height=None, transform=None, masked=False):\n \"\"\"Make a VRT XML document.\n\n Parameters\n ----------\n src_dataset : Dataset\n The dataset to wrap.\n background : int or float, optional\n The background fill value for the boundless VRT.\n masked : bool\n If True, the src_dataset is replaced by its valid data mask.\n\n Returns\n -------\n str\n An XML text string.\n \"\"\"\n\n nodata = nodata or src_dataset.nodata\n width = width or src_dataset.width\n height = height or src_dataset.height\n transform = transform or src_dataset.transform\n\n vrtdataset = ET.Element('VRTDataset')\n vrtdataset.attrib['rasterYSize'] = str(height)\n vrtdataset.attrib['rasterXSize'] = str(width)\n srs = ET.SubElement(vrtdataset, 'SRS')\n srs.text = src_dataset.crs.wkt if src_dataset.crs else \"\"\n geotransform = ET.SubElement(vrtdataset, 'GeoTransform')\n geotransform.text = ','.join([str(v) for v in transform.to_gdal()])\n\n for bidx, ci, block_shape, dtype in zip(src_dataset.indexes, src_dataset.colorinterp, src_dataset.block_shapes, src_dataset.dtypes):\n vrtrasterband = ET.SubElement(vrtdataset, 'VRTRasterBand')\n vrtrasterband.attrib['dataType'] = _gdal_typename(dtype)\n vrtrasterband.attrib['band'] = str(bidx)\n\n if background is not None or nodata is not None:\n nodatavalue = ET.SubElement(vrtrasterband, 'NoDataValue')\n nodatavalue.text = str(background or nodata)\n\n if hidenodata:\n hidenodatavalue = ET.SubElement(vrtrasterband, 'HideNoDataValue')\n hidenodatavalue.text = \"1\"\n\n colorinterp = ET.SubElement(vrtrasterband, 'ColorInterp')\n colorinterp.text = ci.name.capitalize()\n\n complexsource = ET.SubElement(vrtrasterband, 'ComplexSource')\n sourcefilename = ET.SubElement(complexsource, 'SourceFilename')\n sourcefilename.attrib['relativeToVRT'] = \"0\"\n sourcefilename.attrib[\"shared\"] = \"0\"\n sourcefilename.text = parse_path(src_dataset.name).as_vsi()\n sourceband = ET.SubElement(complexsource, 'SourceBand')\n sourceband.text = str(bidx)\n sourceproperties = ET.SubElement(complexsource, 'SourceProperties')\n sourceproperties.attrib['RasterXSize'] = str(width)\n sourceproperties.attrib['RasterYSize'] = str(height)\n sourceproperties.attrib['dataType'] = _gdal_typename(dtype)\n sourceproperties.attrib['BlockYSize'] = str(block_shape[0])\n sourceproperties.attrib['BlockXSize'] = str(block_shape[1])\n srcrect = ET.SubElement(complexsource, 'SrcRect')\n 
srcrect.attrib['xOff'] = '0'\n srcrect.attrib['yOff'] = '0'\n srcrect.attrib['xSize'] = str(src_dataset.width)\n srcrect.attrib['ySize'] = str(src_dataset.height)\n dstrect = ET.SubElement(complexsource, 'DstRect')\n dstrect.attrib['xOff'] = str((src_dataset.transform.xoff - transform.xoff) / transform.a)\n dstrect.attrib['yOff'] = str((src_dataset.transform.yoff - transform.yoff) / transform.e)\n dstrect.attrib['xSize'] = str(src_dataset.width * src_dataset.transform.a / transform.a)\n dstrect.attrib['ySize'] = str(src_dataset.height * src_dataset.transform.e / transform.e)\n\n if src_dataset.nodata is not None:\n nodata_elem = ET.SubElement(complexsource, 'NODATA')\n nodata_elem.text = str(src_dataset.nodata)\n\n if src_dataset.options is not None:\n openoptions = ET.SubElement(complexsource, 'OpenOptions')\n for ookey, oovalue in src_dataset.options.items():\n ooi = ET.SubElement(openoptions, 'OOI')\n ooi.attrib['key'] = str(ookey)\n ooi.text = str(oovalue)\n\n # Effectively replaces all values of the source dataset with\n # 255. Due to GDAL optimizations, the source dataset will not\n # be read, so we get a performance improvement.\n if masked:\n scaleratio = ET.SubElement(complexsource, 'ScaleRatio')\n scaleratio.text = '0'\n scaleoffset = ET.SubElement(complexsource, 'ScaleOffset')\n scaleoffset.text = '255'\n\n if all(MaskFlags.per_dataset in flags for flags in src_dataset.mask_flag_enums):\n maskband = ET.SubElement(vrtdataset, 'MaskBand')\n vrtrasterband = ET.SubElement(maskband, 'VRTRasterBand')\n vrtrasterband.attrib['dataType'] = 'Byte'\n\n simplesource = ET.SubElement(vrtrasterband, 'SimpleSource')\n sourcefilename = ET.SubElement(simplesource, 'SourceFilename')\n sourcefilename.attrib['relativeToVRT'] = \"0\"\n sourcefilename.attrib[\"shared\"] = \"0\"\n sourcefilename.text = parse_path(src_dataset.name).as_vsi()\n\n sourceband = ET.SubElement(simplesource, 'SourceBand')\n sourceband.text = 'mask,1'\n sourceproperties = ET.SubElement(simplesource, 'SourceProperties')\n sourceproperties.attrib['RasterXSize'] = str(width)\n sourceproperties.attrib['RasterYSize'] = str(height)\n sourceproperties.attrib['dataType'] = 'Byte'\n sourceproperties.attrib['BlockYSize'] = str(block_shape[0])\n sourceproperties.attrib['BlockXSize'] = str(block_shape[1])\n srcrect = ET.SubElement(simplesource, 'SrcRect')\n srcrect.attrib['xOff'] = '0'\n srcrect.attrib['yOff'] = '0'\n srcrect.attrib['xSize'] = str(src_dataset.width)\n srcrect.attrib['ySize'] = str(src_dataset.height)\n dstrect = ET.SubElement(simplesource, 'DstRect')\n dstrect.attrib['xOff'] = str((src_dataset.transform.xoff - transform.xoff) / transform.a)\n dstrect.attrib['yOff'] = str((src_dataset.transform.yoff - transform.yoff) / transform.e)\n dstrect.attrib['xSize'] = str(src_dataset.width)\n dstrect.attrib['ySize'] = str(src_dataset.height)\n\n return ET.tostring(vrtdataset).decode('ascii')\n", "path": "rasterio/vrt.py" } ]
diff --git a/rasterio/vrt.py b/rasterio/vrt.py index 3f3b65474..81b942cb3 100644 --- a/rasterio/vrt.py +++ b/rasterio/vrt.py @@ -122,9 +122,6 @@ def __exit__(self, *args, **kwargs): def __del__(self): self.close() - def close(self): - self.stop() - def _boundless_vrt_doc( src_dataset, nodata=None, background=None, hidenodata=False, diff --git a/tests/test_warpedvrt.py b/tests/test_warpedvrt.py index 9b98ebc23..28085c024 100644 --- a/tests/test_warpedvrt.py +++ b/tests/test_warpedvrt.py @@ -608,3 +608,11 @@ def test_vrt_mem_src_kept_alive(path_rgb_byte_tif): assert (vrt.read() != 0).any() vrt.close() + + +def test_warped_vrt_is_closed(path_rgb_byte_tif): + """A VirtualVRT should be set as closed on exit.""" + with rasterio.open(path_rgb_byte_tif) as src: + with WarpedVRT(src, crs=DST_CRS) as vrt: + assert not vrt.closed + assert vrt.closed
falconry__falcon-382
HTTP Range support is incomplete In HTTP RFC 2616, a Range header must be written in the format Range: bytes=0-1,3-4,6-7. I understand that falcon does not support multiple ranges (and I do not need that personally), but it currently does not even strip the "bytes=" prefix from the header's value before attempting to parse it. Here is the code of the range attribute: https://github.com/racker/falcon/blob/2fbe618d486c977df2bb7d7240386aa4a5f781c1/falcon/request.py#L340 A simple fix would be to strip the "bytes=" prefix if it is present at the start of the header's value.
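For illustration only, here is a minimal sketch of the suggested fix, written as a hypothetical standalone helper (`parse_range` is not part of falcon); it mirrors the inclusive (first, last) tuple convention of falcon's `Request.range` property and simply drops an optional "bytes=" prefix before parsing a single continuous range.

```python
def parse_range(value):
    """Parse e.g. 'bytes=0-499', '500-', or '-500' into an inclusive (first, last) tuple."""
    if value.startswith('bytes='):
        value = value[6:]  # drop the unit prefix defined by RFC 2616

    if ',' in value:
        raise ValueError('only a single continuous byte range is supported')

    first, sep, last = value.partition('-')
    if not sep:
        raise ValueError('malformed byte range')

    if first:
        return (int(first), int(last or -1))  # '0-499' -> (0, 499); '500-' -> (500, -1)
    elif last:
        return (-int(last), -1)               # suffix range: '-500' -> (-500, -1)
    raise ValueError('byte offsets are missing')


assert parse_range('bytes=0-499') == (0, 499)
assert parse_range('500-') == (500, -1)
assert parse_range('bytes=-500') == (-500, -1)
```

In falcon itself, the equivalent change would live inside the `Request.range` property, immediately after the raw HTTP_RANGE value is read from the WSGI environ, as shown in the patched request.py below.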
[ { "content": "# Copyright 2013 by Rackspace Hosting, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom datetime import datetime\n\ntry:\n # NOTE(kgrifs): In Python 2.6 and 2.7, socket._fileobject is a\n # standard way of exposing a socket as a file-like object, and\n # is used by wsgiref for wsgi.input.\n import socket\n NativeStream = socket._fileobject\nexcept AttributeError: # pragma nocover\n # NOTE(kgriffs): In Python 3.3, wsgiref implements wsgi.input\n # using _io.BufferedReader which is an alias of io.BufferedReader\n import io\n NativeStream = io.BufferedReader\n\nimport mimeparse\nimport six\n\nfrom falcon.errors import *\nfrom falcon import util\nfrom falcon.util import uri\nfrom falcon import request_helpers as helpers\n\n\nDEFAULT_ERROR_LOG_FORMAT = (u'{0:%Y-%m-%d %H:%M:%S} [FALCON] [ERROR]'\n u' {1} {2}{3} => ')\n\nTRUE_STRINGS = ('true', 'True', 'yes')\nFALSE_STRINGS = ('false', 'False', 'no')\nWSGI_CONTENT_HEADERS = ('CONTENT_TYPE', 'CONTENT_LENGTH')\n\n\n_maybe_wrap_wsgi_stream = True\n\n\nclass Request(object):\n \"\"\"Represents a client's HTTP request.\n\n Note:\n `Request` is not meant to be instantiated directly by responders.\n\n Args:\n env (dict): A WSGI environment dict passed in from the server. See\n also the PEP-3333 spec.\n options (dict): Set of global options passed from the API handler.\n\n Attributes:\n protocol (str): Either 'http' or 'https'.\n method (str): HTTP method requested (e.g., GET, POST, etc.)\n host (str): Hostname requested by the client\n subdomain (str): Leftmost (i.e., most specific) subdomain from the\n hostname. If only a single domain name is given, `subdomain`\n will be *None*.\n\n Note:\n If the hostname in the request is an IP address, the value\n for `subdomain` is undefined.\n\n user_agent (str): Value of the User-Agent header, or *None* if the\n header is missing.\n app (str): Name of the WSGI app (if using WSGI's notion of virtual\n hosting).\n env (dict): Reference to the WSGI *environ* dict passed in from the\n server. See also PEP-3333.\n context (dict): Dictionary to hold any data about the request which is\n specific to your app (e.g. session object). Falcon itself will\n not interact with this attribute after it has been initialized.\n context_type (None): Custom callable/type to use for initializing the\n ``context`` attribute. To change this value so that ``context``\n is initialized to the type of your choice (e.g. OrderedDict), you\n will need to extend this class and pass that new type to the\n ``request_type`` argument of ``falcon.API()``.\n uri (str): The fully-qualified URI for the request.\n url (str): alias for ``uri``.\n relative_uri (str): The path + query string portion of the full URI.\n path (str): Path portion of the request URL (not including query\n string).\n query_string (str): Query string portion of the request URL, without\n the preceding '?' 
character.\n accept (str): Value of the Accept header, or '*/*' if the header is\n missing.\n auth (str): Value of the Authorization header, or *None* if the header\n is missing.\n client_accepts_json (bool): True if the Accept header includes JSON,\n otherwise False.\n client_accepts_msgpack (bool): True if the Accept header includes\n msgpack, otherwise False.\n client_accepts_xml (bool): True if the Accept header includes XML,\n otherwise False.\n content_type (str): Value of the Content-Type header, or *None* if\n the header is missing.\n content_length (int): Value of the Content-Length header converted\n to an int, or *None* if the header is missing.\n stream: File-like object for reading the body of the request, if any.\n\n Note:\n If an HTML form is POSTed to the API using the\n *application/x-www-form-urlencoded* media type, Falcon\n will consume `stream` in order to parse the parameters\n and merge them into the query string parameters. In this\n case, the stream will be left at EOF.\n\n Note also that the character encoding for fields, before\n percent-encoding non-ASCII bytes, is assumed to be\n UTF-8. The special \"_charset_\" field is ignored if present.\n\n Falcon expects form-encoded request bodies to be\n encoded according to the standard W3C algorithm (see\n also http://goo.gl/6rlcux).\n\n date (datetime): Value of the Date header, converted to a\n `datetime.datetime` instance. The header value is assumed to\n conform to RFC 1123.\n expect (str): Value of the Expect header, or *None* if the\n header is missing.\n range (tuple of int): A 2-member tuple parsed from the value of the\n Range header.\n\n The two members correspond to the first and last byte\n positions of the requested resource, inclusive. Negative\n indices indicate offset from the end of the resource,\n where -1 is the last byte, -2 is the second-to-last byte,\n and so forth.\n\n Only continous ranges are supported (e.g., \"bytes=0-0,-1\" would\n result in an HTTPBadRequest exception when the attribute is\n accessed.)\n if_match (str): Value of the If-Match header, or *None* if the\n header is missing.\n if_none_match (str): Value of the If-None-Match header, or *None*\n if the header is missing.\n if_modified_since (str): Value of the If-Modified-Since header, or\n None if the header is missing.\n if_unmodified_since (str): Value of the If-Unmodified-Sinc header,\n or *None* if the header is missing.\n if_range (str): Value of the If-Range header, or *None* if the\n header is missing.\n\n headers (dict): Raw HTTP headers from the request with\n canonical dash-separated names. Parsing all the headers\n to create this dict is done the first time this attribute\n is accessed. This parsing can be costly, so unless you\n need all the headers in this format, you should use the\n ``get_header`` method or one of the convenience attributes\n instead, to get a value for a specific header.\n\n params (dict): The mapping of request query parameter names to their\n values. 
Where the parameter appears multiple times in the query\n string, the value mapped to that parameter key will be a list of\n all the values in the order seen.\n\n options (dict): Set of global options passed from the API handler.\n \"\"\"\n\n __slots__ = (\n '_cached_headers',\n '_cached_uri',\n '_cached_relative_uri',\n 'content_type',\n 'env',\n 'method',\n '_params',\n 'path',\n 'query_string',\n 'stream',\n 'context',\n '_wsgierrors',\n 'options',\n )\n\n # Allow child classes to override this\n context_type = None\n\n def __init__(self, env, options=None):\n global _maybe_wrap_wsgi_stream\n\n self.env = env\n self.options = options if options else RequestOptions()\n\n if self.context_type is None:\n # Literal syntax is more efficient than using dict()\n self.context = {}\n else:\n # pylint will detect this as not-callable because it only sees the\n # declaration of None, not whatever type a subclass may have set.\n self.context = self.context_type() # pylint: disable=not-callable\n\n self._wsgierrors = env['wsgi.errors']\n self.stream = env['wsgi.input']\n self.method = env['REQUEST_METHOD']\n\n # Normalize path\n path = env['PATH_INFO']\n if path:\n if len(path) != 1 and path.endswith('/'):\n self.path = path[:-1]\n else:\n self.path = path\n else:\n self.path = '/'\n\n self._params = {}\n\n # PERF(kgriffs): if...in is faster than using env.get(...)\n if 'QUERY_STRING' in env:\n query_str = env['QUERY_STRING']\n\n if query_str:\n self.query_string = uri.decode(query_str)\n self._params = uri.parse_query_string(\n self.query_string,\n keep_blank_qs_values=self.options.keep_blank_qs_values,\n )\n else:\n self.query_string = six.text_type()\n\n else:\n self.query_string = six.text_type()\n\n self._cached_headers = None\n self._cached_uri = None\n self._cached_relative_uri = None\n\n try:\n self.content_type = self.env['CONTENT_TYPE']\n except KeyError:\n self.content_type = None\n\n # NOTE(kgriffs): Wrap wsgi.input if needed to make read() more robust,\n # normalizing semantics between, e.g., gunicorn and wsgiref.\n if _maybe_wrap_wsgi_stream:\n if isinstance(self.stream, NativeStream):\n # NOTE(kgriffs): This is covered by tests, it's just that\n # coverage can't figure this out for some reason (TBD).\n self._wrap_stream() # pragma nocover\n else:\n # PERF(kgriffs): If self.stream does not need to be wrapped\n # this time, it never needs to be wrapped since the server\n # will continue using the same type for wsgi.input.\n _maybe_wrap_wsgi_stream = False\n\n # PERF(kgriffs): Technically, we should spend a few more\n # cycles and parse the content type for real, but\n # this heuristic will work virtually all the time.\n if (self.content_type is not None and\n 'application/x-www-form-urlencoded' in self.content_type):\n self._parse_form_urlencoded()\n\n # ------------------------------------------------------------------------\n # Properties\n # ------------------------------------------------------------------------\n\n user_agent = helpers.header_property('HTTP_USER_AGENT')\n auth = helpers.header_property('HTTP_AUTHORIZATION')\n\n expect = helpers.header_property('HTTP_EXPECT')\n\n if_match = helpers.header_property('HTTP_IF_MATCH')\n if_none_match = helpers.header_property('HTTP_IF_NONE_MATCH')\n if_modified_since = helpers.header_property('HTTP_IF_MODIFIED_SINCE')\n if_unmodified_since = helpers.header_property('HTTP_IF_UNMODIFIED_SINCE')\n if_range = helpers.header_property('HTTP_IF_RANGE')\n\n @property\n def client_accepts_json(self):\n return 
self.client_accepts('application/json')\n\n @property\n def client_accepts_msgpack(self):\n return self.client_accepts('application/x-msgpack')\n\n @property\n def client_accepts_xml(self):\n return self.client_accepts('application/xml')\n\n @property\n def accept(self):\n # NOTE(kgriffs): Per RFC, a missing accept header is\n # equivalent to '*/*'\n try:\n return self.env['HTTP_ACCEPT'] or '*/*'\n except KeyError:\n return '*/*'\n\n @property\n def content_length(self):\n try:\n value = self.env['CONTENT_LENGTH']\n except KeyError:\n return None\n\n # NOTE(kgriffs): Normalize an empty value to behave as if\n # the header were not included; wsgiref, at least, inserts\n # an empty CONTENT_LENGTH value if the request does not\n # set the header. Gunicorn and uWSGI do not do this, but\n # others might if they are trying to match wsgiref's\n # behavior too closely.\n if not value:\n return None\n\n try:\n value_as_int = int(value)\n except ValueError:\n msg = 'The value of the header must be a number.'\n raise HTTPInvalidHeader(msg, 'Content-Length')\n\n if value_as_int < 0:\n msg = 'The value of the header must be a positive number.'\n raise HTTPInvalidHeader(msg, 'Content-Length')\n\n return value_as_int\n\n @property\n def date(self):\n try:\n http_date = self.env['HTTP_DATE']\n except KeyError:\n return None\n\n try:\n return util.http_date_to_dt(http_date)\n except ValueError:\n msg = ('It must be formatted according to RFC 1123.')\n raise HTTPInvalidHeader(msg, 'Date')\n\n @property\n def range(self):\n try:\n value = self.env['HTTP_RANGE']\n except KeyError:\n return None\n\n if ',' in value:\n msg = 'The value must be a continuous byte range.'\n raise HTTPInvalidHeader(msg, 'Range')\n\n try:\n first, sep, last = value.partition('-')\n\n if not sep:\n raise ValueError()\n\n if first:\n return (int(first), int(last or -1))\n elif last:\n return (-int(last), -1)\n else:\n msg = 'The byte offsets are missing.'\n raise HTTPInvalidHeader(msg, 'Range')\n\n except ValueError:\n href = 'http://goo.gl/zZ6Ey'\n href_text = 'HTTP/1.1 Range Requests'\n msg = ('It must be a byte range formatted according to RFC 2616.')\n raise HTTPInvalidHeader(msg, 'Range', href=href,\n href_text=href_text)\n\n @property\n def app(self):\n return self.env.get('SCRIPT_NAME', '')\n\n @property\n def protocol(self):\n return self.env['wsgi.url_scheme']\n\n @property\n def uri(self):\n if self._cached_uri is None:\n env = self.env\n protocol = env['wsgi.url_scheme']\n\n # NOTE(kgriffs): According to PEP-3333 we should first\n # try to use the Host header if present.\n #\n # PERF(kgriffs): try..except is faster than .get\n try:\n host = env['HTTP_HOST']\n except KeyError:\n host = env['SERVER_NAME']\n port = env['SERVER_PORT']\n\n if protocol == 'https':\n if port != '443':\n host += ':' + port\n else:\n if port != '80':\n host += ':' + port\n\n # PERF: For small numbers of items, '+' is faster\n # than ''.join(...). Concatenation is also generally\n # faster than formatting.\n value = (protocol + '://' +\n host +\n self.app +\n self.path)\n\n if self.query_string:\n value = value + '?' 
+ self.query_string\n\n self._cached_uri = value\n\n return self._cached_uri\n\n url = uri\n\n @property\n def host(self):\n try:\n # NOTE(kgriffs): Prefer the host header; the web server\n # isn't supposed to mess with it, so it should be what\n # the client actually sent.\n host_header = self.env['HTTP_HOST']\n host, port = uri.parse_host(host_header)\n except KeyError:\n # PERF(kgriffs): According to PEP-3333, this header\n # will always be present.\n host = self.env['SERVER_NAME']\n\n return host\n\n @property\n def subdomain(self):\n # PERF(kgriffs): .partition is slightly faster than .split\n subdomain, sep, remainder = self.host.partition('.')\n return subdomain if sep else None\n\n @property\n def relative_uri(self):\n if self._cached_relative_uri is None:\n if self.query_string:\n self._cached_relative_uri = (self.app + self.path + '?' +\n self.query_string)\n else:\n self._cached_relative_uri = self.app + self.path\n\n return self._cached_relative_uri\n\n @property\n def headers(self):\n # NOTE(kgriffs: First time here will cache the dict so all we\n # have to do is clone it in the future.\n if self._cached_headers is None:\n headers = self._cached_headers = {}\n\n env = self.env\n for name, value in env.items():\n if name.startswith('HTTP_'):\n # NOTE(kgriffs): Don't take the time to fix the case\n # since headers are supposed to be case-insensitive\n # anyway.\n headers[name[5:].replace('_', '-')] = value\n\n elif name in WSGI_CONTENT_HEADERS:\n headers[name.replace('_', '-')] = value\n\n return self._cached_headers.copy()\n\n @property\n def params(self):\n return self._params\n\n # ------------------------------------------------------------------------\n # Methods\n # ------------------------------------------------------------------------\n\n def client_accepts(self, media_type):\n \"\"\"Determines whether or not the client accepts a given media type.\n\n Args:\n media_type (str): An Internet media type to check.\n\n Returns:\n bool: True if the client has indicated in the Accept header that\n it accepts the specified media type. Otherwise, returns\n False.\n \"\"\"\n\n accept = self.accept\n\n # PERF(kgriffs): Usually the following will be true, so\n # try it first.\n if (accept == media_type) or (accept == '*/*'):\n return True\n\n # Fall back to full-blown parsing\n try:\n return mimeparse.quality(media_type, accept) != 0.0\n except ValueError:\n return False\n\n def client_prefers(self, media_types):\n \"\"\"Returns the client's preferred media type given several choices.\n\n Args:\n media_types (iterable of str): One or more Internet media types\n from which to choose the client's preferred type. This value\n **must** be an iterable collection of strings.\n\n Returns:\n str: The client's preferred media type, based on the Accept\n header. 
Returns *None* if the client does not accept any\n of the given types.\n \"\"\"\n\n try:\n # NOTE(kgriffs): best_match will return '' if no match is found\n preferred_type = mimeparse.best_match(media_types, self.accept)\n except ValueError:\n # Value for the accept header was not formatted correctly\n preferred_type = ''\n\n return (preferred_type if preferred_type else None)\n\n def get_header(self, name, required=False):\n \"\"\"Return a header value as a string.\n\n Args:\n name (str): Header name, case-insensitive (e.g., 'Content-Type')\n required (bool, optional): Set to True to raise HttpBadRequest\n instead of returning gracefully when the header is not found\n (default False).\n\n Returns:\n str: The value of the specified header if it exists, or *None* if\n the header is not found and is not required.\n\n Raises:\n HTTPBadRequest: The header was not found in the request, but\n it was required.\n\n \"\"\"\n\n wsgi_name = name.upper().replace('-', '_')\n\n # Use try..except to optimize for the header existing in most cases\n try:\n # Don't take the time to cache beforehand, using HTTP naming.\n # This will be faster, assuming that most headers are looked\n # up only once, and not all headers will be requested.\n return self.env['HTTP_' + wsgi_name]\n\n except KeyError:\n # NOTE(kgriffs): There are a couple headers that do not\n # use the HTTP prefix in the env, so try those. We expect\n # people to usually just use the relevant helper properties\n # to access these instead of .get_header.\n if wsgi_name in WSGI_CONTENT_HEADERS:\n try:\n return self.env[wsgi_name]\n except KeyError:\n pass\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n def get_param(self, name, required=False, store=None):\n \"\"\"Return the value of a query string parameter as a string.\n\n Note:\n If an HTML form is POSTed to the API using the\n *application/x-www-form-urlencoded* media type, the\n parameters from the request body will be merged into\n the query string parameters.\n\n If a key appears more than once in the form data, one of the\n values will be returned as a string, but it is undefined which\n one. Use .get_param_as_list() to retrieve all the values.\n\n Note:\n If a query parameter is assigned a comma-separated list of\n values (e.g., foo=a,b,c) then only one of the values will be\n returned, and it is undefined which one. Use\n .get_param_as_list() to retrieve all the values.\n\n Args:\n name (str): Parameter name, case-sensitive (e.g., 'sort')\n required (bool, optional): Set to True to raise HTTPBadRequest\n instead of returning gracefully when the parameter is not\n found (default False)\n store (dict, optional): A dict-like object in which to place the\n value of the param, but only if the param is found.\n\n Returns:\n string: The value of the param as a string, or *None* if param is\n not found and is not required.\n\n Raises:\n HTTPBadRequest: The param was not found in the request, but was\n required.\n\n \"\"\"\n\n params = self._params\n\n # PERF: Use if..in since it is a good all-around performer; we don't\n # know how likely params are to be specified by clients.\n if name in params:\n # NOTE(warsaw): If the key appeared multiple times, it will be\n # stored internally as a list. 
We do not define which one\n # actually gets returned, but let's pick the last one for grins.\n param = params[name]\n if isinstance(param, list):\n param = param[-1]\n\n if store is not None:\n store[name] = param\n\n return param\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n def get_param_as_int(self, name,\n required=False, min=None, max=None, store=None):\n \"\"\"Return the value of a query string parameter as an int.\n\n Args:\n name (str): Parameter name, case-sensitive (e.g., 'limit')\n required (bool, optional): Set to True to raise HTTPBadRequest\n instead of returning gracefully when the parameter is not\n found or is not an integer (default False).\n min (int, optional): Set to the minimum value allowed for this\n param. If the param is found and it is less than min, an\n HTTPError is raised.\n max (int, optional): Set to the maximum value allowed for this\n param. If the param is found and its value is greater than\n max, an HTTPError is raised.\n store (dict, optional): A dict-like object in which to place the\n value of the param, but only if the param is found (default\n *None*).\n\n Returns:\n int: The value of the param if it is found and can be converted to\n an integer. If the param is not found, returns *None*, unless\n ``required`` is True.\n\n Raises\n HTTPBadRequest: The param was not found in the request, even though\n it was required to be there. Also raised if the param's value\n falls outside the given interval, i.e., the value must be in\n the interval: min <= value <= max to avoid triggering an error.\n\n \"\"\"\n\n params = self._params\n\n # PERF: Use if..in since it is a good all-around performer; we don't\n # know how likely params are to be specified by clients.\n if name in params:\n val = params[name]\n if isinstance(val, list):\n val = val[-1]\n\n try:\n val = int(val)\n except ValueError:\n msg = 'The value must be an integer.'\n raise HTTPInvalidParam(msg, name)\n\n if min is not None and val < min:\n msg = 'The value must be at least ' + str(min)\n raise HTTPInvalidParam(msg, name)\n\n if max is not None and max < val:\n msg = 'The value may not exceed ' + str(max)\n raise HTTPInvalidParam(msg, name)\n\n if store is not None:\n store[name] = val\n\n return val\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n def get_param_as_bool(self, name, required=False, store=None,\n blank_as_true=False):\n \"\"\"Return the value of a query string parameter as a boolean\n\n The following bool-like strings are supported::\n\n TRUE_STRINGS = ('true', 'True', 'yes')\n FALSE_STRINGS = ('false', 'False', 'no')\n\n Args:\n name (str): Parameter name, case-sensitive (e.g., 'limit')\n required (bool, optional): Set to True to raise HTTPBadRequest\n instead of returning gracefully when the parameter is not\n found or is not a recognized bool-ish string (default False).\n store (dict, optional): A dict-like object in which to place the\n value of the param, but only if the param is found (default\n *None*).\n blank_as_true (bool): If True, empty strings will be treated as\n True. keep_blank_qs_values must be set on the Request (or API\n object and inherited) for empty strings to not be filtered.\n\n Returns:\n bool: The value of the param if it is found and can be converted\n to a boolean. 
If the param is not found, returns *None* unless\n required is True.\n\n Raises\n HTTPBadRequest: The param was not found in the request, even though\n it was required to be there.\n\n \"\"\"\n\n params = self._params\n\n # PERF: Use if..in since it is a good all-around performer; we don't\n # know how likely params are to be specified by clients.\n if name in params:\n val = params[name]\n if isinstance(val, list):\n val = val[-1]\n\n if val in TRUE_STRINGS:\n val = True\n elif val in FALSE_STRINGS:\n val = False\n elif blank_as_true and not val:\n val = True\n else:\n msg = 'The value of the parameter must be \"true\" or \"false\".'\n raise HTTPInvalidParam(msg, name)\n\n if store is not None:\n store[name] = val\n\n return val\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n def get_param_as_list(self, name,\n transform=None, required=False, store=None):\n \"\"\"Return the value of a query string parameter as a list.\n\n List items must be comma-separated or must be provided\n as multiple instances of the same param in the query string\n ala *application/x-www-form-urlencoded*.\n\n Args:\n name (str): Parameter name, case-sensitive (e.g., 'limit')\n transform (callable, optional): An optional transform function\n that takes as input each element in the list as a string and\n outputs a transformed element for inclusion in the list that\n will be returned. For example, passing the int function will\n transform list items into numbers.\n required (bool, optional): Set to True to raise HTTPBadRequest\n instead of returning gracefully when the parameter is not\n found or is not an integer (default False)\n store (dict, optional): A dict-like object in which to place the\n value of the param, but only if the param is found (default\n *None*).\n\n Returns:\n list: The value of the param if it is found. Otherwise, returns\n *None* unless required is True. Empty list elements will be\n discarded. For example a query string containing this::\n\n things=1,,3\n\n or a query string containing this::\n\n things=1&things=&things=3\n\n would both result in::\n\n ['1', '3']\n\n Raises\n HTTPBadRequest: The param was not found in the request, but was\n required.\n \"\"\"\n\n params = self._params\n\n # PERF: Use if..in since it is a good all-around performer; we don't\n # know how likely params are to be specified by clients.\n if name in params:\n items = params[name]\n\n # NOTE(warsaw): When a key appears multiple times in the request\n # query, it will already be represented internally as a list.\n # NOTE(kgriffs): Likewise for comma-delimited values.\n if not isinstance(items, list):\n items = [items]\n\n # PERF(kgriffs): Use if-else rather than a DRY approach\n # that sets transform to a passthrough function; avoids\n # function calling overhead.\n if transform is not None:\n try:\n items = [transform(i) for i in items]\n\n except ValueError:\n msg = 'The value is not formatted correctly.'\n raise HTTPInvalidParam(msg, name)\n\n if store is not None:\n store[name] = items\n\n return items\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n # TODO(kgriffs): Use the nocover pragma only for the six.PY3 if..else\n def log_error(self, message): # pragma: no cover\n \"\"\"Write an error message to the server's log.\n\n Prepends timestamp and request info to message, and writes the\n result out to the WSGI server's error stream (`wsgi.error`).\n\n Args:\n message (str): A string describing the problem. If a byte-string\n it is simply written out as-is. 
Unicode strings will be\n converted to UTF-8.\n\n \"\"\"\n\n if self.query_string:\n query_string_formatted = '?' + self.query_string\n else:\n query_string_formatted = ''\n\n log_line = (\n DEFAULT_ERROR_LOG_FORMAT.\n format(datetime.now(), self.method, self.path,\n query_string_formatted)\n )\n\n if six.PY3:\n self._wsgierrors.write(log_line + message + '\\n')\n else:\n if isinstance(message, unicode):\n message = message.encode('utf-8')\n\n self._wsgierrors.write(log_line.encode('utf-8'))\n self._wsgierrors.write(message + '\\n')\n\n # ------------------------------------------------------------------------\n # Helpers\n # ------------------------------------------------------------------------\n\n def _wrap_stream(self): # pragma nocover\n try:\n # NOTE(kgriffs): We can only add the wrapper if the\n # content-length header was provided.\n if self.content_length is not None:\n self.stream = helpers.Body(self.stream, self.content_length)\n\n except HTTPInvalidHeader:\n # NOTE(kgriffs): The content-length header was specified,\n # but it had an invalid value.\n pass\n\n def _parse_form_urlencoded(self):\n # NOTE(kgriffs): This assumes self.stream has been patched\n # above in the case of wsgiref, so that self.content_length\n # is not needed. Normally we just avoid accessing\n # self.content_length, because it is a little expensive\n # to call. We could cache self.content_length, but the\n # overhead to do that won't usually be helpful, since\n # content length will only ever be read once per\n # request in most cases.\n body = self.stream.read()\n\n # NOTE(kgriffs): According to http://goo.gl/6rlcux the\n # body should be US-ASCII. Enforcing this also helps\n # catch malicious input.\n try:\n body = body.decode('ascii')\n except UnicodeDecodeError:\n body = None\n self.log_error('Non-ASCII characters found in form body '\n 'with Content-Type of '\n 'application/x-www-form-urlencoded. Body '\n 'will be ignored.')\n\n if body:\n extra_params = uri.parse_query_string(\n uri.decode(body),\n keep_blank_qs_values=self.options.keep_blank_qs_values,\n )\n\n self._params.update(extra_params)\n\n\n# PERF: To avoid typos and improve storage space and speed over a dict.\nclass RequestOptions(object):\n \"\"\"This class is a container for Request options.\n\n Attributes:\n keep_blank_qs_values (bool): Set to ``True`` in order to retain\n blank values in query string parameters (default ``False``.)\n\n \"\"\"\n __slots__ = (\n 'keep_blank_qs_values',\n )\n\n def __init__(self):\n self.keep_blank_qs_values = False\n", "path": "falcon/request.py" } ]
[ { "content": "# Copyright 2013 by Rackspace Hosting, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom datetime import datetime\n\ntry:\n # NOTE(kgrifs): In Python 2.6 and 2.7, socket._fileobject is a\n # standard way of exposing a socket as a file-like object, and\n # is used by wsgiref for wsgi.input.\n import socket\n NativeStream = socket._fileobject\nexcept AttributeError: # pragma nocover\n # NOTE(kgriffs): In Python 3.3, wsgiref implements wsgi.input\n # using _io.BufferedReader which is an alias of io.BufferedReader\n import io\n NativeStream = io.BufferedReader\n\nimport mimeparse\nimport six\n\nfrom falcon.errors import *\nfrom falcon import util\nfrom falcon.util import uri\nfrom falcon import request_helpers as helpers\n\n\nDEFAULT_ERROR_LOG_FORMAT = (u'{0:%Y-%m-%d %H:%M:%S} [FALCON] [ERROR]'\n u' {1} {2}{3} => ')\n\nTRUE_STRINGS = ('true', 'True', 'yes')\nFALSE_STRINGS = ('false', 'False', 'no')\nWSGI_CONTENT_HEADERS = ('CONTENT_TYPE', 'CONTENT_LENGTH')\n\n\n_maybe_wrap_wsgi_stream = True\n\n\nclass Request(object):\n \"\"\"Represents a client's HTTP request.\n\n Note:\n `Request` is not meant to be instantiated directly by responders.\n\n Args:\n env (dict): A WSGI environment dict passed in from the server. See\n also the PEP-3333 spec.\n options (dict): Set of global options passed from the API handler.\n\n Attributes:\n protocol (str): Either 'http' or 'https'.\n method (str): HTTP method requested (e.g., GET, POST, etc.)\n host (str): Hostname requested by the client\n subdomain (str): Leftmost (i.e., most specific) subdomain from the\n hostname. If only a single domain name is given, `subdomain`\n will be *None*.\n\n Note:\n If the hostname in the request is an IP address, the value\n for `subdomain` is undefined.\n\n user_agent (str): Value of the User-Agent header, or *None* if the\n header is missing.\n app (str): Name of the WSGI app (if using WSGI's notion of virtual\n hosting).\n env (dict): Reference to the WSGI *environ* dict passed in from the\n server. See also PEP-3333.\n context (dict): Dictionary to hold any data about the request which is\n specific to your app (e.g. session object). Falcon itself will\n not interact with this attribute after it has been initialized.\n context_type (None): Custom callable/type to use for initializing the\n ``context`` attribute. To change this value so that ``context``\n is initialized to the type of your choice (e.g. OrderedDict), you\n will need to extend this class and pass that new type to the\n ``request_type`` argument of ``falcon.API()``.\n uri (str): The fully-qualified URI for the request.\n url (str): alias for ``uri``.\n relative_uri (str): The path + query string portion of the full URI.\n path (str): Path portion of the request URL (not including query\n string).\n query_string (str): Query string portion of the request URL, without\n the preceding '?' 
character.\n accept (str): Value of the Accept header, or '*/*' if the header is\n missing.\n auth (str): Value of the Authorization header, or *None* if the header\n is missing.\n client_accepts_json (bool): True if the Accept header includes JSON,\n otherwise False.\n client_accepts_msgpack (bool): True if the Accept header includes\n msgpack, otherwise False.\n client_accepts_xml (bool): True if the Accept header includes XML,\n otherwise False.\n content_type (str): Value of the Content-Type header, or *None* if\n the header is missing.\n content_length (int): Value of the Content-Length header converted\n to an int, or *None* if the header is missing.\n stream: File-like object for reading the body of the request, if any.\n\n Note:\n If an HTML form is POSTed to the API using the\n *application/x-www-form-urlencoded* media type, Falcon\n will consume `stream` in order to parse the parameters\n and merge them into the query string parameters. In this\n case, the stream will be left at EOF.\n\n Note also that the character encoding for fields, before\n percent-encoding non-ASCII bytes, is assumed to be\n UTF-8. The special \"_charset_\" field is ignored if present.\n\n Falcon expects form-encoded request bodies to be\n encoded according to the standard W3C algorithm (see\n also http://goo.gl/6rlcux).\n\n date (datetime): Value of the Date header, converted to a\n `datetime.datetime` instance. The header value is assumed to\n conform to RFC 1123.\n expect (str): Value of the Expect header, or *None* if the\n header is missing.\n range (tuple of int): A 2-member tuple parsed from the value of the\n Range header.\n\n The two members correspond to the first and last byte\n positions of the requested resource, inclusive. Negative\n indices indicate offset from the end of the resource,\n where -1 is the last byte, -2 is the second-to-last byte,\n and so forth.\n\n Only continous ranges are supported (e.g., \"bytes=0-0,-1\" would\n result in an HTTPBadRequest exception when the attribute is\n accessed.)\n if_match (str): Value of the If-Match header, or *None* if the\n header is missing.\n if_none_match (str): Value of the If-None-Match header, or *None*\n if the header is missing.\n if_modified_since (str): Value of the If-Modified-Since header, or\n None if the header is missing.\n if_unmodified_since (str): Value of the If-Unmodified-Sinc header,\n or *None* if the header is missing.\n if_range (str): Value of the If-Range header, or *None* if the\n header is missing.\n\n headers (dict): Raw HTTP headers from the request with\n canonical dash-separated names. Parsing all the headers\n to create this dict is done the first time this attribute\n is accessed. This parsing can be costly, so unless you\n need all the headers in this format, you should use the\n ``get_header`` method or one of the convenience attributes\n instead, to get a value for a specific header.\n\n params (dict): The mapping of request query parameter names to their\n values. 
Where the parameter appears multiple times in the query\n string, the value mapped to that parameter key will be a list of\n all the values in the order seen.\n\n options (dict): Set of global options passed from the API handler.\n \"\"\"\n\n __slots__ = (\n '_cached_headers',\n '_cached_uri',\n '_cached_relative_uri',\n 'content_type',\n 'env',\n 'method',\n '_params',\n 'path',\n 'query_string',\n 'stream',\n 'context',\n '_wsgierrors',\n 'options',\n )\n\n # Allow child classes to override this\n context_type = None\n\n def __init__(self, env, options=None):\n global _maybe_wrap_wsgi_stream\n\n self.env = env\n self.options = options if options else RequestOptions()\n\n if self.context_type is None:\n # Literal syntax is more efficient than using dict()\n self.context = {}\n else:\n # pylint will detect this as not-callable because it only sees the\n # declaration of None, not whatever type a subclass may have set.\n self.context = self.context_type() # pylint: disable=not-callable\n\n self._wsgierrors = env['wsgi.errors']\n self.stream = env['wsgi.input']\n self.method = env['REQUEST_METHOD']\n\n # Normalize path\n path = env['PATH_INFO']\n if path:\n if len(path) != 1 and path.endswith('/'):\n self.path = path[:-1]\n else:\n self.path = path\n else:\n self.path = '/'\n\n self._params = {}\n\n # PERF(kgriffs): if...in is faster than using env.get(...)\n if 'QUERY_STRING' in env:\n query_str = env['QUERY_STRING']\n\n if query_str:\n self.query_string = uri.decode(query_str)\n self._params = uri.parse_query_string(\n self.query_string,\n keep_blank_qs_values=self.options.keep_blank_qs_values,\n )\n else:\n self.query_string = six.text_type()\n\n else:\n self.query_string = six.text_type()\n\n self._cached_headers = None\n self._cached_uri = None\n self._cached_relative_uri = None\n\n try:\n self.content_type = self.env['CONTENT_TYPE']\n except KeyError:\n self.content_type = None\n\n # NOTE(kgriffs): Wrap wsgi.input if needed to make read() more robust,\n # normalizing semantics between, e.g., gunicorn and wsgiref.\n if _maybe_wrap_wsgi_stream:\n if isinstance(self.stream, NativeStream):\n # NOTE(kgriffs): This is covered by tests, it's just that\n # coverage can't figure this out for some reason (TBD).\n self._wrap_stream() # pragma nocover\n else:\n # PERF(kgriffs): If self.stream does not need to be wrapped\n # this time, it never needs to be wrapped since the server\n # will continue using the same type for wsgi.input.\n _maybe_wrap_wsgi_stream = False\n\n # PERF(kgriffs): Technically, we should spend a few more\n # cycles and parse the content type for real, but\n # this heuristic will work virtually all the time.\n if (self.content_type is not None and\n 'application/x-www-form-urlencoded' in self.content_type):\n self._parse_form_urlencoded()\n\n # ------------------------------------------------------------------------\n # Properties\n # ------------------------------------------------------------------------\n\n user_agent = helpers.header_property('HTTP_USER_AGENT')\n auth = helpers.header_property('HTTP_AUTHORIZATION')\n\n expect = helpers.header_property('HTTP_EXPECT')\n\n if_match = helpers.header_property('HTTP_IF_MATCH')\n if_none_match = helpers.header_property('HTTP_IF_NONE_MATCH')\n if_modified_since = helpers.header_property('HTTP_IF_MODIFIED_SINCE')\n if_unmodified_since = helpers.header_property('HTTP_IF_UNMODIFIED_SINCE')\n if_range = helpers.header_property('HTTP_IF_RANGE')\n\n @property\n def client_accepts_json(self):\n return 
self.client_accepts('application/json')\n\n @property\n def client_accepts_msgpack(self):\n return self.client_accepts('application/x-msgpack')\n\n @property\n def client_accepts_xml(self):\n return self.client_accepts('application/xml')\n\n @property\n def accept(self):\n # NOTE(kgriffs): Per RFC, a missing accept header is\n # equivalent to '*/*'\n try:\n return self.env['HTTP_ACCEPT'] or '*/*'\n except KeyError:\n return '*/*'\n\n @property\n def content_length(self):\n try:\n value = self.env['CONTENT_LENGTH']\n except KeyError:\n return None\n\n # NOTE(kgriffs): Normalize an empty value to behave as if\n # the header were not included; wsgiref, at least, inserts\n # an empty CONTENT_LENGTH value if the request does not\n # set the header. Gunicorn and uWSGI do not do this, but\n # others might if they are trying to match wsgiref's\n # behavior too closely.\n if not value:\n return None\n\n try:\n value_as_int = int(value)\n except ValueError:\n msg = 'The value of the header must be a number.'\n raise HTTPInvalidHeader(msg, 'Content-Length')\n\n if value_as_int < 0:\n msg = 'The value of the header must be a positive number.'\n raise HTTPInvalidHeader(msg, 'Content-Length')\n\n return value_as_int\n\n @property\n def date(self):\n try:\n http_date = self.env['HTTP_DATE']\n except KeyError:\n return None\n\n try:\n return util.http_date_to_dt(http_date)\n except ValueError:\n msg = ('It must be formatted according to RFC 1123.')\n raise HTTPInvalidHeader(msg, 'Date')\n\n @property\n def range(self):\n try:\n value = self.env['HTTP_RANGE']\n if value.startswith('bytes='):\n value = value[6:]\n except KeyError:\n return None\n\n if ',' in value:\n msg = 'The value must be a continuous byte range.'\n raise HTTPInvalidHeader(msg, 'Range')\n\n try:\n first, sep, last = value.partition('-')\n\n if not sep:\n raise ValueError()\n\n if first:\n return (int(first), int(last or -1))\n elif last:\n return (-int(last), -1)\n else:\n msg = 'The byte offsets are missing.'\n raise HTTPInvalidHeader(msg, 'Range')\n\n except ValueError:\n href = 'http://goo.gl/zZ6Ey'\n href_text = 'HTTP/1.1 Range Requests'\n msg = ('It must be a byte range formatted according to RFC 2616.')\n raise HTTPInvalidHeader(msg, 'Range', href=href,\n href_text=href_text)\n\n @property\n def app(self):\n return self.env.get('SCRIPT_NAME', '')\n\n @property\n def protocol(self):\n return self.env['wsgi.url_scheme']\n\n @property\n def uri(self):\n if self._cached_uri is None:\n env = self.env\n protocol = env['wsgi.url_scheme']\n\n # NOTE(kgriffs): According to PEP-3333 we should first\n # try to use the Host header if present.\n #\n # PERF(kgriffs): try..except is faster than .get\n try:\n host = env['HTTP_HOST']\n except KeyError:\n host = env['SERVER_NAME']\n port = env['SERVER_PORT']\n\n if protocol == 'https':\n if port != '443':\n host += ':' + port\n else:\n if port != '80':\n host += ':' + port\n\n # PERF: For small numbers of items, '+' is faster\n # than ''.join(...). Concatenation is also generally\n # faster than formatting.\n value = (protocol + '://' +\n host +\n self.app +\n self.path)\n\n if self.query_string:\n value = value + '?' 
+ self.query_string\n\n self._cached_uri = value\n\n return self._cached_uri\n\n url = uri\n\n @property\n def host(self):\n try:\n # NOTE(kgriffs): Prefer the host header; the web server\n # isn't supposed to mess with it, so it should be what\n # the client actually sent.\n host_header = self.env['HTTP_HOST']\n host, port = uri.parse_host(host_header)\n except KeyError:\n # PERF(kgriffs): According to PEP-3333, this header\n # will always be present.\n host = self.env['SERVER_NAME']\n\n return host\n\n @property\n def subdomain(self):\n # PERF(kgriffs): .partition is slightly faster than .split\n subdomain, sep, remainder = self.host.partition('.')\n return subdomain if sep else None\n\n @property\n def relative_uri(self):\n if self._cached_relative_uri is None:\n if self.query_string:\n self._cached_relative_uri = (self.app + self.path + '?' +\n self.query_string)\n else:\n self._cached_relative_uri = self.app + self.path\n\n return self._cached_relative_uri\n\n @property\n def headers(self):\n # NOTE(kgriffs: First time here will cache the dict so all we\n # have to do is clone it in the future.\n if self._cached_headers is None:\n headers = self._cached_headers = {}\n\n env = self.env\n for name, value in env.items():\n if name.startswith('HTTP_'):\n # NOTE(kgriffs): Don't take the time to fix the case\n # since headers are supposed to be case-insensitive\n # anyway.\n headers[name[5:].replace('_', '-')] = value\n\n elif name in WSGI_CONTENT_HEADERS:\n headers[name.replace('_', '-')] = value\n\n return self._cached_headers.copy()\n\n @property\n def params(self):\n return self._params\n\n # ------------------------------------------------------------------------\n # Methods\n # ------------------------------------------------------------------------\n\n def client_accepts(self, media_type):\n \"\"\"Determines whether or not the client accepts a given media type.\n\n Args:\n media_type (str): An Internet media type to check.\n\n Returns:\n bool: True if the client has indicated in the Accept header that\n it accepts the specified media type. Otherwise, returns\n False.\n \"\"\"\n\n accept = self.accept\n\n # PERF(kgriffs): Usually the following will be true, so\n # try it first.\n if (accept == media_type) or (accept == '*/*'):\n return True\n\n # Fall back to full-blown parsing\n try:\n return mimeparse.quality(media_type, accept) != 0.0\n except ValueError:\n return False\n\n def client_prefers(self, media_types):\n \"\"\"Returns the client's preferred media type given several choices.\n\n Args:\n media_types (iterable of str): One or more Internet media types\n from which to choose the client's preferred type. This value\n **must** be an iterable collection of strings.\n\n Returns:\n str: The client's preferred media type, based on the Accept\n header. 
Returns *None* if the client does not accept any\n of the given types.\n \"\"\"\n\n try:\n # NOTE(kgriffs): best_match will return '' if no match is found\n preferred_type = mimeparse.best_match(media_types, self.accept)\n except ValueError:\n # Value for the accept header was not formatted correctly\n preferred_type = ''\n\n return (preferred_type if preferred_type else None)\n\n def get_header(self, name, required=False):\n \"\"\"Return a header value as a string.\n\n Args:\n name (str): Header name, case-insensitive (e.g., 'Content-Type')\n required (bool, optional): Set to True to raise HttpBadRequest\n instead of returning gracefully when the header is not found\n (default False).\n\n Returns:\n str: The value of the specified header if it exists, or *None* if\n the header is not found and is not required.\n\n Raises:\n HTTPBadRequest: The header was not found in the request, but\n it was required.\n\n \"\"\"\n\n wsgi_name = name.upper().replace('-', '_')\n\n # Use try..except to optimize for the header existing in most cases\n try:\n # Don't take the time to cache beforehand, using HTTP naming.\n # This will be faster, assuming that most headers are looked\n # up only once, and not all headers will be requested.\n return self.env['HTTP_' + wsgi_name]\n\n except KeyError:\n # NOTE(kgriffs): There are a couple headers that do not\n # use the HTTP prefix in the env, so try those. We expect\n # people to usually just use the relevant helper properties\n # to access these instead of .get_header.\n if wsgi_name in WSGI_CONTENT_HEADERS:\n try:\n return self.env[wsgi_name]\n except KeyError:\n pass\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n def get_param(self, name, required=False, store=None):\n \"\"\"Return the value of a query string parameter as a string.\n\n Note:\n If an HTML form is POSTed to the API using the\n *application/x-www-form-urlencoded* media type, the\n parameters from the request body will be merged into\n the query string parameters.\n\n If a key appears more than once in the form data, one of the\n values will be returned as a string, but it is undefined which\n one. Use .get_param_as_list() to retrieve all the values.\n\n Note:\n If a query parameter is assigned a comma-separated list of\n values (e.g., foo=a,b,c) then only one of the values will be\n returned, and it is undefined which one. Use\n .get_param_as_list() to retrieve all the values.\n\n Args:\n name (str): Parameter name, case-sensitive (e.g., 'sort')\n required (bool, optional): Set to True to raise HTTPBadRequest\n instead of returning gracefully when the parameter is not\n found (default False)\n store (dict, optional): A dict-like object in which to place the\n value of the param, but only if the param is found.\n\n Returns:\n string: The value of the param as a string, or *None* if param is\n not found and is not required.\n\n Raises:\n HTTPBadRequest: The param was not found in the request, but was\n required.\n\n \"\"\"\n\n params = self._params\n\n # PERF: Use if..in since it is a good all-around performer; we don't\n # know how likely params are to be specified by clients.\n if name in params:\n # NOTE(warsaw): If the key appeared multiple times, it will be\n # stored internally as a list. 
We do not define which one\n # actually gets returned, but let's pick the last one for grins.\n param = params[name]\n if isinstance(param, list):\n param = param[-1]\n\n if store is not None:\n store[name] = param\n\n return param\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n def get_param_as_int(self, name,\n required=False, min=None, max=None, store=None):\n \"\"\"Return the value of a query string parameter as an int.\n\n Args:\n name (str): Parameter name, case-sensitive (e.g., 'limit')\n required (bool, optional): Set to True to raise HTTPBadRequest\n instead of returning gracefully when the parameter is not\n found or is not an integer (default False).\n min (int, optional): Set to the minimum value allowed for this\n param. If the param is found and it is less than min, an\n HTTPError is raised.\n max (int, optional): Set to the maximum value allowed for this\n param. If the param is found and its value is greater than\n max, an HTTPError is raised.\n store (dict, optional): A dict-like object in which to place the\n value of the param, but only if the param is found (default\n *None*).\n\n Returns:\n int: The value of the param if it is found and can be converted to\n an integer. If the param is not found, returns *None*, unless\n ``required`` is True.\n\n Raises\n HTTPBadRequest: The param was not found in the request, even though\n it was required to be there. Also raised if the param's value\n falls outside the given interval, i.e., the value must be in\n the interval: min <= value <= max to avoid triggering an error.\n\n \"\"\"\n\n params = self._params\n\n # PERF: Use if..in since it is a good all-around performer; we don't\n # know how likely params are to be specified by clients.\n if name in params:\n val = params[name]\n if isinstance(val, list):\n val = val[-1]\n\n try:\n val = int(val)\n except ValueError:\n msg = 'The value must be an integer.'\n raise HTTPInvalidParam(msg, name)\n\n if min is not None and val < min:\n msg = 'The value must be at least ' + str(min)\n raise HTTPInvalidParam(msg, name)\n\n if max is not None and max < val:\n msg = 'The value may not exceed ' + str(max)\n raise HTTPInvalidParam(msg, name)\n\n if store is not None:\n store[name] = val\n\n return val\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n def get_param_as_bool(self, name, required=False, store=None,\n blank_as_true=False):\n \"\"\"Return the value of a query string parameter as a boolean\n\n The following bool-like strings are supported::\n\n TRUE_STRINGS = ('true', 'True', 'yes')\n FALSE_STRINGS = ('false', 'False', 'no')\n\n Args:\n name (str): Parameter name, case-sensitive (e.g., 'limit')\n required (bool, optional): Set to True to raise HTTPBadRequest\n instead of returning gracefully when the parameter is not\n found or is not a recognized bool-ish string (default False).\n store (dict, optional): A dict-like object in which to place the\n value of the param, but only if the param is found (default\n *None*).\n blank_as_true (bool): If True, empty strings will be treated as\n True. keep_blank_qs_values must be set on the Request (or API\n object and inherited) for empty strings to not be filtered.\n\n Returns:\n bool: The value of the param if it is found and can be converted\n to a boolean. 
If the param is not found, returns *None* unless\n required is True.\n\n Raises\n HTTPBadRequest: The param was not found in the request, even though\n it was required to be there.\n\n \"\"\"\n\n params = self._params\n\n # PERF: Use if..in since it is a good all-around performer; we don't\n # know how likely params are to be specified by clients.\n if name in params:\n val = params[name]\n if isinstance(val, list):\n val = val[-1]\n\n if val in TRUE_STRINGS:\n val = True\n elif val in FALSE_STRINGS:\n val = False\n elif blank_as_true and not val:\n val = True\n else:\n msg = 'The value of the parameter must be \"true\" or \"false\".'\n raise HTTPInvalidParam(msg, name)\n\n if store is not None:\n store[name] = val\n\n return val\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n def get_param_as_list(self, name,\n transform=None, required=False, store=None):\n \"\"\"Return the value of a query string parameter as a list.\n\n List items must be comma-separated or must be provided\n as multiple instances of the same param in the query string\n ala *application/x-www-form-urlencoded*.\n\n Args:\n name (str): Parameter name, case-sensitive (e.g., 'limit')\n transform (callable, optional): An optional transform function\n that takes as input each element in the list as a string and\n outputs a transformed element for inclusion in the list that\n will be returned. For example, passing the int function will\n transform list items into numbers.\n required (bool, optional): Set to True to raise HTTPBadRequest\n instead of returning gracefully when the parameter is not\n found or is not an integer (default False)\n store (dict, optional): A dict-like object in which to place the\n value of the param, but only if the param is found (default\n *None*).\n\n Returns:\n list: The value of the param if it is found. Otherwise, returns\n *None* unless required is True. Empty list elements will be\n discarded. For example a query string containing this::\n\n things=1,,3\n\n or a query string containing this::\n\n things=1&things=&things=3\n\n would both result in::\n\n ['1', '3']\n\n Raises\n HTTPBadRequest: The param was not found in the request, but was\n required.\n \"\"\"\n\n params = self._params\n\n # PERF: Use if..in since it is a good all-around performer; we don't\n # know how likely params are to be specified by clients.\n if name in params:\n items = params[name]\n\n # NOTE(warsaw): When a key appears multiple times in the request\n # query, it will already be represented internally as a list.\n # NOTE(kgriffs): Likewise for comma-delimited values.\n if not isinstance(items, list):\n items = [items]\n\n # PERF(kgriffs): Use if-else rather than a DRY approach\n # that sets transform to a passthrough function; avoids\n # function calling overhead.\n if transform is not None:\n try:\n items = [transform(i) for i in items]\n\n except ValueError:\n msg = 'The value is not formatted correctly.'\n raise HTTPInvalidParam(msg, name)\n\n if store is not None:\n store[name] = items\n\n return items\n\n if not required:\n return None\n\n raise HTTPMissingParam(name)\n\n # TODO(kgriffs): Use the nocover pragma only for the six.PY3 if..else\n def log_error(self, message): # pragma: no cover\n \"\"\"Write an error message to the server's log.\n\n Prepends timestamp and request info to message, and writes the\n result out to the WSGI server's error stream (`wsgi.error`).\n\n Args:\n message (str): A string describing the problem. If a byte-string\n it is simply written out as-is. 
Unicode strings will be\n converted to UTF-8.\n\n \"\"\"\n\n if self.query_string:\n query_string_formatted = '?' + self.query_string\n else:\n query_string_formatted = ''\n\n log_line = (\n DEFAULT_ERROR_LOG_FORMAT.\n format(datetime.now(), self.method, self.path,\n query_string_formatted)\n )\n\n if six.PY3:\n self._wsgierrors.write(log_line + message + '\\n')\n else:\n if isinstance(message, unicode):\n message = message.encode('utf-8')\n\n self._wsgierrors.write(log_line.encode('utf-8'))\n self._wsgierrors.write(message + '\\n')\n\n # ------------------------------------------------------------------------\n # Helpers\n # ------------------------------------------------------------------------\n\n def _wrap_stream(self): # pragma nocover\n try:\n # NOTE(kgriffs): We can only add the wrapper if the\n # content-length header was provided.\n if self.content_length is not None:\n self.stream = helpers.Body(self.stream, self.content_length)\n\n except HTTPInvalidHeader:\n # NOTE(kgriffs): The content-length header was specified,\n # but it had an invalid value.\n pass\n\n def _parse_form_urlencoded(self):\n # NOTE(kgriffs): This assumes self.stream has been patched\n # above in the case of wsgiref, so that self.content_length\n # is not needed. Normally we just avoid accessing\n # self.content_length, because it is a little expensive\n # to call. We could cache self.content_length, but the\n # overhead to do that won't usually be helpful, since\n # content length will only ever be read once per\n # request in most cases.\n body = self.stream.read()\n\n # NOTE(kgriffs): According to http://goo.gl/6rlcux the\n # body should be US-ASCII. Enforcing this also helps\n # catch malicious input.\n try:\n body = body.decode('ascii')\n except UnicodeDecodeError:\n body = None\n self.log_error('Non-ASCII characters found in form body '\n 'with Content-Type of '\n 'application/x-www-form-urlencoded. Body '\n 'will be ignored.')\n\n if body:\n extra_params = uri.parse_query_string(\n uri.decode(body),\n keep_blank_qs_values=self.options.keep_blank_qs_values,\n )\n\n self._params.update(extra_params)\n\n\n# PERF: To avoid typos and improve storage space and speed over a dict.\nclass RequestOptions(object):\n \"\"\"This class is a container for Request options.\n\n Attributes:\n keep_blank_qs_values (bool): Set to ``True`` in order to retain\n blank values in query string parameters (default ``False``.)\n\n \"\"\"\n __slots__ = (\n 'keep_blank_qs_values',\n )\n\n def __init__(self):\n self.keep_blank_qs_values = False\n", "path": "falcon/request.py" } ]
diff --git a/falcon/request.py b/falcon/request.py
index 8be41c405..58ffb4755 100644
--- a/falcon/request.py
+++ b/falcon/request.py
@@ -340,6 +340,8 @@ def date(self):
     def range(self):
         try:
             value = self.env['HTTP_RANGE']
+            if value.startswith('bytes='):
+                value = value[6:]
         except KeyError:
             return None
 
diff --git a/tests/test_req_vars.py b/tests/test_req_vars.py
index 7123eb631..904d622ce 100644
--- a/tests/test_req_vars.py
+++ b/tests/test_req_vars.py
@@ -380,6 +380,10 @@ def test_range(self):
         req = Request(testing.create_environ(headers=headers))
         self.assertEqual(req.range, (-10240, -1))
 
+        headers = {'Range': 'bytes=0-2'}
+        req = Request(testing.create_environ(headers=headers))
+        self.assertEqual(req.range, (0, 2))
+
         headers = {'Range': ''}
         req = Request(testing.create_environ(headers=headers))
         self.assertRaises(falcon.HTTPInvalidHeader, lambda: req.range)
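The patch above makes the `range` property strip a leading `bytes=` unit prefix before splitting the header value, which is what the new `bytes=0-2` test exercises. The standalone sketch below mirrors that parsing behaviour for illustration only; `parse_range` is a hypothetical helper, not Falcon's implementation, and its handling of open-ended ranges is an assumption.

```python
# Standalone sketch of the byte-range parsing behaviour exercised by the new
# test above. parse_range() is a hypothetical helper written for illustration
# only -- it is not Falcon's implementation.

def parse_range(header_value):
    """Map a Range header value to a (first, last) tuple.

    'bytes=0-2'    -> (0, 2)
    'bytes=-10240' -> (-10240, -1)   # suffix range: the final 10240 bytes
    """
    if header_value.startswith('bytes='):
        # The fix above: drop the unit prefix before splitting the range.
        header_value = header_value[6:]

    first, _, last = header_value.partition('-')

    if first:
        # '0-2' -> (0, 2); an open-ended '10-' is assumed to map to (10, -1)
        return (int(first), int(last) if last else -1)

    # '-10240' -> (-10240, -1): the client wants the last N bytes
    return (-int(last), -1)


if __name__ == '__main__':
    assert parse_range('bytes=0-2') == (0, 2)
    assert parse_range('bytes=-10240') == (-10240, -1)
```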
feast-dev__feast-3954
No such option: -f for feast CLI

## Expected Behavior

According to documentation: https://docs.feast.dev/how-to-guides/feast-snowflake-gcp-aws/structuring-repos

```
feast -f staging/feature_store.yaml apply
```

should work

## Current Behavior

```
Usage: feast [OPTIONS] COMMAND [ARGS]...
Try 'feast --help' for help.

Error: No such option: -f
```

## Steps to reproduce

### Specifications

- Version: 0.34.1
- Platform: Linux
- Subsystem:
[ { "content": "# Copyright 2019 The Feast Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport json\nimport logging\nfrom datetime import datetime\nfrom pathlib import Path\nfrom typing import List, Optional\n\nimport click\nimport yaml\nfrom colorama import Fore, Style\nfrom dateutil import parser\nfrom importlib_metadata import version as importlib_version\nfrom pygments import formatters, highlight, lexers\n\nfrom feast import utils\nfrom feast.constants import (\n DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT,\n DEFAULT_REGISTRY_SERVER_PORT,\n)\nfrom feast.errors import FeastObjectNotFoundException, FeastProviderLoginError\nfrom feast.feature_view import FeatureView\nfrom feast.infra.contrib.grpc_server import get_grpc_server\nfrom feast.on_demand_feature_view import OnDemandFeatureView\nfrom feast.repo_config import load_repo_config\nfrom feast.repo_operations import (\n apply_total,\n cli_check_repo,\n create_feature_store,\n generate_project_name,\n init_repo,\n plan,\n registry_dump,\n teardown,\n)\nfrom feast.repo_upgrade import RepoUpgrader\nfrom feast.utils import maybe_local_tz\n\n_logger = logging.getLogger(__name__)\n\n\nclass NoOptionDefaultFormat(click.Command):\n def format_options(self, ctx: click.Context, formatter: click.HelpFormatter):\n \"\"\"Writes all the options into the formatter if they exist.\"\"\"\n opts = []\n for param in self.get_params(ctx):\n rv = param.get_help_record(ctx)\n if rv is not None:\n opts.append(rv)\n if opts:\n with formatter.section(\"Options(No current command options)\"):\n formatter.write_dl(opts)\n\n\[email protected]()\[email protected](\n \"--chdir\",\n \"-c\",\n help=\"Switch to a different feature repository directory before executing the given subcommand.\",\n)\[email protected](\n \"--log-level\",\n default=\"warning\",\n help=\"The logging level. 
One of DEBUG, INFO, WARNING, ERROR, and CRITICAL (case-insensitive).\",\n)\[email protected](\n \"--feature-store-yaml\",\n help=\"Override the directory where the CLI should look for the feature_store.yaml file.\",\n)\[email protected]_context\ndef cli(\n ctx: click.Context,\n chdir: Optional[str],\n log_level: str,\n feature_store_yaml: Optional[str],\n):\n \"\"\"\n Feast CLI\n\n For more information, see our public docs at https://docs.feast.dev/\n \"\"\"\n ctx.ensure_object(dict)\n ctx.obj[\"CHDIR\"] = Path.cwd() if chdir is None else Path(chdir).absolute()\n ctx.obj[\"FS_YAML_FILE\"] = (\n Path(feature_store_yaml).absolute()\n if feature_store_yaml\n else utils.get_default_yaml_file_path(ctx.obj[\"CHDIR\"])\n )\n try:\n level = getattr(logging, log_level.upper())\n logging.basicConfig(\n format=\"%(asctime)s %(name)s %(levelname)s: %(message)s\",\n datefmt=\"%m/%d/%Y %I:%M:%S %p\",\n level=level,\n )\n # Override the logging level for already created loggers (due to loggers being created at the import time)\n # Note, that format & datefmt does not need to be set, because by default child loggers don't override them\n\n # Also note, that mypy complains that logging.root doesn't have \"manager\" because of the way it's written.\n # So we have to put a type ignore hint for mypy.\n for logger_name in logging.root.manager.loggerDict: # type: ignore\n if \"feast\" in logger_name:\n logger = logging.getLogger(logger_name)\n logger.setLevel(level)\n except Exception as e:\n raise e\n pass\n\n\[email protected]()\ndef version():\n \"\"\"\n Display Feast SDK version\n \"\"\"\n print(f'Feast SDK Version: \"{importlib_version(\"feast\")}\"')\n\n\[email protected]()\[email protected](\n \"--host\",\n \"-h\",\n type=click.STRING,\n default=\"0.0.0.0\",\n show_default=True,\n help=\"Specify a host for the server\",\n)\[email protected](\n \"--port\",\n \"-p\",\n type=click.INT,\n default=8888,\n show_default=True,\n help=\"Specify a port for the server\",\n)\[email protected](\n \"--registry_ttl_sec\",\n \"-r\",\n help=\"Number of seconds after which the registry is refreshed\",\n type=click.INT,\n default=5,\n show_default=True,\n)\[email protected](\n \"--root_path\",\n help=\"Provide root path to make the UI working behind proxy\",\n type=click.STRING,\n default=\"\",\n)\[email protected]_context\ndef ui(\n ctx: click.Context,\n host: str,\n port: int,\n registry_ttl_sec: int,\n root_path: Optional[str] = \"\",\n):\n \"\"\"\n Shows the Feast UI over the current directory\n \"\"\"\n store = create_feature_store(ctx)\n # Pass in the registry_dump method to get around a circular dependency\n store.serve_ui(\n host=host,\n port=port,\n get_registry_dump=registry_dump,\n registry_ttl_sec=registry_ttl_sec,\n root_path=root_path,\n )\n\n\[email protected]()\[email protected]_context\ndef endpoint(ctx: click.Context):\n \"\"\"\n Display feature server endpoints\n \"\"\"\n store = create_feature_store(ctx)\n endpoint = store.get_feature_server_endpoint()\n if endpoint is not None:\n _logger.info(\n f\"Feature server endpoint: {Style.BRIGHT + Fore.GREEN}{endpoint}{Style.RESET_ALL}\"\n )\n else:\n _logger.info(\"There is no active feature server.\")\n\n\[email protected](name=\"data-sources\")\ndef data_sources_cmd():\n \"\"\"\n Access data sources\n \"\"\"\n pass\n\n\n@data_sources_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef data_source_describe(ctx: click.Context, name: str):\n \"\"\"\n Describe a data source\n \"\"\"\n store = 
create_feature_store(ctx)\n\n try:\n data_source = store.get_data_source(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(data_source)), default_flow_style=False, sort_keys=False\n )\n )\n\n\n@data_sources_cmd.command(name=\"list\")\[email protected]_context\ndef data_source_list(ctx: click.Context):\n \"\"\"\n List all data sources\n \"\"\"\n store = create_feature_store(ctx)\n table = []\n for datasource in store.list_data_sources():\n table.append([datasource.name, datasource.__class__])\n\n from tabulate import tabulate\n\n print(tabulate(table, headers=[\"NAME\", \"CLASS\"], tablefmt=\"plain\"))\n\n\[email protected](name=\"entities\")\ndef entities_cmd():\n \"\"\"\n Access entities\n \"\"\"\n pass\n\n\n@entities_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef entity_describe(ctx: click.Context, name: str):\n \"\"\"\n Describe an entity\n \"\"\"\n store = create_feature_store(ctx)\n\n try:\n entity = store.get_entity(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(entity)), default_flow_style=False, sort_keys=False\n )\n )\n\n\n@entities_cmd.command(name=\"list\")\[email protected]_context\ndef entity_list(ctx: click.Context):\n \"\"\"\n List all entities\n \"\"\"\n store = create_feature_store(ctx)\n table = []\n for entity in store.list_entities():\n table.append([entity.name, entity.description, entity.value_type])\n\n from tabulate import tabulate\n\n print(tabulate(table, headers=[\"NAME\", \"DESCRIPTION\", \"TYPE\"], tablefmt=\"plain\"))\n\n\[email protected](name=\"feature-services\")\ndef feature_services_cmd():\n \"\"\"\n Access feature services\n \"\"\"\n pass\n\n\n@feature_services_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef feature_service_describe(ctx: click.Context, name: str):\n \"\"\"\n Describe a feature service\n \"\"\"\n store = create_feature_store(ctx)\n\n try:\n feature_service = store.get_feature_service(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(feature_service)),\n default_flow_style=False,\n sort_keys=False,\n )\n )\n\n\n@feature_services_cmd.command(name=\"list\")\[email protected]_context\ndef feature_service_list(ctx: click.Context):\n \"\"\"\n List all feature services\n \"\"\"\n store = create_feature_store(ctx)\n feature_services = []\n for feature_service in store.list_feature_services():\n feature_names = []\n for projection in feature_service.feature_view_projections:\n feature_names.extend(\n [f\"{projection.name}:{feature.name}\" for feature in projection.features]\n )\n feature_services.append([feature_service.name, \", \".join(feature_names)])\n\n from tabulate import tabulate\n\n print(tabulate(feature_services, headers=[\"NAME\", \"FEATURES\"], tablefmt=\"plain\"))\n\n\[email protected](name=\"feature-views\")\ndef feature_views_cmd():\n \"\"\"\n Access feature views\n \"\"\"\n pass\n\n\n@feature_views_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef feature_view_describe(ctx: click.Context, name: str):\n \"\"\"\n Describe a feature view\n \"\"\"\n store = create_feature_store(ctx)\n\n try:\n feature_view = store.get_feature_view(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(feature_view)), 
default_flow_style=False, sort_keys=False\n )\n )\n\n\n@feature_views_cmd.command(name=\"list\")\[email protected]_context\ndef feature_view_list(ctx: click.Context):\n \"\"\"\n List all feature views\n \"\"\"\n store = create_feature_store(ctx)\n table = []\n for feature_view in [\n *store.list_feature_views(),\n *store.list_request_feature_views(),\n *store.list_on_demand_feature_views(),\n ]:\n entities = set()\n if isinstance(feature_view, FeatureView):\n entities.update(feature_view.entities)\n elif isinstance(feature_view, OnDemandFeatureView):\n for backing_fv in feature_view.source_feature_view_projections.values():\n entities.update(store.get_feature_view(backing_fv.name).entities)\n table.append(\n [\n feature_view.name,\n entities if len(entities) > 0 else \"n/a\",\n type(feature_view).__name__,\n ]\n )\n\n from tabulate import tabulate\n\n print(tabulate(table, headers=[\"NAME\", \"ENTITIES\", \"TYPE\"], tablefmt=\"plain\"))\n\n\[email protected](name=\"on-demand-feature-views\")\ndef on_demand_feature_views_cmd():\n \"\"\"\n [Experimental] Access on demand feature views\n \"\"\"\n pass\n\n\n@on_demand_feature_views_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef on_demand_feature_view_describe(ctx: click.Context, name: str):\n \"\"\"\n [Experimental] Describe an on demand feature view\n \"\"\"\n store = create_feature_store(ctx)\n\n try:\n on_demand_feature_view = store.get_on_demand_feature_view(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(on_demand_feature_view)),\n default_flow_style=False,\n sort_keys=False,\n )\n )\n\n\n@on_demand_feature_views_cmd.command(name=\"list\")\[email protected]_context\ndef on_demand_feature_view_list(ctx: click.Context):\n \"\"\"\n [Experimental] List all on demand feature views\n \"\"\"\n store = create_feature_store(ctx)\n table = []\n for on_demand_feature_view in store.list_on_demand_feature_views():\n table.append([on_demand_feature_view.name])\n\n from tabulate import tabulate\n\n print(tabulate(table, headers=[\"NAME\"], tablefmt=\"plain\"))\n\n\[email protected](\"plan\", cls=NoOptionDefaultFormat)\[email protected](\n \"--skip-source-validation\",\n is_flag=True,\n help=\"Don't validate the data sources by checking for that the tables exist.\",\n)\[email protected]_context\ndef plan_command(ctx: click.Context, skip_source_validation: bool):\n \"\"\"\n Create or update a feature store deployment\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n repo_config = load_repo_config(repo, fs_yaml_file)\n try:\n plan(repo_config, repo, skip_source_validation)\n except FeastProviderLoginError as e:\n print(str(e))\n\n\[email protected](\"apply\", cls=NoOptionDefaultFormat)\[email protected](\n \"--skip-source-validation\",\n is_flag=True,\n help=\"Don't validate the data sources by checking for that the tables exist.\",\n)\[email protected]_context\ndef apply_total_command(ctx: click.Context, skip_source_validation: bool):\n \"\"\"\n Create or update a feature store deployment\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n\n repo_config = load_repo_config(repo, fs_yaml_file)\n try:\n apply_total(repo_config, repo, skip_source_validation)\n except FeastProviderLoginError as e:\n print(str(e))\n\n\[email protected](\"teardown\", cls=NoOptionDefaultFormat)\[email 
protected]_context\ndef teardown_command(ctx: click.Context):\n \"\"\"\n Tear down deployed feature store infrastructure\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n repo_config = load_repo_config(repo, fs_yaml_file)\n\n teardown(repo_config, repo)\n\n\[email protected](\"registry-dump\")\[email protected]_context\ndef registry_dump_command(ctx: click.Context):\n \"\"\"\n Print contents of the metadata registry\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n repo_config = load_repo_config(repo, fs_yaml_file)\n\n click.echo(registry_dump(repo_config, repo_path=repo))\n\n\[email protected](\"materialize\")\[email protected](\"start_ts\")\[email protected](\"end_ts\")\[email protected](\n \"--views\",\n \"-v\",\n help=\"Feature views to materialize\",\n multiple=True,\n)\[email protected]_context\ndef materialize_command(\n ctx: click.Context, start_ts: str, end_ts: str, views: List[str]\n):\n \"\"\"\n Run a (non-incremental) materialization job to ingest data into the online store. Feast\n will read all data between START_TS and END_TS from the offline store and write it to the\n online store. If you don't specify feature view names using --views, all registered Feature\n Views will be materialized.\n\n START_TS and END_TS should be in ISO 8601 format, e.g. '2021-07-16T19:20:01'\n \"\"\"\n store = create_feature_store(ctx)\n\n store.materialize(\n feature_views=None if not views else views,\n start_date=utils.make_tzaware(parser.parse(start_ts)),\n end_date=utils.make_tzaware(parser.parse(end_ts)),\n )\n\n\[email protected](\"materialize-incremental\")\[email protected](\"end_ts\")\[email protected](\n \"--views\",\n \"-v\",\n help=\"Feature views to incrementally materialize\",\n multiple=True,\n)\[email protected]_context\ndef materialize_incremental_command(ctx: click.Context, end_ts: str, views: List[str]):\n \"\"\"\n Run an incremental materialization job to ingest new data into the online store. Feast will read\n all data from the previously ingested point to END_TS from the offline store and write it to the\n online store. If you don't specify feature view names using --views, all registered Feature\n Views will be incrementally materialized.\n\n END_TS should be in ISO 8601 format, e.g. 
'2021-07-16T19:20:01'\n \"\"\"\n store = create_feature_store(ctx)\n store.materialize_incremental(\n feature_views=None if not views else views,\n end_date=utils.make_tzaware(datetime.fromisoformat(end_ts)),\n )\n\n\[email protected](\"init\")\[email protected](\"PROJECT_DIRECTORY\", required=False)\[email protected](\n \"--minimal\", \"-m\", is_flag=True, help=\"Create an empty project repository\"\n)\[email protected](\n \"--template\",\n \"-t\",\n type=click.Choice(\n [\n \"local\",\n \"gcp\",\n \"aws\",\n \"snowflake\",\n \"spark\",\n \"postgres\",\n \"hbase\",\n \"cassandra\",\n \"rockset\",\n \"hazelcast\",\n ],\n case_sensitive=False,\n ),\n help=\"Specify a template for the created project\",\n default=\"local\",\n)\ndef init_command(project_directory, minimal: bool, template: str):\n \"\"\"Create a new Feast repository\"\"\"\n if not project_directory:\n project_directory = generate_project_name()\n\n if minimal:\n template = \"minimal\"\n\n init_repo(project_directory, template)\n\n\[email protected](\"serve\")\[email protected](\n \"--host\",\n \"-h\",\n type=click.STRING,\n default=\"127.0.0.1\",\n show_default=True,\n help=\"Specify a host for the server\",\n)\[email protected](\n \"--port\",\n \"-p\",\n type=click.INT,\n default=6566,\n show_default=True,\n help=\"Specify a port for the server\",\n)\[email protected](\n \"--type\",\n \"-t\",\n \"type_\",\n type=click.STRING,\n default=\"http\",\n show_default=True,\n help=\"Specify a server type: 'http' or 'grpc'\",\n)\[email protected](\n \"--no-access-log\",\n is_flag=True,\n show_default=True,\n help=\"Disable the Uvicorn access log\",\n)\[email protected](\n \"--no-feature-log\",\n is_flag=True,\n show_default=True,\n help=\"Disable logging served features\",\n)\[email protected](\n \"--workers\",\n \"-w\",\n type=click.INT,\n default=1,\n show_default=True,\n help=\"Number of worker\",\n)\[email protected](\n \"--keep-alive-timeout\",\n type=click.INT,\n default=5,\n show_default=True,\n help=\"Timeout for keep alive\",\n)\[email protected](\n \"--registry_ttl_sec\",\n \"-r\",\n help=\"Number of seconds after which the registry is refreshed\",\n type=click.INT,\n default=5,\n show_default=True,\n)\[email protected]_context\ndef serve_command(\n ctx: click.Context,\n host: str,\n port: int,\n type_: str,\n no_access_log: bool,\n no_feature_log: bool,\n workers: int,\n keep_alive_timeout: int,\n registry_ttl_sec: int = 5,\n):\n \"\"\"Start a feature server locally on a given port.\"\"\"\n store = create_feature_store(ctx)\n\n store.serve(\n host=host,\n port=port,\n type_=type_,\n no_access_log=no_access_log,\n no_feature_log=no_feature_log,\n workers=workers,\n keep_alive_timeout=keep_alive_timeout,\n registry_ttl_sec=registry_ttl_sec,\n )\n\n\[email protected](\"listen\")\[email protected](\n \"--address\",\n \"-a\",\n type=click.STRING,\n default=\"localhost:50051\",\n show_default=True,\n help=\"Address of the gRPC server\",\n)\[email protected](\n \"--max_workers\",\n \"-w\",\n type=click.INT,\n default=10,\n show_default=False,\n help=\"The maximum number of threads that can be used to execute the gRPC calls\",\n)\[email protected](\n \"--registry_ttl_sec\",\n \"-r\",\n help=\"Number of seconds after which the registry is refreshed\",\n type=click.INT,\n default=5,\n show_default=True,\n)\[email protected]_context\ndef listen_command(\n ctx: click.Context,\n address: str,\n max_workers: int,\n registry_ttl_sec: int,\n):\n \"\"\"Start a gRPC feature server to ingest streaming features on given address\"\"\"\n store = 
create_feature_store(ctx)\n server = get_grpc_server(address, store, max_workers, registry_ttl_sec)\n server.start()\n server.wait_for_termination()\n\n\[email protected](\"serve_transformations\")\[email protected](\n \"--port\",\n \"-p\",\n type=click.INT,\n default=DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT,\n help=\"Specify a port for the server\",\n)\[email protected]_context\ndef serve_transformations_command(ctx: click.Context, port: int):\n \"\"\"[Experimental] Start a feature consumption server locally on a given port.\"\"\"\n store = create_feature_store(ctx)\n\n store.serve_transformations(port)\n\n\[email protected](\"serve_registry\")\[email protected](\n \"--port\",\n \"-p\",\n type=click.INT,\n default=DEFAULT_REGISTRY_SERVER_PORT,\n help=\"Specify a port for the server\",\n)\[email protected]_context\ndef serve_registry_command(ctx: click.Context, port: int):\n \"\"\"Start a registry server locally on a given port.\"\"\"\n store = create_feature_store(ctx)\n\n store.serve_registry(port)\n\n\[email protected](\"validate\")\[email protected](\n \"--feature-service\",\n \"-f\",\n help=\"Specify a feature service name\",\n)\[email protected](\n \"--reference\",\n \"-r\",\n help=\"Specify a validation reference name\",\n)\[email protected](\n \"--no-profile-cache\",\n is_flag=True,\n help=\"Do not store cached profile in registry\",\n)\[email protected](\"start_ts\")\[email protected](\"end_ts\")\[email protected]_context\ndef validate(\n ctx: click.Context,\n feature_service: str,\n reference: str,\n start_ts: str,\n end_ts: str,\n no_profile_cache,\n):\n \"\"\"\n Perform validation of logged features (produced by a given feature service) against provided reference.\n\n START_TS and END_TS should be in ISO 8601 format, e.g. '2021-07-16T19:20:01'\n \"\"\"\n store = create_feature_store(ctx)\n\n feature_service = store.get_feature_service(name=feature_service)\n reference = store.get_validation_reference(reference)\n\n result = store.validate_logged_features(\n source=feature_service,\n reference=reference,\n start=maybe_local_tz(datetime.fromisoformat(start_ts)),\n end=maybe_local_tz(datetime.fromisoformat(end_ts)),\n throw_exception=False,\n cache_profile=not no_profile_cache,\n )\n\n if not result:\n print(f\"{Style.BRIGHT + Fore.GREEN}Validation successful!{Style.RESET_ALL}\")\n return\n\n errors = [e.to_dict() for e in result.report.errors]\n formatted_json = json.dumps(errors, indent=4)\n colorful_json = highlight(\n formatted_json, lexers.JsonLexer(), formatters.TerminalFormatter()\n )\n print(f\"{Style.BRIGHT + Fore.RED}Validation failed!{Style.RESET_ALL}\")\n print(colorful_json)\n exit(1)\n\n\[email protected](\"repo-upgrade\", cls=NoOptionDefaultFormat)\[email protected](\n \"--write\",\n is_flag=True,\n default=False,\n help=\"Upgrade a feature repo to use the API expected by feast 0.23.\",\n)\[email protected]_context\ndef repo_upgrade(ctx: click.Context, write: bool):\n \"\"\"\n Upgrade a feature repo in place.\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n try:\n RepoUpgrader(repo, write).upgrade()\n except FeastProviderLoginError as e:\n print(str(e))\n\n\nif __name__ == \"__main__\":\n cli()\n", "path": "sdk/python/feast/cli.py" } ]
[ { "content": "# Copyright 2019 The Feast Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport json\nimport logging\nfrom datetime import datetime\nfrom pathlib import Path\nfrom typing import List, Optional\n\nimport click\nimport yaml\nfrom colorama import Fore, Style\nfrom dateutil import parser\nfrom importlib_metadata import version as importlib_version\nfrom pygments import formatters, highlight, lexers\n\nfrom feast import utils\nfrom feast.constants import (\n DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT,\n DEFAULT_REGISTRY_SERVER_PORT,\n)\nfrom feast.errors import FeastObjectNotFoundException, FeastProviderLoginError\nfrom feast.feature_view import FeatureView\nfrom feast.infra.contrib.grpc_server import get_grpc_server\nfrom feast.on_demand_feature_view import OnDemandFeatureView\nfrom feast.repo_config import load_repo_config\nfrom feast.repo_operations import (\n apply_total,\n cli_check_repo,\n create_feature_store,\n generate_project_name,\n init_repo,\n plan,\n registry_dump,\n teardown,\n)\nfrom feast.repo_upgrade import RepoUpgrader\nfrom feast.utils import maybe_local_tz\n\n_logger = logging.getLogger(__name__)\n\n\nclass NoOptionDefaultFormat(click.Command):\n def format_options(self, ctx: click.Context, formatter: click.HelpFormatter):\n \"\"\"Writes all the options into the formatter if they exist.\"\"\"\n opts = []\n for param in self.get_params(ctx):\n rv = param.get_help_record(ctx)\n if rv is not None:\n opts.append(rv)\n if opts:\n with formatter.section(\"Options(No current command options)\"):\n formatter.write_dl(opts)\n\n\[email protected]()\[email protected](\n \"--chdir\",\n \"-c\",\n help=\"Switch to a different feature repository directory before executing the given subcommand.\",\n)\[email protected](\n \"--log-level\",\n default=\"warning\",\n help=\"The logging level. 
One of DEBUG, INFO, WARNING, ERROR, and CRITICAL (case-insensitive).\",\n)\[email protected](\n \"--feature-store-yaml\",\n \"-f\",\n help=\"Override the directory where the CLI should look for the feature_store.yaml file.\",\n)\[email protected]_context\ndef cli(\n ctx: click.Context,\n chdir: Optional[str],\n log_level: str,\n feature_store_yaml: Optional[str],\n):\n \"\"\"\n Feast CLI\n\n For more information, see our public docs at https://docs.feast.dev/\n \"\"\"\n ctx.ensure_object(dict)\n ctx.obj[\"CHDIR\"] = Path.cwd() if chdir is None else Path(chdir).absolute()\n ctx.obj[\"FS_YAML_FILE\"] = (\n Path(feature_store_yaml).absolute()\n if feature_store_yaml\n else utils.get_default_yaml_file_path(ctx.obj[\"CHDIR\"])\n )\n try:\n level = getattr(logging, log_level.upper())\n logging.basicConfig(\n format=\"%(asctime)s %(name)s %(levelname)s: %(message)s\",\n datefmt=\"%m/%d/%Y %I:%M:%S %p\",\n level=level,\n )\n # Override the logging level for already created loggers (due to loggers being created at the import time)\n # Note, that format & datefmt does not need to be set, because by default child loggers don't override them\n\n # Also note, that mypy complains that logging.root doesn't have \"manager\" because of the way it's written.\n # So we have to put a type ignore hint for mypy.\n for logger_name in logging.root.manager.loggerDict: # type: ignore\n if \"feast\" in logger_name:\n logger = logging.getLogger(logger_name)\n logger.setLevel(level)\n except Exception as e:\n raise e\n pass\n\n\[email protected]()\ndef version():\n \"\"\"\n Display Feast SDK version\n \"\"\"\n print(f'Feast SDK Version: \"{importlib_version(\"feast\")}\"')\n\n\[email protected]()\[email protected](\n \"--host\",\n \"-h\",\n type=click.STRING,\n default=\"0.0.0.0\",\n show_default=True,\n help=\"Specify a host for the server\",\n)\[email protected](\n \"--port\",\n \"-p\",\n type=click.INT,\n default=8888,\n show_default=True,\n help=\"Specify a port for the server\",\n)\[email protected](\n \"--registry_ttl_sec\",\n \"-r\",\n help=\"Number of seconds after which the registry is refreshed\",\n type=click.INT,\n default=5,\n show_default=True,\n)\[email protected](\n \"--root_path\",\n help=\"Provide root path to make the UI working behind proxy\",\n type=click.STRING,\n default=\"\",\n)\[email protected]_context\ndef ui(\n ctx: click.Context,\n host: str,\n port: int,\n registry_ttl_sec: int,\n root_path: Optional[str] = \"\",\n):\n \"\"\"\n Shows the Feast UI over the current directory\n \"\"\"\n store = create_feature_store(ctx)\n # Pass in the registry_dump method to get around a circular dependency\n store.serve_ui(\n host=host,\n port=port,\n get_registry_dump=registry_dump,\n registry_ttl_sec=registry_ttl_sec,\n root_path=root_path,\n )\n\n\[email protected]()\[email protected]_context\ndef endpoint(ctx: click.Context):\n \"\"\"\n Display feature server endpoints\n \"\"\"\n store = create_feature_store(ctx)\n endpoint = store.get_feature_server_endpoint()\n if endpoint is not None:\n _logger.info(\n f\"Feature server endpoint: {Style.BRIGHT + Fore.GREEN}{endpoint}{Style.RESET_ALL}\"\n )\n else:\n _logger.info(\"There is no active feature server.\")\n\n\[email protected](name=\"data-sources\")\ndef data_sources_cmd():\n \"\"\"\n Access data sources\n \"\"\"\n pass\n\n\n@data_sources_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef data_source_describe(ctx: click.Context, name: str):\n \"\"\"\n Describe a data source\n \"\"\"\n store = 
create_feature_store(ctx)\n\n try:\n data_source = store.get_data_source(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(data_source)), default_flow_style=False, sort_keys=False\n )\n )\n\n\n@data_sources_cmd.command(name=\"list\")\[email protected]_context\ndef data_source_list(ctx: click.Context):\n \"\"\"\n List all data sources\n \"\"\"\n store = create_feature_store(ctx)\n table = []\n for datasource in store.list_data_sources():\n table.append([datasource.name, datasource.__class__])\n\n from tabulate import tabulate\n\n print(tabulate(table, headers=[\"NAME\", \"CLASS\"], tablefmt=\"plain\"))\n\n\[email protected](name=\"entities\")\ndef entities_cmd():\n \"\"\"\n Access entities\n \"\"\"\n pass\n\n\n@entities_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef entity_describe(ctx: click.Context, name: str):\n \"\"\"\n Describe an entity\n \"\"\"\n store = create_feature_store(ctx)\n\n try:\n entity = store.get_entity(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(entity)), default_flow_style=False, sort_keys=False\n )\n )\n\n\n@entities_cmd.command(name=\"list\")\[email protected]_context\ndef entity_list(ctx: click.Context):\n \"\"\"\n List all entities\n \"\"\"\n store = create_feature_store(ctx)\n table = []\n for entity in store.list_entities():\n table.append([entity.name, entity.description, entity.value_type])\n\n from tabulate import tabulate\n\n print(tabulate(table, headers=[\"NAME\", \"DESCRIPTION\", \"TYPE\"], tablefmt=\"plain\"))\n\n\[email protected](name=\"feature-services\")\ndef feature_services_cmd():\n \"\"\"\n Access feature services\n \"\"\"\n pass\n\n\n@feature_services_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef feature_service_describe(ctx: click.Context, name: str):\n \"\"\"\n Describe a feature service\n \"\"\"\n store = create_feature_store(ctx)\n\n try:\n feature_service = store.get_feature_service(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(feature_service)),\n default_flow_style=False,\n sort_keys=False,\n )\n )\n\n\n@feature_services_cmd.command(name=\"list\")\[email protected]_context\ndef feature_service_list(ctx: click.Context):\n \"\"\"\n List all feature services\n \"\"\"\n store = create_feature_store(ctx)\n feature_services = []\n for feature_service in store.list_feature_services():\n feature_names = []\n for projection in feature_service.feature_view_projections:\n feature_names.extend(\n [f\"{projection.name}:{feature.name}\" for feature in projection.features]\n )\n feature_services.append([feature_service.name, \", \".join(feature_names)])\n\n from tabulate import tabulate\n\n print(tabulate(feature_services, headers=[\"NAME\", \"FEATURES\"], tablefmt=\"plain\"))\n\n\[email protected](name=\"feature-views\")\ndef feature_views_cmd():\n \"\"\"\n Access feature views\n \"\"\"\n pass\n\n\n@feature_views_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef feature_view_describe(ctx: click.Context, name: str):\n \"\"\"\n Describe a feature view\n \"\"\"\n store = create_feature_store(ctx)\n\n try:\n feature_view = store.get_feature_view(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(feature_view)), 
default_flow_style=False, sort_keys=False\n )\n )\n\n\n@feature_views_cmd.command(name=\"list\")\[email protected]_context\ndef feature_view_list(ctx: click.Context):\n \"\"\"\n List all feature views\n \"\"\"\n store = create_feature_store(ctx)\n table = []\n for feature_view in [\n *store.list_feature_views(),\n *store.list_request_feature_views(),\n *store.list_on_demand_feature_views(),\n ]:\n entities = set()\n if isinstance(feature_view, FeatureView):\n entities.update(feature_view.entities)\n elif isinstance(feature_view, OnDemandFeatureView):\n for backing_fv in feature_view.source_feature_view_projections.values():\n entities.update(store.get_feature_view(backing_fv.name).entities)\n table.append(\n [\n feature_view.name,\n entities if len(entities) > 0 else \"n/a\",\n type(feature_view).__name__,\n ]\n )\n\n from tabulate import tabulate\n\n print(tabulate(table, headers=[\"NAME\", \"ENTITIES\", \"TYPE\"], tablefmt=\"plain\"))\n\n\[email protected](name=\"on-demand-feature-views\")\ndef on_demand_feature_views_cmd():\n \"\"\"\n [Experimental] Access on demand feature views\n \"\"\"\n pass\n\n\n@on_demand_feature_views_cmd.command(\"describe\")\[email protected](\"name\", type=click.STRING)\[email protected]_context\ndef on_demand_feature_view_describe(ctx: click.Context, name: str):\n \"\"\"\n [Experimental] Describe an on demand feature view\n \"\"\"\n store = create_feature_store(ctx)\n\n try:\n on_demand_feature_view = store.get_on_demand_feature_view(name)\n except FeastObjectNotFoundException as e:\n print(e)\n exit(1)\n\n print(\n yaml.dump(\n yaml.safe_load(str(on_demand_feature_view)),\n default_flow_style=False,\n sort_keys=False,\n )\n )\n\n\n@on_demand_feature_views_cmd.command(name=\"list\")\[email protected]_context\ndef on_demand_feature_view_list(ctx: click.Context):\n \"\"\"\n [Experimental] List all on demand feature views\n \"\"\"\n store = create_feature_store(ctx)\n table = []\n for on_demand_feature_view in store.list_on_demand_feature_views():\n table.append([on_demand_feature_view.name])\n\n from tabulate import tabulate\n\n print(tabulate(table, headers=[\"NAME\"], tablefmt=\"plain\"))\n\n\[email protected](\"plan\", cls=NoOptionDefaultFormat)\[email protected](\n \"--skip-source-validation\",\n is_flag=True,\n help=\"Don't validate the data sources by checking for that the tables exist.\",\n)\[email protected]_context\ndef plan_command(ctx: click.Context, skip_source_validation: bool):\n \"\"\"\n Create or update a feature store deployment\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n repo_config = load_repo_config(repo, fs_yaml_file)\n try:\n plan(repo_config, repo, skip_source_validation)\n except FeastProviderLoginError as e:\n print(str(e))\n\n\[email protected](\"apply\", cls=NoOptionDefaultFormat)\[email protected](\n \"--skip-source-validation\",\n is_flag=True,\n help=\"Don't validate the data sources by checking for that the tables exist.\",\n)\[email protected]_context\ndef apply_total_command(ctx: click.Context, skip_source_validation: bool):\n \"\"\"\n Create or update a feature store deployment\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n\n repo_config = load_repo_config(repo, fs_yaml_file)\n try:\n apply_total(repo_config, repo, skip_source_validation)\n except FeastProviderLoginError as e:\n print(str(e))\n\n\[email protected](\"teardown\", cls=NoOptionDefaultFormat)\[email 
protected]_context\ndef teardown_command(ctx: click.Context):\n \"\"\"\n Tear down deployed feature store infrastructure\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n repo_config = load_repo_config(repo, fs_yaml_file)\n\n teardown(repo_config, repo)\n\n\[email protected](\"registry-dump\")\[email protected]_context\ndef registry_dump_command(ctx: click.Context):\n \"\"\"\n Print contents of the metadata registry\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n repo_config = load_repo_config(repo, fs_yaml_file)\n\n click.echo(registry_dump(repo_config, repo_path=repo))\n\n\[email protected](\"materialize\")\[email protected](\"start_ts\")\[email protected](\"end_ts\")\[email protected](\n \"--views\",\n \"-v\",\n help=\"Feature views to materialize\",\n multiple=True,\n)\[email protected]_context\ndef materialize_command(\n ctx: click.Context, start_ts: str, end_ts: str, views: List[str]\n):\n \"\"\"\n Run a (non-incremental) materialization job to ingest data into the online store. Feast\n will read all data between START_TS and END_TS from the offline store and write it to the\n online store. If you don't specify feature view names using --views, all registered Feature\n Views will be materialized.\n\n START_TS and END_TS should be in ISO 8601 format, e.g. '2021-07-16T19:20:01'\n \"\"\"\n store = create_feature_store(ctx)\n\n store.materialize(\n feature_views=None if not views else views,\n start_date=utils.make_tzaware(parser.parse(start_ts)),\n end_date=utils.make_tzaware(parser.parse(end_ts)),\n )\n\n\[email protected](\"materialize-incremental\")\[email protected](\"end_ts\")\[email protected](\n \"--views\",\n \"-v\",\n help=\"Feature views to incrementally materialize\",\n multiple=True,\n)\[email protected]_context\ndef materialize_incremental_command(ctx: click.Context, end_ts: str, views: List[str]):\n \"\"\"\n Run an incremental materialization job to ingest new data into the online store. Feast will read\n all data from the previously ingested point to END_TS from the offline store and write it to the\n online store. If you don't specify feature view names using --views, all registered Feature\n Views will be incrementally materialized.\n\n END_TS should be in ISO 8601 format, e.g. 
'2021-07-16T19:20:01'\n \"\"\"\n store = create_feature_store(ctx)\n store.materialize_incremental(\n feature_views=None if not views else views,\n end_date=utils.make_tzaware(datetime.fromisoformat(end_ts)),\n )\n\n\[email protected](\"init\")\[email protected](\"PROJECT_DIRECTORY\", required=False)\[email protected](\n \"--minimal\", \"-m\", is_flag=True, help=\"Create an empty project repository\"\n)\[email protected](\n \"--template\",\n \"-t\",\n type=click.Choice(\n [\n \"local\",\n \"gcp\",\n \"aws\",\n \"snowflake\",\n \"spark\",\n \"postgres\",\n \"hbase\",\n \"cassandra\",\n \"rockset\",\n \"hazelcast\",\n ],\n case_sensitive=False,\n ),\n help=\"Specify a template for the created project\",\n default=\"local\",\n)\ndef init_command(project_directory, minimal: bool, template: str):\n \"\"\"Create a new Feast repository\"\"\"\n if not project_directory:\n project_directory = generate_project_name()\n\n if minimal:\n template = \"minimal\"\n\n init_repo(project_directory, template)\n\n\[email protected](\"serve\")\[email protected](\n \"--host\",\n \"-h\",\n type=click.STRING,\n default=\"127.0.0.1\",\n show_default=True,\n help=\"Specify a host for the server\",\n)\[email protected](\n \"--port\",\n \"-p\",\n type=click.INT,\n default=6566,\n show_default=True,\n help=\"Specify a port for the server\",\n)\[email protected](\n \"--type\",\n \"-t\",\n \"type_\",\n type=click.STRING,\n default=\"http\",\n show_default=True,\n help=\"Specify a server type: 'http' or 'grpc'\",\n)\[email protected](\n \"--no-access-log\",\n is_flag=True,\n show_default=True,\n help=\"Disable the Uvicorn access log\",\n)\[email protected](\n \"--no-feature-log\",\n is_flag=True,\n show_default=True,\n help=\"Disable logging served features\",\n)\[email protected](\n \"--workers\",\n \"-w\",\n type=click.INT,\n default=1,\n show_default=True,\n help=\"Number of worker\",\n)\[email protected](\n \"--keep-alive-timeout\",\n type=click.INT,\n default=5,\n show_default=True,\n help=\"Timeout for keep alive\",\n)\[email protected](\n \"--registry_ttl_sec\",\n \"-r\",\n help=\"Number of seconds after which the registry is refreshed\",\n type=click.INT,\n default=5,\n show_default=True,\n)\[email protected]_context\ndef serve_command(\n ctx: click.Context,\n host: str,\n port: int,\n type_: str,\n no_access_log: bool,\n no_feature_log: bool,\n workers: int,\n keep_alive_timeout: int,\n registry_ttl_sec: int = 5,\n):\n \"\"\"Start a feature server locally on a given port.\"\"\"\n store = create_feature_store(ctx)\n\n store.serve(\n host=host,\n port=port,\n type_=type_,\n no_access_log=no_access_log,\n no_feature_log=no_feature_log,\n workers=workers,\n keep_alive_timeout=keep_alive_timeout,\n registry_ttl_sec=registry_ttl_sec,\n )\n\n\[email protected](\"listen\")\[email protected](\n \"--address\",\n \"-a\",\n type=click.STRING,\n default=\"localhost:50051\",\n show_default=True,\n help=\"Address of the gRPC server\",\n)\[email protected](\n \"--max_workers\",\n \"-w\",\n type=click.INT,\n default=10,\n show_default=False,\n help=\"The maximum number of threads that can be used to execute the gRPC calls\",\n)\[email protected](\n \"--registry_ttl_sec\",\n \"-r\",\n help=\"Number of seconds after which the registry is refreshed\",\n type=click.INT,\n default=5,\n show_default=True,\n)\[email protected]_context\ndef listen_command(\n ctx: click.Context,\n address: str,\n max_workers: int,\n registry_ttl_sec: int,\n):\n \"\"\"Start a gRPC feature server to ingest streaming features on given address\"\"\"\n store = 
create_feature_store(ctx)\n server = get_grpc_server(address, store, max_workers, registry_ttl_sec)\n server.start()\n server.wait_for_termination()\n\n\[email protected](\"serve_transformations\")\[email protected](\n \"--port\",\n \"-p\",\n type=click.INT,\n default=DEFAULT_FEATURE_TRANSFORMATION_SERVER_PORT,\n help=\"Specify a port for the server\",\n)\[email protected]_context\ndef serve_transformations_command(ctx: click.Context, port: int):\n \"\"\"[Experimental] Start a feature consumption server locally on a given port.\"\"\"\n store = create_feature_store(ctx)\n\n store.serve_transformations(port)\n\n\[email protected](\"serve_registry\")\[email protected](\n \"--port\",\n \"-p\",\n type=click.INT,\n default=DEFAULT_REGISTRY_SERVER_PORT,\n help=\"Specify a port for the server\",\n)\[email protected]_context\ndef serve_registry_command(ctx: click.Context, port: int):\n \"\"\"Start a registry server locally on a given port.\"\"\"\n store = create_feature_store(ctx)\n\n store.serve_registry(port)\n\n\[email protected](\"validate\")\[email protected](\n \"--feature-service\",\n \"-f\",\n help=\"Specify a feature service name\",\n)\[email protected](\n \"--reference\",\n \"-r\",\n help=\"Specify a validation reference name\",\n)\[email protected](\n \"--no-profile-cache\",\n is_flag=True,\n help=\"Do not store cached profile in registry\",\n)\[email protected](\"start_ts\")\[email protected](\"end_ts\")\[email protected]_context\ndef validate(\n ctx: click.Context,\n feature_service: str,\n reference: str,\n start_ts: str,\n end_ts: str,\n no_profile_cache,\n):\n \"\"\"\n Perform validation of logged features (produced by a given feature service) against provided reference.\n\n START_TS and END_TS should be in ISO 8601 format, e.g. '2021-07-16T19:20:01'\n \"\"\"\n store = create_feature_store(ctx)\n\n feature_service = store.get_feature_service(name=feature_service)\n reference = store.get_validation_reference(reference)\n\n result = store.validate_logged_features(\n source=feature_service,\n reference=reference,\n start=maybe_local_tz(datetime.fromisoformat(start_ts)),\n end=maybe_local_tz(datetime.fromisoformat(end_ts)),\n throw_exception=False,\n cache_profile=not no_profile_cache,\n )\n\n if not result:\n print(f\"{Style.BRIGHT + Fore.GREEN}Validation successful!{Style.RESET_ALL}\")\n return\n\n errors = [e.to_dict() for e in result.report.errors]\n formatted_json = json.dumps(errors, indent=4)\n colorful_json = highlight(\n formatted_json, lexers.JsonLexer(), formatters.TerminalFormatter()\n )\n print(f\"{Style.BRIGHT + Fore.RED}Validation failed!{Style.RESET_ALL}\")\n print(colorful_json)\n exit(1)\n\n\[email protected](\"repo-upgrade\", cls=NoOptionDefaultFormat)\[email protected](\n \"--write\",\n is_flag=True,\n default=False,\n help=\"Upgrade a feature repo to use the API expected by feast 0.23.\",\n)\[email protected]_context\ndef repo_upgrade(ctx: click.Context, write: bool):\n \"\"\"\n Upgrade a feature repo in place.\n \"\"\"\n repo = ctx.obj[\"CHDIR\"]\n fs_yaml_file = ctx.obj[\"FS_YAML_FILE\"]\n cli_check_repo(repo, fs_yaml_file)\n try:\n RepoUpgrader(repo, write).upgrade()\n except FeastProviderLoginError as e:\n print(str(e))\n\n\nif __name__ == \"__main__\":\n cli()\n", "path": "sdk/python/feast/cli.py" } ]
diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py
index 985c44b821f..7ce8aaef2bc 100644
--- a/sdk/python/feast/cli.py
+++ b/sdk/python/feast/cli.py
@@ -76,6 +76,7 @@ def format_options(self, ctx: click.Context, formatter: click.HelpFormatter):
 )
 @click.option(
     "--feature-store-yaml",
+    "-f",
     help="Override the directory where the CLI should look for the feature_store.yaml file.",
 )
 @click.pass_context
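The one-line change above simply registers `-f` as a short alias for the existing `--feature-store-yaml` option on the root command group. As a minimal, self-contained illustration (a toy CLI, not Feast's real one; every name outside the click API is made up for the example), the sketch below shows why the alias has to be declared on `click.option` before `prog -f path apply` can parse.

```python
# Minimal sketch of the behaviour fixed above (toy CLI, not Feast itself):
# a short "-f" alias only exists if it is passed to click.option alongside
# the long option name.
import click


@click.group()
@click.option(
    "--feature-store-yaml",
    "-f",              # <- without this string, `prog -f ... apply`
    default=None,      #    fails with "Error: No such option: -f"
    help="Override the feature_store.yaml location.",
)
@click.pass_context
def cli(ctx, feature_store_yaml):
    ctx.ensure_object(dict)
    ctx.obj["FS_YAML_FILE"] = feature_store_yaml


@cli.command()
@click.pass_context
def apply(ctx):
    click.echo(f"applying with config: {ctx.obj['FS_YAML_FILE']}")


if __name__ == "__main__":
    # e.g. `python toy_cli.py -f staging/feature_store.yaml apply`
    cli()
```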
pytest-dev__pytest-django-216
Support settings DJANGO_SETTINGS_MODULE in pytest_configure

See comment in #119, this should be possible:

``` python
import os


def pytest_configure():
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')
```
[ { "content": "\"\"\"\nHelpers to load Django lazily when Django settings can't be configured.\n\"\"\"\n\nimport os\nimport sys\n\nimport pytest\n\n\ndef skip_if_no_django():\n \"\"\"Raises a skip exception when no Django settings are available\"\"\"\n if not django_settings_is_configured():\n pytest.skip('Test skipped since no Django settings is present.')\n\n\ndef django_settings_is_configured():\n # Avoid importing Django if it has not yet been imported\n if not os.environ.get('DJANGO_SETTINGS_MODULE') \\\n and 'django.conf' not in sys.modules:\n return False\n\n # If DJANGO_SETTINGS_MODULE is defined at this point, Django is assumed to\n # always be loaded.\n from django.conf import settings\n assert settings.configured is True\n return True\n\n\ndef get_django_version():\n return __import__('django').VERSION\n", "path": "pytest_django/lazy_django.py" } ]
[ { "content": "\"\"\"\nHelpers to load Django lazily when Django settings can't be configured.\n\"\"\"\n\nimport os\nimport sys\n\nimport pytest\n\n\ndef skip_if_no_django():\n \"\"\"Raises a skip exception when no Django settings are available\"\"\"\n if not django_settings_is_configured():\n pytest.skip('Test skipped since no Django settings is present.')\n\n\ndef django_settings_is_configured():\n # Avoid importing Django if it has not yet been imported\n if not os.environ.get('DJANGO_SETTINGS_MODULE') \\\n and 'django.conf' not in sys.modules:\n return False\n\n # If DJANGO_SETTINGS_MODULE is defined at this point, Django is assumed to\n # always be loaded.\n return True\n\n\ndef get_django_version():\n return __import__('django').VERSION\n", "path": "pytest_django/lazy_django.py" } ]
diff --git a/pytest_django/lazy_django.py b/pytest_django/lazy_django.py index 845804099..4ba4d5aa7 100644 --- a/pytest_django/lazy_django.py +++ b/pytest_django/lazy_django.py @@ -22,8 +22,6 @@ def django_settings_is_configured(): # If DJANGO_SETTINGS_MODULE is defined at this point, Django is assumed to # always be loaded. - from django.conf import settings - assert settings.configured is True return True diff --git a/tests/test_django_settings_module.py b/tests/test_django_settings_module.py index a34448715..c3bfe4c3f 100644 --- a/tests/test_django_settings_module.py +++ b/tests/test_django_settings_module.py @@ -96,6 +96,25 @@ def test_ds_after_user_conftest(testdir, monkeypatch): result.stdout.fnmatch_lines(['*1 passed*']) +def test_ds_in_pytest_configure(testdir, monkeypatch): + monkeypatch.delenv('DJANGO_SETTINGS_MODULE') + pkg = testdir.mkpydir('tpkg') + settings = pkg.join('settings_ds.py') + settings.write(BARE_SETTINGS) + testdir.makeconftest(""" + import os + + from django.conf import settings + + def pytest_configure(): + if not settings.configured: + os.environ.setdefault('DJANGO_SETTINGS_MODULE', + 'tpkg.settings_ds') + """) + r = testdir.runpytest() + assert r.ret == 0 + + def test_django_settings_configure(testdir, monkeypatch): """ Make sure Django can be configured without setting
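Putting the issue snippet and the new test together, a minimal conftest.py that this change makes work; `myproject.settings` is a placeholder for your own settings module. The `settings.configured` guard mirrors the conftest used in the test added above, so an explicit `DJANGO_SETTINGS_MODULE` or `--ds` option still wins.

```python
# conftest.py at the project root
import os

from django.conf import settings


def pytest_configure():
    # Only fall back to the project default if Django was not configured
    # some other way (environment variable, --ds, or settings.configure()).
    if not settings.configured:
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")
```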
django-cms__django-cms-1016
2.2 Trove classifier is incorrect
The current release added a Development Status entry to the PyPI Trove classifiers, but it remains `'Development Status :: 4 - Beta'`, the value it had during the RCs. I suspect it should now be `'Development Status :: 5 - Production/Stable'`.
I don't have a git clone in front of me, so I can't open a pull request for it directly at the moment.
[ { "content": "from setuptools import setup, find_packages\nimport os\nimport cms\n\n \nCLASSIFIERS = [\n 'Development Status :: 4 - Beta',\n 'Environment :: Web Environment',\n 'Framework :: Django',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries :: Application Frameworks',\n]\n\nsetup(\n author=\"Patrick Lauber\",\n author_email=\"[email protected]\",\n name='django-cms',\n version=cms.__version__,\n description='An Advanced Django CMS',\n long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),\n url='https://www.django-cms.org/',\n license='BSD License',\n platforms=['OS Independent'],\n classifiers=CLASSIFIERS,\n install_requires=[\n 'Django>=1.2.5',\n 'django-classy-tags>=0.3.4.1',\n 'south>=0.7.2',\n 'html5lib',\n 'django-mptt>=0.4.2',\n 'django-sekizai>=0.4.2',\n ],\n packages=find_packages(exclude=[\"project\",\"project.*\"]),\n include_package_data=True,\n zip_safe = False\n)\n", "path": "setup.py" } ]
[ { "content": "from setuptools import setup, find_packages\nimport os\nimport cms\n\n \nCLASSIFIERS = [\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Web Environment',\n 'Framework :: Django',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries :: Application Frameworks',\n]\n\nsetup(\n author=\"Patrick Lauber\",\n author_email=\"[email protected]\",\n name='django-cms',\n version=cms.__version__,\n description='An Advanced Django CMS',\n long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),\n url='https://www.django-cms.org/',\n license='BSD License',\n platforms=['OS Independent'],\n classifiers=CLASSIFIERS,\n install_requires=[\n 'Django>=1.2.5',\n 'django-classy-tags>=0.3.4.1',\n 'south>=0.7.2',\n 'html5lib',\n 'django-mptt>=0.4.2',\n 'django-sekizai>=0.4.2',\n ],\n packages=find_packages(exclude=[\"project\",\"project.*\"]),\n include_package_data=True,\n zip_safe = False\n)\n", "path": "setup.py" } ]
diff --git a/setup.py b/setup.py index 8ca967a3250..65b6ffc1ed5 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ CLASSIFIERS = [ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers',
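As a side note on catching this kind of drift, a small sketch that checks classifier strings against PyPI's canonical list; it assumes the third-party `trove-classifiers` package (not a django-cms dependency), which ships the official set of accepted strings.

```python
# pip install trove-classifiers  (assumption: available in your environment)
from trove_classifiers import classifiers

for c in [
    "Development Status :: 4 - Beta",
    "Development Status :: 5 - Production/Stable",
]:
    # Both strings are valid Trove classifiers; the issue is about which one
    # accurately describes a final 2.2 release, not about validity.
    assert c in classifiers, f"unknown classifier: {c}"
```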
open-telemetry__opentelemetry-python-3284
Reserved attribute list seems to be out of sync for message
https://github.com/open-telemetry/opentelemetry-python/blob/e00306206ea25cf8549eca289e39e0b6ba2fa560/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py#L290 lists `getMessage`, whereas https://docs.python.org/3/library/logging.html#logrecord-attributes lists `message` as the reserved attribute. As a result, `message` appears in the emitted record with exactly the same data as `msg`.
[ { "content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport abc\nimport atexit\nimport concurrent.futures\nimport json\nimport logging\nimport threading\nimport traceback\nfrom time import time_ns\nfrom typing import Any, Callable, Optional, Tuple, Union\n\nfrom opentelemetry._logs import Logger as APILogger\nfrom opentelemetry._logs import LoggerProvider as APILoggerProvider\nfrom opentelemetry._logs import LogRecord as APILogRecord\nfrom opentelemetry._logs import (\n SeverityNumber,\n get_logger,\n get_logger_provider,\n std_to_otel,\n)\nfrom opentelemetry.sdk.resources import Resource\nfrom opentelemetry.sdk.util import ns_to_iso_str\nfrom opentelemetry.sdk.util.instrumentation import InstrumentationScope\nfrom opentelemetry.semconv.trace import SpanAttributes\nfrom opentelemetry.trace import (\n format_span_id,\n format_trace_id,\n get_current_span,\n)\nfrom opentelemetry.trace.span import TraceFlags\nfrom opentelemetry.util.types import Attributes\n\n_logger = logging.getLogger(__name__)\n\n\nclass LogRecord(APILogRecord):\n \"\"\"A LogRecord instance represents an event being logged.\n\n LogRecord instances are created and emitted via `Logger`\n every time something is logged. They contain all the information\n pertinent to the event being logged.\n \"\"\"\n\n def __init__(\n self,\n timestamp: Optional[int] = None,\n observed_timestamp: Optional[int] = None,\n trace_id: Optional[int] = None,\n span_id: Optional[int] = None,\n trace_flags: Optional[TraceFlags] = None,\n severity_text: Optional[str] = None,\n severity_number: Optional[SeverityNumber] = None,\n body: Optional[Any] = None,\n resource: Optional[Resource] = None,\n attributes: Optional[Attributes] = None,\n ):\n super().__init__(\n **{\n \"timestamp\": timestamp,\n \"observed_timestamp\": observed_timestamp,\n \"trace_id\": trace_id,\n \"span_id\": span_id,\n \"trace_flags\": trace_flags,\n \"severity_text\": severity_text,\n \"severity_number\": severity_number,\n \"body\": body,\n \"attributes\": attributes,\n }\n )\n self.resource = resource\n\n def __eq__(self, other: object) -> bool:\n if not isinstance(other, LogRecord):\n return NotImplemented\n return self.__dict__ == other.__dict__\n\n def to_json(self, indent=4) -> str:\n return json.dumps(\n {\n \"body\": self.body,\n \"severity_number\": repr(self.severity_number),\n \"severity_text\": self.severity_text,\n \"attributes\": self.attributes,\n \"timestamp\": ns_to_iso_str(self.timestamp),\n \"trace_id\": f\"0x{format_trace_id(self.trace_id)}\"\n if self.trace_id is not None\n else \"\",\n \"span_id\": f\"0x{format_span_id(self.span_id)}\"\n if self.span_id is not None\n else \"\",\n \"trace_flags\": self.trace_flags,\n \"resource\": repr(self.resource.attributes)\n if self.resource\n else \"\",\n },\n indent=indent,\n )\n\n\nclass LogData:\n \"\"\"Readable LogRecord data plus associated InstrumentationLibrary.\"\"\"\n\n def __init__(\n self,\n log_record: LogRecord,\n instrumentation_scope: 
InstrumentationScope,\n ):\n self.log_record = log_record\n self.instrumentation_scope = instrumentation_scope\n\n\nclass LogRecordProcessor(abc.ABC):\n \"\"\"Interface to hook the log record emitting action.\n\n Log processors can be registered directly using\n :func:`LoggerProvider.add_log_record_processor` and they are invoked\n in the same order as they were registered.\n \"\"\"\n\n @abc.abstractmethod\n def emit(self, log_data: LogData):\n \"\"\"Emits the `LogData`\"\"\"\n\n @abc.abstractmethod\n def shutdown(self):\n \"\"\"Called when a :class:`opentelemetry.sdk._logs.Logger` is shutdown\"\"\"\n\n @abc.abstractmethod\n def force_flush(self, timeout_millis: int = 30000):\n \"\"\"Export all the received logs to the configured Exporter that have not yet\n been exported.\n\n Args:\n timeout_millis: The maximum amount of time to wait for logs to be\n exported.\n\n Returns:\n False if the timeout is exceeded, True otherwise.\n \"\"\"\n\n\n# Temporary fix until https://github.com/PyCQA/pylint/issues/4098 is resolved\n# pylint:disable=no-member\nclass SynchronousMultiLogRecordProcessor(LogRecordProcessor):\n \"\"\"Implementation of class:`LogRecordProcessor` that forwards all received\n events to a list of log processors sequentially.\n\n The underlying log processors are called in sequential order as they were\n added.\n \"\"\"\n\n def __init__(self):\n # use a tuple to avoid race conditions when adding a new log and\n # iterating through it on \"emit\".\n self._log_record_processors = () # type: Tuple[LogRecordProcessor, ...]\n self._lock = threading.Lock()\n\n def add_log_record_processor(\n self, log_record_processor: LogRecordProcessor\n ) -> None:\n \"\"\"Adds a Logprocessor to the list of log processors handled by this instance\"\"\"\n with self._lock:\n self._log_record_processors += (log_record_processor,)\n\n def emit(self, log_data: LogData) -> None:\n for lp in self._log_record_processors:\n lp.emit(log_data)\n\n def shutdown(self) -> None:\n \"\"\"Shutdown the log processors one by one\"\"\"\n for lp in self._log_record_processors:\n lp.shutdown()\n\n def force_flush(self, timeout_millis: int = 30000) -> bool:\n \"\"\"Force flush the log processors one by one\n\n Args:\n timeout_millis: The maximum amount of time to wait for logs to be\n exported. 
If the first n log processors exceeded the timeout\n then remaining log processors will not be flushed.\n\n Returns:\n True if all the log processors flushes the logs within timeout,\n False otherwise.\n \"\"\"\n deadline_ns = time_ns() + timeout_millis * 1000000\n for lp in self._log_record_processors:\n current_ts = time_ns()\n if current_ts >= deadline_ns:\n return False\n\n if not lp.force_flush((deadline_ns - current_ts) // 1000000):\n return False\n\n return True\n\n\nclass ConcurrentMultiLogRecordProcessor(LogRecordProcessor):\n \"\"\"Implementation of :class:`LogRecordProcessor` that forwards all received\n events to a list of log processors in parallel.\n\n Calls to the underlying log processors are forwarded in parallel by\n submitting them to a thread pool executor and waiting until each log\n processor finished its work.\n\n Args:\n max_workers: The number of threads managed by the thread pool executor\n and thus defining how many log processors can work in parallel.\n \"\"\"\n\n def __init__(self, max_workers: int = 2):\n # use a tuple to avoid race conditions when adding a new log and\n # iterating through it on \"emit\".\n self._log_record_processors = () # type: Tuple[LogRecordProcessor, ...]\n self._lock = threading.Lock()\n self._executor = concurrent.futures.ThreadPoolExecutor(\n max_workers=max_workers\n )\n\n def add_log_record_processor(\n self, log_record_processor: LogRecordProcessor\n ):\n with self._lock:\n self._log_record_processors += (log_record_processor,)\n\n def _submit_and_wait(\n self,\n func: Callable[[LogRecordProcessor], Callable[..., None]],\n *args: Any,\n **kwargs: Any,\n ):\n futures = []\n for lp in self._log_record_processors:\n future = self._executor.submit(func(lp), *args, **kwargs)\n futures.append(future)\n for future in futures:\n future.result()\n\n def emit(self, log_data: LogData):\n self._submit_and_wait(lambda lp: lp.emit, log_data)\n\n def shutdown(self):\n self._submit_and_wait(lambda lp: lp.shutdown)\n\n def force_flush(self, timeout_millis: int = 30000) -> bool:\n \"\"\"Force flush the log processors in parallel.\n\n Args:\n timeout_millis: The maximum amount of time to wait for logs to be\n exported.\n\n Returns:\n True if all the log processors flushes the logs within timeout,\n False otherwise.\n \"\"\"\n futures = []\n for lp in self._log_record_processors:\n future = self._executor.submit(lp.force_flush, timeout_millis)\n futures.append(future)\n\n done_futures, not_done_futures = concurrent.futures.wait(\n futures, timeout_millis / 1e3\n )\n\n if not_done_futures:\n return False\n\n for future in done_futures:\n if not future.result():\n return False\n\n return True\n\n\n# skip natural LogRecord attributes\n# http://docs.python.org/library/logging.html#logrecord-attributes\n_RESERVED_ATTRS = frozenset(\n (\n \"asctime\",\n \"args\",\n \"created\",\n \"exc_info\",\n \"exc_text\",\n \"filename\",\n \"funcName\",\n \"getMessage\",\n \"levelname\",\n \"levelno\",\n \"lineno\",\n \"module\",\n \"msecs\",\n \"msg\",\n \"name\",\n \"pathname\",\n \"process\",\n \"processName\",\n \"relativeCreated\",\n \"stack_info\",\n \"thread\",\n \"threadName\",\n )\n)\n\n\nclass LoggingHandler(logging.Handler):\n \"\"\"A handler class which writes logging records, in OTLP format, to\n a network destination or file. 
Supports signals from the `logging` module.\n https://docs.python.org/3/library/logging.html\n \"\"\"\n\n def __init__(\n self,\n level=logging.NOTSET,\n logger_provider=None,\n ) -> None:\n super().__init__(level=level)\n self._logger_provider = logger_provider or get_logger_provider()\n self._logger = get_logger(\n __name__, logger_provider=self._logger_provider\n )\n\n @staticmethod\n def _get_attributes(record: logging.LogRecord) -> Attributes:\n attributes = {\n k: v for k, v in vars(record).items() if k not in _RESERVED_ATTRS\n }\n if record.exc_info:\n exc_type = \"\"\n message = \"\"\n stack_trace = \"\"\n exctype, value, tb = record.exc_info\n if exctype is not None:\n exc_type = exctype.__name__\n if value is not None and value.args:\n message = value.args[0]\n if tb is not None:\n # https://github.com/open-telemetry/opentelemetry-specification/blob/9fa7c656b26647b27e485a6af7e38dc716eba98a/specification/trace/semantic_conventions/exceptions.md#stacktrace-representation\n stack_trace = \"\".join(\n traceback.format_exception(*record.exc_info)\n )\n attributes[SpanAttributes.EXCEPTION_TYPE] = exc_type\n attributes[SpanAttributes.EXCEPTION_MESSAGE] = message\n attributes[SpanAttributes.EXCEPTION_STACKTRACE] = stack_trace\n return attributes\n\n def _translate(self, record: logging.LogRecord) -> LogRecord:\n timestamp = int(record.created * 1e9)\n span_context = get_current_span().get_span_context()\n attributes = self._get_attributes(record)\n severity_number = std_to_otel(record.levelno)\n return LogRecord(\n timestamp=timestamp,\n trace_id=span_context.trace_id,\n span_id=span_context.span_id,\n trace_flags=span_context.trace_flags,\n severity_text=record.levelname,\n severity_number=severity_number,\n body=record.getMessage(),\n resource=self._logger.resource,\n attributes=attributes,\n )\n\n def emit(self, record: logging.LogRecord) -> None:\n \"\"\"\n Emit a record.\n\n The record is translated to OTel format, and then sent across the pipeline.\n \"\"\"\n self._logger.emit(self._translate(record))\n\n def flush(self) -> None:\n \"\"\"\n Flushes the logging output.\n \"\"\"\n self._logger_provider.force_flush()\n\n\nclass Logger(APILogger):\n def __init__(\n self,\n resource: Resource,\n multi_log_record_processor: Union[\n SynchronousMultiLogRecordProcessor,\n ConcurrentMultiLogRecordProcessor,\n ],\n instrumentation_scope: InstrumentationScope,\n ):\n super().__init__(\n instrumentation_scope.name,\n instrumentation_scope.version,\n instrumentation_scope.schema_url,\n )\n self._resource = resource\n self._multi_log_record_processor = multi_log_record_processor\n self._instrumentation_scope = instrumentation_scope\n\n @property\n def resource(self):\n return self._resource\n\n def emit(self, record: LogRecord):\n \"\"\"Emits the :class:`LogData` by associating :class:`LogRecord`\n and instrumentation info.\n \"\"\"\n log_data = LogData(record, self._instrumentation_scope)\n self._multi_log_record_processor.emit(log_data)\n\n\nclass LoggerProvider(APILoggerProvider):\n def __init__(\n self,\n resource: Resource = Resource.create(),\n shutdown_on_exit: bool = True,\n multi_log_record_processor: Union[\n SynchronousMultiLogRecordProcessor,\n ConcurrentMultiLogRecordProcessor,\n ] = None,\n ):\n self._resource = resource\n self._multi_log_record_processor = (\n multi_log_record_processor or SynchronousMultiLogRecordProcessor()\n )\n self._at_exit_handler = None\n if shutdown_on_exit:\n self._at_exit_handler = atexit.register(self.shutdown)\n\n @property\n def resource(self):\n 
return self._resource\n\n def get_logger(\n self,\n name: str,\n version: Optional[str] = None,\n schema_url: Optional[str] = None,\n ) -> Logger:\n return Logger(\n self._resource,\n self._multi_log_record_processor,\n InstrumentationScope(\n name,\n version,\n schema_url,\n ),\n )\n\n def add_log_record_processor(\n self, log_record_processor: LogRecordProcessor\n ):\n \"\"\"Registers a new :class:`LogRecordProcessor` for this `LoggerProvider` instance.\n\n The log processors are invoked in the same order they are registered.\n \"\"\"\n self._multi_log_record_processor.add_log_record_processor(\n log_record_processor\n )\n\n def shutdown(self):\n \"\"\"Shuts down the log processors.\"\"\"\n self._multi_log_record_processor.shutdown()\n if self._at_exit_handler is not None:\n atexit.unregister(self._at_exit_handler)\n self._at_exit_handler = None\n\n def force_flush(self, timeout_millis: int = 30000) -> bool:\n \"\"\"Force flush the log processors.\n\n Args:\n timeout_millis: The maximum amount of time to wait for logs to be\n exported.\n\n Returns:\n True if all the log processors flushes the logs within timeout,\n False otherwise.\n \"\"\"\n return self._multi_log_record_processor.force_flush(timeout_millis)\n", "path": "opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py" } ]
[ { "content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport abc\nimport atexit\nimport concurrent.futures\nimport json\nimport logging\nimport threading\nimport traceback\nfrom time import time_ns\nfrom typing import Any, Callable, Optional, Tuple, Union\n\nfrom opentelemetry._logs import Logger as APILogger\nfrom opentelemetry._logs import LoggerProvider as APILoggerProvider\nfrom opentelemetry._logs import LogRecord as APILogRecord\nfrom opentelemetry._logs import (\n SeverityNumber,\n get_logger,\n get_logger_provider,\n std_to_otel,\n)\nfrom opentelemetry.sdk.resources import Resource\nfrom opentelemetry.sdk.util import ns_to_iso_str\nfrom opentelemetry.sdk.util.instrumentation import InstrumentationScope\nfrom opentelemetry.semconv.trace import SpanAttributes\nfrom opentelemetry.trace import (\n format_span_id,\n format_trace_id,\n get_current_span,\n)\nfrom opentelemetry.trace.span import TraceFlags\nfrom opentelemetry.util.types import Attributes\n\n_logger = logging.getLogger(__name__)\n\n\nclass LogRecord(APILogRecord):\n \"\"\"A LogRecord instance represents an event being logged.\n\n LogRecord instances are created and emitted via `Logger`\n every time something is logged. They contain all the information\n pertinent to the event being logged.\n \"\"\"\n\n def __init__(\n self,\n timestamp: Optional[int] = None,\n observed_timestamp: Optional[int] = None,\n trace_id: Optional[int] = None,\n span_id: Optional[int] = None,\n trace_flags: Optional[TraceFlags] = None,\n severity_text: Optional[str] = None,\n severity_number: Optional[SeverityNumber] = None,\n body: Optional[Any] = None,\n resource: Optional[Resource] = None,\n attributes: Optional[Attributes] = None,\n ):\n super().__init__(\n **{\n \"timestamp\": timestamp,\n \"observed_timestamp\": observed_timestamp,\n \"trace_id\": trace_id,\n \"span_id\": span_id,\n \"trace_flags\": trace_flags,\n \"severity_text\": severity_text,\n \"severity_number\": severity_number,\n \"body\": body,\n \"attributes\": attributes,\n }\n )\n self.resource = resource\n\n def __eq__(self, other: object) -> bool:\n if not isinstance(other, LogRecord):\n return NotImplemented\n return self.__dict__ == other.__dict__\n\n def to_json(self, indent=4) -> str:\n return json.dumps(\n {\n \"body\": self.body,\n \"severity_number\": repr(self.severity_number),\n \"severity_text\": self.severity_text,\n \"attributes\": self.attributes,\n \"timestamp\": ns_to_iso_str(self.timestamp),\n \"trace_id\": f\"0x{format_trace_id(self.trace_id)}\"\n if self.trace_id is not None\n else \"\",\n \"span_id\": f\"0x{format_span_id(self.span_id)}\"\n if self.span_id is not None\n else \"\",\n \"trace_flags\": self.trace_flags,\n \"resource\": repr(self.resource.attributes)\n if self.resource\n else \"\",\n },\n indent=indent,\n )\n\n\nclass LogData:\n \"\"\"Readable LogRecord data plus associated InstrumentationLibrary.\"\"\"\n\n def __init__(\n self,\n log_record: LogRecord,\n instrumentation_scope: 
InstrumentationScope,\n ):\n self.log_record = log_record\n self.instrumentation_scope = instrumentation_scope\n\n\nclass LogRecordProcessor(abc.ABC):\n \"\"\"Interface to hook the log record emitting action.\n\n Log processors can be registered directly using\n :func:`LoggerProvider.add_log_record_processor` and they are invoked\n in the same order as they were registered.\n \"\"\"\n\n @abc.abstractmethod\n def emit(self, log_data: LogData):\n \"\"\"Emits the `LogData`\"\"\"\n\n @abc.abstractmethod\n def shutdown(self):\n \"\"\"Called when a :class:`opentelemetry.sdk._logs.Logger` is shutdown\"\"\"\n\n @abc.abstractmethod\n def force_flush(self, timeout_millis: int = 30000):\n \"\"\"Export all the received logs to the configured Exporter that have not yet\n been exported.\n\n Args:\n timeout_millis: The maximum amount of time to wait for logs to be\n exported.\n\n Returns:\n False if the timeout is exceeded, True otherwise.\n \"\"\"\n\n\n# Temporary fix until https://github.com/PyCQA/pylint/issues/4098 is resolved\n# pylint:disable=no-member\nclass SynchronousMultiLogRecordProcessor(LogRecordProcessor):\n \"\"\"Implementation of class:`LogRecordProcessor` that forwards all received\n events to a list of log processors sequentially.\n\n The underlying log processors are called in sequential order as they were\n added.\n \"\"\"\n\n def __init__(self):\n # use a tuple to avoid race conditions when adding a new log and\n # iterating through it on \"emit\".\n self._log_record_processors = () # type: Tuple[LogRecordProcessor, ...]\n self._lock = threading.Lock()\n\n def add_log_record_processor(\n self, log_record_processor: LogRecordProcessor\n ) -> None:\n \"\"\"Adds a Logprocessor to the list of log processors handled by this instance\"\"\"\n with self._lock:\n self._log_record_processors += (log_record_processor,)\n\n def emit(self, log_data: LogData) -> None:\n for lp in self._log_record_processors:\n lp.emit(log_data)\n\n def shutdown(self) -> None:\n \"\"\"Shutdown the log processors one by one\"\"\"\n for lp in self._log_record_processors:\n lp.shutdown()\n\n def force_flush(self, timeout_millis: int = 30000) -> bool:\n \"\"\"Force flush the log processors one by one\n\n Args:\n timeout_millis: The maximum amount of time to wait for logs to be\n exported. 
If the first n log processors exceeded the timeout\n then remaining log processors will not be flushed.\n\n Returns:\n True if all the log processors flushes the logs within timeout,\n False otherwise.\n \"\"\"\n deadline_ns = time_ns() + timeout_millis * 1000000\n for lp in self._log_record_processors:\n current_ts = time_ns()\n if current_ts >= deadline_ns:\n return False\n\n if not lp.force_flush((deadline_ns - current_ts) // 1000000):\n return False\n\n return True\n\n\nclass ConcurrentMultiLogRecordProcessor(LogRecordProcessor):\n \"\"\"Implementation of :class:`LogRecordProcessor` that forwards all received\n events to a list of log processors in parallel.\n\n Calls to the underlying log processors are forwarded in parallel by\n submitting them to a thread pool executor and waiting until each log\n processor finished its work.\n\n Args:\n max_workers: The number of threads managed by the thread pool executor\n and thus defining how many log processors can work in parallel.\n \"\"\"\n\n def __init__(self, max_workers: int = 2):\n # use a tuple to avoid race conditions when adding a new log and\n # iterating through it on \"emit\".\n self._log_record_processors = () # type: Tuple[LogRecordProcessor, ...]\n self._lock = threading.Lock()\n self._executor = concurrent.futures.ThreadPoolExecutor(\n max_workers=max_workers\n )\n\n def add_log_record_processor(\n self, log_record_processor: LogRecordProcessor\n ):\n with self._lock:\n self._log_record_processors += (log_record_processor,)\n\n def _submit_and_wait(\n self,\n func: Callable[[LogRecordProcessor], Callable[..., None]],\n *args: Any,\n **kwargs: Any,\n ):\n futures = []\n for lp in self._log_record_processors:\n future = self._executor.submit(func(lp), *args, **kwargs)\n futures.append(future)\n for future in futures:\n future.result()\n\n def emit(self, log_data: LogData):\n self._submit_and_wait(lambda lp: lp.emit, log_data)\n\n def shutdown(self):\n self._submit_and_wait(lambda lp: lp.shutdown)\n\n def force_flush(self, timeout_millis: int = 30000) -> bool:\n \"\"\"Force flush the log processors in parallel.\n\n Args:\n timeout_millis: The maximum amount of time to wait for logs to be\n exported.\n\n Returns:\n True if all the log processors flushes the logs within timeout,\n False otherwise.\n \"\"\"\n futures = []\n for lp in self._log_record_processors:\n future = self._executor.submit(lp.force_flush, timeout_millis)\n futures.append(future)\n\n done_futures, not_done_futures = concurrent.futures.wait(\n futures, timeout_millis / 1e3\n )\n\n if not_done_futures:\n return False\n\n for future in done_futures:\n if not future.result():\n return False\n\n return True\n\n\n# skip natural LogRecord attributes\n# http://docs.python.org/library/logging.html#logrecord-attributes\n_RESERVED_ATTRS = frozenset(\n (\n \"asctime\",\n \"args\",\n \"created\",\n \"exc_info\",\n \"exc_text\",\n \"filename\",\n \"funcName\",\n \"message\",\n \"levelname\",\n \"levelno\",\n \"lineno\",\n \"module\",\n \"msecs\",\n \"msg\",\n \"name\",\n \"pathname\",\n \"process\",\n \"processName\",\n \"relativeCreated\",\n \"stack_info\",\n \"thread\",\n \"threadName\",\n )\n)\n\n\nclass LoggingHandler(logging.Handler):\n \"\"\"A handler class which writes logging records, in OTLP format, to\n a network destination or file. 
Supports signals from the `logging` module.\n https://docs.python.org/3/library/logging.html\n \"\"\"\n\n def __init__(\n self,\n level=logging.NOTSET,\n logger_provider=None,\n ) -> None:\n super().__init__(level=level)\n self._logger_provider = logger_provider or get_logger_provider()\n self._logger = get_logger(\n __name__, logger_provider=self._logger_provider\n )\n\n @staticmethod\n def _get_attributes(record: logging.LogRecord) -> Attributes:\n attributes = {\n k: v for k, v in vars(record).items() if k not in _RESERVED_ATTRS\n }\n if record.exc_info:\n exc_type = \"\"\n message = \"\"\n stack_trace = \"\"\n exctype, value, tb = record.exc_info\n if exctype is not None:\n exc_type = exctype.__name__\n if value is not None and value.args:\n message = value.args[0]\n if tb is not None:\n # https://github.com/open-telemetry/opentelemetry-specification/blob/9fa7c656b26647b27e485a6af7e38dc716eba98a/specification/trace/semantic_conventions/exceptions.md#stacktrace-representation\n stack_trace = \"\".join(\n traceback.format_exception(*record.exc_info)\n )\n attributes[SpanAttributes.EXCEPTION_TYPE] = exc_type\n attributes[SpanAttributes.EXCEPTION_MESSAGE] = message\n attributes[SpanAttributes.EXCEPTION_STACKTRACE] = stack_trace\n return attributes\n\n def _translate(self, record: logging.LogRecord) -> LogRecord:\n timestamp = int(record.created * 1e9)\n span_context = get_current_span().get_span_context()\n attributes = self._get_attributes(record)\n severity_number = std_to_otel(record.levelno)\n return LogRecord(\n timestamp=timestamp,\n trace_id=span_context.trace_id,\n span_id=span_context.span_id,\n trace_flags=span_context.trace_flags,\n severity_text=record.levelname,\n severity_number=severity_number,\n body=record.getMessage(),\n resource=self._logger.resource,\n attributes=attributes,\n )\n\n def emit(self, record: logging.LogRecord) -> None:\n \"\"\"\n Emit a record.\n\n The record is translated to OTel format, and then sent across the pipeline.\n \"\"\"\n self._logger.emit(self._translate(record))\n\n def flush(self) -> None:\n \"\"\"\n Flushes the logging output.\n \"\"\"\n self._logger_provider.force_flush()\n\n\nclass Logger(APILogger):\n def __init__(\n self,\n resource: Resource,\n multi_log_record_processor: Union[\n SynchronousMultiLogRecordProcessor,\n ConcurrentMultiLogRecordProcessor,\n ],\n instrumentation_scope: InstrumentationScope,\n ):\n super().__init__(\n instrumentation_scope.name,\n instrumentation_scope.version,\n instrumentation_scope.schema_url,\n )\n self._resource = resource\n self._multi_log_record_processor = multi_log_record_processor\n self._instrumentation_scope = instrumentation_scope\n\n @property\n def resource(self):\n return self._resource\n\n def emit(self, record: LogRecord):\n \"\"\"Emits the :class:`LogData` by associating :class:`LogRecord`\n and instrumentation info.\n \"\"\"\n log_data = LogData(record, self._instrumentation_scope)\n self._multi_log_record_processor.emit(log_data)\n\n\nclass LoggerProvider(APILoggerProvider):\n def __init__(\n self,\n resource: Resource = Resource.create(),\n shutdown_on_exit: bool = True,\n multi_log_record_processor: Union[\n SynchronousMultiLogRecordProcessor,\n ConcurrentMultiLogRecordProcessor,\n ] = None,\n ):\n self._resource = resource\n self._multi_log_record_processor = (\n multi_log_record_processor or SynchronousMultiLogRecordProcessor()\n )\n self._at_exit_handler = None\n if shutdown_on_exit:\n self._at_exit_handler = atexit.register(self.shutdown)\n\n @property\n def resource(self):\n 
return self._resource\n\n def get_logger(\n self,\n name: str,\n version: Optional[str] = None,\n schema_url: Optional[str] = None,\n ) -> Logger:\n return Logger(\n self._resource,\n self._multi_log_record_processor,\n InstrumentationScope(\n name,\n version,\n schema_url,\n ),\n )\n\n def add_log_record_processor(\n self, log_record_processor: LogRecordProcessor\n ):\n \"\"\"Registers a new :class:`LogRecordProcessor` for this `LoggerProvider` instance.\n\n The log processors are invoked in the same order they are registered.\n \"\"\"\n self._multi_log_record_processor.add_log_record_processor(\n log_record_processor\n )\n\n def shutdown(self):\n \"\"\"Shuts down the log processors.\"\"\"\n self._multi_log_record_processor.shutdown()\n if self._at_exit_handler is not None:\n atexit.unregister(self._at_exit_handler)\n self._at_exit_handler = None\n\n def force_flush(self, timeout_millis: int = 30000) -> bool:\n \"\"\"Force flush the log processors.\n\n Args:\n timeout_millis: The maximum amount of time to wait for logs to be\n exported.\n\n Returns:\n True if all the log processors flushes the logs within timeout,\n False otherwise.\n \"\"\"\n return self._multi_log_record_processor.force_flush(timeout_millis)\n", "path": "opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py" } ]
diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py index 83cef931491..eda9b093c93 100644 --- a/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py +++ b/opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py @@ -296,7 +296,7 @@ def force_flush(self, timeout_millis: int = 30000) -> bool: "exc_text", "filename", "funcName", - "getMessage", + "message", "levelname", "levelno", "lineno",
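A minimal sketch, using only the stdlib `logging` module, of why the rename matters: `LoggingHandler._get_attributes` copies every `LogRecord` attribute whose name is not in `_RESERVED_ATTRS` into the OTel attributes, and once a `Formatter` has run on a record it carries a `message` attribute holding the same text the handler already emits as the body. The filter below mirrors the dict comprehension from the code above.

```python
import logging

# Abbreviated copy of the corrected reserved-attribute set; "message" replaces
# the old "getMessage" entry, which never occurs as a key in vars(record).
_RESERVED_ATTRS = frozenset(
    (
        "args", "asctime", "created", "exc_info", "exc_text", "filename",
        "funcName", "levelname", "levelno", "lineno", "message", "module",
        "msecs", "msg", "name", "pathname", "process", "processName",
        "relativeCreated", "stack_info", "thread", "threadName",
    )
)

record = logging.LogRecord(
    name="demo", level=logging.INFO, pathname=__file__, lineno=1,
    msg="hello %s", args=("world",), exc_info=None,
)
record.message = record.getMessage()  # what logging.Formatter.format() does

attributes = {k: v for k, v in vars(record).items() if k not in _RESERVED_ATTRS}

# With "message" reserved, the duplicate of the body stays out of the attributes;
# with the old "getMessage" entry it would have been included.
assert "message" not in attributes
```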
HypothesisWorks__hypothesis-1084
TypeError thrown when trying to import hypothesis in 3.44.21
hypothesis (3.44.21)

```
In [4]: from hypothesis import given
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-4-4ce9639ca03b> in <module>()
----> 1 from hypothesis import given

/usr/local/lib/python2.7/dist-packages/hypothesis/__init__.py in <module>()
     29 from hypothesis.version import __version_info__, __version__
     30 from hypothesis.control import assume, note, reject, event
---> 31 from hypothesis.core import given, find, example, seed, reproduce_failure, \
     32     PrintSettings
     33 from hypothesis.utils.conventions import infer

/usr/local/lib/python2.7/dist-packages/hypothesis/core.py in <module>()
     35 from coverage.collector import Collector
     36
---> 37 import hypothesis.strategies as st
     38 from hypothesis import __version__
     39 from hypothesis.errors import Flaky, Timeout, NoSuchExample, \

/usr/local/lib/python2.7/dist-packages/hypothesis/strategies.py in <module>()
     30 from hypothesis.control import assume
     31 from hypothesis._settings import note_deprecation
---> 32 from hypothesis.internal.cache import LRUReusedCache
     33 from hypothesis.searchstrategy import SearchStrategy
     34 from hypothesis.internal.compat import gcd, ceil, floor, hrange, \

/usr/local/lib/python2.7/dist-packages/hypothesis/internal/cache.py in <module>()
     21
     22
---> 23 @attr.s(slots=True)
     24 class Entry(object):
     25     key = attr.ib()

TypeError: attributes() got an unexpected keyword argument 'slots'
```
[ { "content": "# coding=utf-8\n#\n# This file is part of Hypothesis, which may be found at\n# https://github.com/HypothesisWorks/hypothesis-python\n#\n# Most of this work is copyright (C) 2013-2018 David R. MacIver\n# ([email protected]), but it contains contributions by others. See\n# CONTRIBUTING.rst for a full list of people who may hold copyright, and\n# consult the git log if you need to determine who owns an individual\n# contribution.\n#\n# This Source Code Form is subject to the terms of the Mozilla Public License,\n# v. 2.0. If a copy of the MPL was not distributed with this file, You can\n# obtain one at http://mozilla.org/MPL/2.0/.\n#\n# END HEADER\n\nfrom __future__ import division, print_function, absolute_import\n\nimport os\nimport sys\n\nimport setuptools\n\n\ndef local_file(name):\n return os.path.relpath(os.path.join(os.path.dirname(__file__), name))\n\n\nSOURCE = local_file('src')\nREADME = local_file('README.rst')\n\n\n# Assignment to placate pyflakes. The actual version is from the exec that\n# follows.\n__version__ = None\n\nwith open(local_file('src/hypothesis/version.py')) as o:\n exec(o.read())\n\nassert __version__ is not None\n\n\nextras = {\n 'datetime': ['pytz'],\n 'pytz': ['pytz'],\n 'fakefactory': ['Faker>=0.7'],\n 'numpy': ['numpy>=1.9.0'],\n 'pytest': ['pytest>=2.8.0'],\n}\n\n# Django 2 only supports Python 3, but doesn't have any python_requires\n# markers in its setup.py --- so \"pip install django\" just fails in\n# Python 2. So rather than relying on pip, we pin the version of\n# Django on Python 2 ourselves.\n#\n# See https://github.com/HypothesisWorks/hypothesis-python/pull/1008\nif sys.version_info[0] < 3:\n django_major_pin = '<2'\nelse:\n django_major_pin = '<3'\n\n# We only support the releases of Django that are supported by the Django\n# core team. See https://www.djangoproject.com/download/#supported-versions\n#\n# New versions of setuptools allow us to set very precise pins; older versions\n# of setuptools are coarser.\nmajor_setuptools_version = int(setuptools.__version__.split('.')[0])\nif major_setuptools_version >= 8:\n django_minor_pin = '>=1.8,!=1.9.*,!=1.10.*'\nelse:\n django_minor_pin = '>=1.8'\n\ndjango_pin = 'django%s,%s' % (django_minor_pin, django_major_pin)\nextras['django'] = ['pytz', django_pin]\n\nextras['faker'] = extras['fakefactory']\n\nextras['all'] = sorted(sum(extras.values(), []))\n\nextras[\":python_version == '2.7'\"] = ['enum34']\n\ninstall_requires = ['attrs', 'coverage']\n\nif sys.version_info[0] < 3:\n install_requires.append('enum34')\n\nsetuptools.setup(\n name='hypothesis',\n version=__version__,\n author='David R. 
MacIver',\n author_email='[email protected]',\n packages=setuptools.find_packages(SOURCE),\n package_dir={'': SOURCE},\n url='https://github.com/HypothesisWorks/hypothesis-python',\n license='MPL v2',\n description='A library for property based testing',\n zip_safe=False,\n extras_require=extras,\n install_requires=install_requires,\n python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',\n 'Operating System :: Unix',\n 'Operating System :: POSIX',\n 'Operating System :: Microsoft :: Windows',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy',\n 'Topic :: Software Development :: Testing',\n ],\n entry_points={\n 'pytest11': ['hypothesispytest = hypothesis.extra.pytestplugin'],\n },\n long_description=open(README).read(),\n)\n", "path": "setup.py" } ]
[ { "content": "# coding=utf-8\n#\n# This file is part of Hypothesis, which may be found at\n# https://github.com/HypothesisWorks/hypothesis-python\n#\n# Most of this work is copyright (C) 2013-2018 David R. MacIver\n# ([email protected]), but it contains contributions by others. See\n# CONTRIBUTING.rst for a full list of people who may hold copyright, and\n# consult the git log if you need to determine who owns an individual\n# contribution.\n#\n# This Source Code Form is subject to the terms of the Mozilla Public License,\n# v. 2.0. If a copy of the MPL was not distributed with this file, You can\n# obtain one at http://mozilla.org/MPL/2.0/.\n#\n# END HEADER\n\nfrom __future__ import division, print_function, absolute_import\n\nimport os\nimport sys\n\nimport setuptools\n\n\ndef local_file(name):\n return os.path.relpath(os.path.join(os.path.dirname(__file__), name))\n\n\nSOURCE = local_file('src')\nREADME = local_file('README.rst')\n\n\n# Assignment to placate pyflakes. The actual version is from the exec that\n# follows.\n__version__ = None\n\nwith open(local_file('src/hypothesis/version.py')) as o:\n exec(o.read())\n\nassert __version__ is not None\n\n\nextras = {\n 'datetime': ['pytz'],\n 'pytz': ['pytz'],\n 'fakefactory': ['Faker>=0.7'],\n 'numpy': ['numpy>=1.9.0'],\n 'pytest': ['pytest>=2.8.0'],\n}\n\n# Django 2 only supports Python 3, but doesn't have any python_requires\n# markers in its setup.py --- so \"pip install django\" just fails in\n# Python 2. So rather than relying on pip, we pin the version of\n# Django on Python 2 ourselves.\n#\n# See https://github.com/HypothesisWorks/hypothesis-python/pull/1008\nif sys.version_info[0] < 3:\n django_major_pin = '<2'\nelse:\n django_major_pin = '<3'\n\n# We only support the releases of Django that are supported by the Django\n# core team. See https://www.djangoproject.com/download/#supported-versions\n#\n# New versions of setuptools allow us to set very precise pins; older versions\n# of setuptools are coarser.\nmajor_setuptools_version = int(setuptools.__version__.split('.')[0])\nif major_setuptools_version >= 8:\n django_minor_pin = '>=1.8,!=1.9.*,!=1.10.*'\nelse:\n django_minor_pin = '>=1.8'\n\ndjango_pin = 'django%s,%s' % (django_minor_pin, django_major_pin)\nextras['django'] = ['pytz', django_pin]\n\nextras['faker'] = extras['fakefactory']\n\nextras['all'] = sorted(sum(extras.values(), []))\n\nextras[\":python_version == '2.7'\"] = ['enum34']\n\ninstall_requires = ['attrs>=16.0.0', 'coverage']\n\nif sys.version_info[0] < 3:\n install_requires.append('enum34')\n\nsetuptools.setup(\n name='hypothesis',\n version=__version__,\n author='David R. 
MacIver',\n author_email='[email protected]',\n packages=setuptools.find_packages(SOURCE),\n package_dir={'': SOURCE},\n url='https://github.com/HypothesisWorks/hypothesis-python',\n license='MPL v2',\n description='A library for property based testing',\n zip_safe=False,\n extras_require=extras,\n install_requires=install_requires,\n python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',\n 'Operating System :: Unix',\n 'Operating System :: POSIX',\n 'Operating System :: Microsoft :: Windows',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy',\n 'Topic :: Software Development :: Testing',\n ],\n entry_points={\n 'pytest11': ['hypothesispytest = hypothesis.extra.pytestplugin'],\n },\n long_description=open(README).read(),\n)\n", "path": "setup.py" } ]
diff --git a/RELEASE.rst b/RELEASE.rst new file mode 100644 index 0000000000..1832eb2a69 --- /dev/null +++ b/RELEASE.rst @@ -0,0 +1,8 @@ +RELEASE_TYPE: patch + +This release fixes a dependency problem. It was possible to install +Hypothesis with an old version of :pypi:`attrs`, which would throw a +``TypeError`` as soon as you tried to import hypothesis. Specifically, you +need attrs 16.0.0 or newer. + +Hypothesis will now require the correct version of attrs when installing. diff --git a/setup.py b/setup.py index bf106d66b4..a493544878 100644 --- a/setup.py +++ b/setup.py @@ -80,7 +80,7 @@ def local_file(name): extras[":python_version == '2.7'"] = ['enum34'] -install_requires = ['attrs', 'coverage'] +install_requires = ['attrs>=16.0.0', 'coverage'] if sys.version_info[0] < 3: install_requires.append('enum34')
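A minimal reproduction of the incompatibility the new pin guards against; it mirrors the `Entry` class from `hypothesis.internal.cache` shown in the traceback. On attrs older than 16.0.0 (where `attr.s` does not accept `slots=`) the decorator raises the `TypeError` above, while on attrs >= 16.0.0 it imports and runs cleanly.

```python
import attr


# On attrs < 16.0.0 this line raises:
#   TypeError: attributes() got an unexpected keyword argument 'slots'
# which is exactly the failure seen when importing hypothesis.
@attr.s(slots=True)
class Entry(object):
    key = attr.ib()
    value = attr.ib()


print(Entry(key="k", value=1))  # Entry(key='k', value=1) on attrs >= 16.0.0
```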
sktime__sktime-5287
[BUG] Bug in the imputer class. Fit and transform ignore the parameter for y
The `fit` and `transform` functions of the `Imputer` class in sktime ignore the input parameter for `y`. Upon debugging, it was found that `y` is always `None` and cannot be changed.

**To Reproduce**

```python
import lightgbm as lgb

from sktime.datasets import load_airline
from sktime.forecasting.compose import YfromX
from sktime.transformations.series.impute import Imputer

x_train = load_airline()
y_train = load_airline()

model_reg = YfromX(lgb.LGBMRegressor())
model_reg.fit(x_train, y_train)

transformer = Imputer(method="forecaster", forecaster=model_reg)
transformer.fit(y_train, y=x_train)
y_train_imputed = transformer.transform(y_train, y=x_train)
```

**Expected behavior**
The `fit` and `transform` functions of the `Imputer` class should accept the input parameter for `y` and use it as exogenous regressors for the LGBMRegressor YfromX forecaster model.

**Additional context**
I am using the editable development version of sktime, and the issue persists in this version.

```
System: macOS 12.5.1
Python dependencies:
pandas 12.1
numpy 1.25.2
lightgbm 4.0.0
sklearn 1.3.0
```
[ { "content": "#!/usr/bin/env python3 -u\n# copyright: sktime developers, BSD-3-Clause License (see LICENSE file)\n\"\"\"Transformer to impute missing values in series.\"\"\"\n\n__author__ = [\"aiwalter\"]\n__all__ = [\"Imputer\"]\n\nimport numpy as np\nimport pandas as pd\nfrom sklearn.utils import check_random_state\n\nfrom sktime.forecasting.base import ForecastingHorizon\nfrom sktime.forecasting.trend import PolynomialTrendForecaster\nfrom sktime.transformations.base import BaseTransformer\n\n\nclass Imputer(BaseTransformer):\n \"\"\"Missing value imputation.\n\n The Imputer transforms input series by replacing missing values according\n to an imputation strategy specified by `method`.\n\n Parameters\n ----------\n method : str, default=\"drift\"\n Method to fill the missing values.\n\n * \"drift\" : drift/trend values by sktime.PolynomialTrendForecaster(degree=1)\n first, X in transform() is filled with ffill then bfill\n then PolynomialTrendForecaster(degree=1) is fitted to filled X, and\n predict values are queried at indices which had missing values\n * \"linear\" : linear interpolation, uses pd.Series.interpolate()\n WARNING: This method can not extrapolate, so it is fitted always on the\n data given to transform().\n * \"nearest\" : use nearest value, uses pd.Series.interpolate()\n * \"constant\" : same constant value (given in arg value) for all NaN\n * \"mean\" : pd.Series.mean() of *fit* data\n * \"median\" : pd.Series.median() of *fit* data\n * \"backfill\" ot \"bfill\" : adapted from pd.Series.fillna()\n * \"pad\" or \"ffill\" : adapted from pd.Series.fillna()\n * \"random\" : random values between pd.Series.min() and .max() of *fit* data\n if pd.Series dtype is int, sample is uniform discrete\n if pd.Series dtype is float, sample is uniform continuous\n * \"forecaster\" : use an sktime Forecaster, given in param forecaster.\n First, X in *fit* is filled with ffill then bfill\n then forecaster is fitted to filled X, and *predict* values are queried\n at indices of X data in *transform* which had missing values\n For the following methods, the train data is used to fit them:\n \"drift\", \"mean\", \"median\", \"random\". For all other methods, the\n transform data is sufficient to compute the impute values.\n\n missing_values : int/float/str, default=None\n The placeholder for the missing values. All occurrences of\n missing_values will be imputed, in addition to np.nan.\n If None, then only np.nan values are imputed.\n value : int/float, default=None\n Value to use to fill missing values when method=\"constant\".\n forecaster : Any Forecaster based on sktime.BaseForecaster, default=None\n Use a given Forecaster to impute by insample predictions when\n method=\"forecaster\". Before fitting, missing data is imputed with\n method=\"ffill\" or \"bfill\" as heuristic. 
in case of multivariate X,\n the forecaster is applied separete to each column like a\n ColumnEnsembleForecaster.\n random_state : int/float/str, optional\n Value to set random.seed() if method=\"random\", default None\n\n Examples\n --------\n >>> from sktime.transformations.series.impute import Imputer\n >>> from sktime.datasets import load_airline\n >>> from sktime.forecasting.model_selection import temporal_train_test_split\n >>> y = load_airline()\n >>> y_train, y_test = temporal_train_test_split(y)\n >>> transformer = Imputer(method=\"drift\")\n >>> transformer.fit(y_train)\n Imputer(...)\n >>> y_test.iloc[3] = np.nan\n >>> y_hat = transformer.transform(y_test)\n \"\"\"\n\n _tags = {\n \"scitype:transform-input\": \"Series\",\n # what is the scitype of X: Series, or Panel\n \"scitype:transform-output\": \"Series\",\n # what scitype is returned: Primitives, Series, Panel\n \"scitype:instancewise\": True, # is this an instance-wise transform?\n \"X_inner_mtype\": [\"pd.DataFrame\"],\n # which mtypes do _fit/_predict support for X?\n \"y_inner_mtype\": \"None\", # which mtypes do _fit/_predict support for y?\n \"fit_is_empty\": False,\n \"handles-missing-data\": True,\n \"skip-inverse-transform\": True,\n \"capability:inverse_transform\": True,\n \"univariate-only\": False,\n \"capability:missing_values:removes\": True,\n # is transform result always guaranteed to contain no missing values?\n \"remember_data\": False, # remember all data seen as _X\n }\n\n def __init__(\n self,\n method=\"drift\",\n random_state=None,\n value=None,\n forecaster=None,\n missing_values=None,\n ):\n self.method = method\n self.missing_values = missing_values\n self.value = value\n self.forecaster = forecaster\n self.random_state = random_state\n super().__init__()\n\n # these methods require self._X remembered in _fit and _update\n if method in [\"drift\", \"forecaster\", \"random\"]:\n self.set_tags(**{\"remember_data\": True})\n\n # these methods can be applied to multi-index frames without vectorization or\n # by using an efficient pandas native method\n if method in [\n \"constant\",\n \"mean\",\n \"median\",\n \"backfill\",\n \"bfill\",\n \"pad\",\n \"ffill\",\n ]:\n self.set_tags(\n **{\n \"X_inner_mtype\": [\n \"pd.DataFrame\",\n \"pd-multiindex\",\n \"pd_multiindex_hier\",\n ]\n }\n )\n\n def _fit(self, X, y=None):\n \"\"\"Fit transformer to X and y.\n\n private _fit containing the core logic, called from fit\n\n Parameters\n ----------\n X : Series or Panel of mtype X_inner_mtype\n if X_inner_mtype is list, _fit must support all types in it\n Data to fit transform to\n y : Series or Panel of mtype y_inner_mtype, default=None\n Additional data, e.g., labels for transformation\n\n Returns\n -------\n self: reference to self\n \"\"\"\n self._check_method()\n # all methods of Imputer that are actually doing a fit are\n # implemented here. 
Some methods don't need to fit, so they are just\n # implemented in _transform\n\n index = X.index\n if isinstance(index, pd.MultiIndex):\n X_grouped = X.groupby(level=list(range(index.nlevels - 1)))\n if self.method == \"mean\":\n self._mean = X_grouped.mean()\n elif self.method == \"median\":\n self._median = X_grouped.median()\n else:\n if self.method in [\"drift\", \"forecaster\"]:\n self._y = y.copy() if y is not None else None\n if self.method == \"drift\":\n self._forecaster = PolynomialTrendForecaster(degree=1)\n elif self.method == \"forecaster\":\n self._forecaster = self.forecaster.clone()\n elif self.method == \"mean\":\n self._mean = X.mean()\n elif self.method == \"median\":\n self._median = X.median()\n\n def _transform(self, X, y=None):\n \"\"\"Transform X and return a transformed version.\n\n private _transform containing the core logic, called from transform\n\n Parameters\n ----------\n X : pd.Series or pd.DataFrame\n Data to be transformed\n y : ignored argument for interface compatibility\n Additional data, e.g., labels for transformation\n\n Returns\n -------\n X : pd.Series or pd.DataFrame, same type as X\n transformed version of X\n \"\"\"\n X = X.copy()\n\n # replace missing_values with np.nan\n if self.missing_values:\n X = X.replace(to_replace=self.missing_values, value=np.nan)\n\n if not _has_missing_values(X):\n return X\n\n index = X.index\n\n if self.method == \"random\":\n for col in X.columns:\n isna = X[col].isna()\n X.loc[isna, col] = self._create_random_distribution(X[col])(isna.sum())\n return X\n elif self.method == \"constant\":\n return X.fillna(value=self.value)\n elif isinstance(index, pd.MultiIndex):\n X_grouped = X.groupby(level=list(range(index.nlevels - 1)))\n\n if self.method in [\"backfill\", \"bfill\"]:\n X = X_grouped.fillna(method=\"bfill\")\n # fill trailing NAs of panel instances with reverse method\n return X.fillna(method=\"ffill\")\n elif self.method in [\"pad\", \"ffill\"]:\n X = X_grouped.fillna(method=\"ffill\")\n # fill leading NAs of panel instances with reverse method\n return X.fillna(method=\"bfill\")\n elif self.method == \"mean\":\n return X_grouped.fillna(value=self._mean)\n elif self.method == \"median\":\n return X_grouped.fillna(value=self._median)\n else:\n raise AssertionError(\"Code should not be reached\")\n else:\n if self.method in [\"backfill\", \"bfill\", \"pad\", \"ffill\"]:\n X = X.fillna(method=self.method)\n elif self.method == \"drift\":\n X = self._impute_with_forecaster(X, y)\n elif self.method == \"forecaster\":\n X = self._impute_with_forecaster(X, y)\n elif self.method == \"mean\":\n return X.fillna(value=self._mean)\n elif self.method == \"median\":\n return X.fillna(value=self._median)\n elif self.method in [\"nearest\", \"linear\"]:\n X = X.interpolate(method=self.method)\n else:\n raise ValueError(f\"`method`: {self.method} not available.\")\n\n # fill first/last elements of series,\n # as some methods (e.g. 
\"linear\") can't impute those\n X = X.fillna(method=\"ffill\").fillna(method=\"backfill\")\n\n return X\n\n def _check_method(self):\n method = self.method\n if method not in [\n \"mean\",\n \"drift\",\n \"linear\",\n \"nearest\",\n \"constant\",\n \"median\",\n \"backfill\",\n \"bfill\",\n \"pad\",\n \"ffill\",\n \"random\",\n \"forecaster\",\n ]:\n raise ValueError(f\"Given method {method} is not an allowed method.\")\n if (\n self.value is not None\n and method != \"constant\"\n or method == \"constant\"\n and self.value is None\n ):\n raise ValueError(\n \"\"\"Imputing with a value can only be\n used if method=\"constant\" and if parameter \"value\" is not None\"\"\"\n )\n elif (\n self.forecaster is not None\n and method != \"forecaster\"\n or method == \"forecaster\"\n and self.forecaster is None\n ):\n raise ValueError(\n \"\"\"Imputing with a forecaster can only be used if\n method=\\\"forecaster\\\" and if arg forecaster is not None\"\"\"\n )\n else:\n pass\n\n def _create_random_distribution(self, z: pd.Series):\n \"\"\"Create uniform distribution function within boundaries of given series.\n\n The distribution is discrete, if the series contains only int-like values.\n\n Parameters\n ----------\n z : pd.Series\n A series to create a random distribution from\n\n Returns\n -------\n Callable[[Optional[int]], float]\n Random (discrete) uniform distribution between min and max of series\n \"\"\"\n rng = check_random_state(self.random_state)\n if (z.dropna() % 1 == 0).all():\n return lambda size, low=z.min(), high=z.max(): rng.randint(\n low=low, high=high, size=size\n )\n else:\n return lambda size, low=z.min(), high=z.max(): rng.uniform(\n low=low, high=high, size=size\n )\n\n def _impute_with_forecaster(self, X, y):\n \"\"\"Use a given forecaster for imputation by in-sample predictions.\n\n Parameters\n ----------\n X : pd.DataFrame\n Series to impute.\n y : pd.DataFrame\n Exog data for forecaster.\n\n Returns\n -------\n Xt : pd.DataFrame\n Series with imputed values.\n \"\"\"\n for col in X.columns:\n if _has_missing_values(X[col]):\n # define fh based on index of missing values\n na_index = X[col].index[X[col].isna()]\n fh = ForecastingHorizon(values=na_index, is_relative=False)\n\n # fill NaN before fitting with ffill and backfill (heuristic)\n\n self._forecaster.fit(\n y=self._X[col].fillna(method=\"ffill\").fillna(method=\"backfill\"),\n X=self._y[col].fillna(method=\"ffill\").fillna(method=\"backfill\")\n if self._y is not None\n else None,\n fh=fh,\n )\n\n # replace missing values with predicted values\n X[col][na_index] = self._forecaster.predict(fh=fh, X=y)\n return X\n\n @classmethod\n def get_test_params(cls, parameter_set=\"default\"):\n \"\"\"Return testing parameter settings for the estimator.\n\n Parameters\n ----------\n parameter_set : str, default=\"default\"\n Name of the set of test parameters to return, for use in tests. 
If no\n special parameters are defined for a value, will return `\"default\"` set.\n\n\n Returns\n -------\n params : dict or list of dict, default = {}\n Parameters to create testing instances of the class\n Each dict are parameters to construct an \"interesting\" test instance, i.e.,\n `MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance.\n `create_test_instance` uses the first (or only) dictionary in `params`\n \"\"\"\n from sklearn.linear_model import LinearRegression\n\n from sktime.forecasting.compose import make_reduction\n from sktime.forecasting.trend import TrendForecaster\n\n linear_forecaster = make_reduction(LinearRegression(), strategy=\"multioutput\")\n\n return [\n {\"method\": \"drift\"},\n {\"method\": \"linear\"},\n {\"method\": \"nearest\"},\n {\"method\": \"constant\", \"value\": 1},\n {\"method\": \"median\"},\n {\"method\": \"backfill\"},\n {\"method\": \"bfill\"},\n {\"method\": \"pad\"},\n {\"method\": \"random\"},\n {\"method\": \"forecaster\", \"forecaster\": TrendForecaster()},\n {\"method\": \"forecaster\", \"forecaster\": linear_forecaster},\n ]\n\n\ndef _has_missing_values(X):\n return X.isnull().to_numpy().any()\n", "path": "sktime/transformations/series/impute.py" } ]
[ { "content": "#!/usr/bin/env python3 -u\n# copyright: sktime developers, BSD-3-Clause License (see LICENSE file)\n\"\"\"Transformer to impute missing values in series.\"\"\"\n\n__author__ = [\"aiwalter\"]\n__all__ = [\"Imputer\"]\n\nimport numpy as np\nimport pandas as pd\nfrom sklearn.utils import check_random_state\n\nfrom sktime.forecasting.base import ForecastingHorizon\nfrom sktime.forecasting.trend import PolynomialTrendForecaster\nfrom sktime.transformations.base import BaseTransformer\n\n\nclass Imputer(BaseTransformer):\n \"\"\"Missing value imputation.\n\n The Imputer transforms input series by replacing missing values according\n to an imputation strategy specified by `method`.\n\n Parameters\n ----------\n method : str, default=\"drift\"\n Method to fill the missing values.\n\n * \"drift\" : drift/trend values by sktime.PolynomialTrendForecaster(degree=1)\n first, X in transform() is filled with ffill then bfill\n then PolynomialTrendForecaster(degree=1) is fitted to filled X, and\n predict values are queried at indices which had missing values\n * \"linear\" : linear interpolation, uses pd.Series.interpolate()\n WARNING: This method can not extrapolate, so it is fitted always on the\n data given to transform().\n * \"nearest\" : use nearest value, uses pd.Series.interpolate()\n * \"constant\" : same constant value (given in arg value) for all NaN\n * \"mean\" : pd.Series.mean() of *fit* data\n * \"median\" : pd.Series.median() of *fit* data\n * \"backfill\" ot \"bfill\" : adapted from pd.Series.fillna()\n * \"pad\" or \"ffill\" : adapted from pd.Series.fillna()\n * \"random\" : random values between pd.Series.min() and .max() of *fit* data\n if pd.Series dtype is int, sample is uniform discrete\n if pd.Series dtype is float, sample is uniform continuous\n * \"forecaster\" : use an sktime Forecaster, given in param forecaster.\n First, X in *fit* is filled with ffill then bfill\n then forecaster is fitted to filled X, and *predict* values are queried\n at indices of X data in *transform* which had missing values\n For the following methods, the train data is used to fit them:\n \"drift\", \"mean\", \"median\", \"random\". For all other methods, the\n transform data is sufficient to compute the impute values.\n\n missing_values : int/float/str, default=None\n The placeholder for the missing values. All occurrences of\n missing_values will be imputed, in addition to np.nan.\n If None, then only np.nan values are imputed.\n value : int/float, default=None\n Value to use to fill missing values when method=\"constant\".\n forecaster : Any Forecaster based on sktime.BaseForecaster, default=None\n Use a given Forecaster to impute by insample predictions when\n method=\"forecaster\". Before fitting, missing data is imputed with\n method=\"ffill\" or \"bfill\" as heuristic. 
in case of multivariate X,\n the forecaster is applied separete to each column like a\n ColumnEnsembleForecaster.\n random_state : int/float/str, optional\n Value to set random.seed() if method=\"random\", default None\n\n Examples\n --------\n >>> from sktime.transformations.series.impute import Imputer\n >>> from sktime.datasets import load_airline\n >>> from sktime.forecasting.model_selection import temporal_train_test_split\n >>> y = load_airline()\n >>> y_train, y_test = temporal_train_test_split(y)\n >>> transformer = Imputer(method=\"drift\")\n >>> transformer.fit(y_train)\n Imputer(...)\n >>> y_test.iloc[3] = np.nan\n >>> y_hat = transformer.transform(y_test)\n \"\"\"\n\n _tags = {\n \"scitype:transform-input\": \"Series\",\n # what is the scitype of X: Series, or Panel\n \"scitype:transform-output\": \"Series\",\n # what scitype is returned: Primitives, Series, Panel\n \"scitype:instancewise\": True, # is this an instance-wise transform?\n \"X_inner_mtype\": [\"pd.DataFrame\"],\n # which mtypes do _fit/_predict support for X?\n \"y_inner_mtype\": \"None\", # which mtypes do _fit/_predict support for y?\n \"fit_is_empty\": False,\n \"handles-missing-data\": True,\n \"skip-inverse-transform\": True,\n \"capability:inverse_transform\": True,\n \"univariate-only\": False,\n \"capability:missing_values:removes\": True,\n # is transform result always guaranteed to contain no missing values?\n \"remember_data\": False, # remember all data seen as _X\n }\n\n def __init__(\n self,\n method=\"drift\",\n random_state=None,\n value=None,\n forecaster=None,\n missing_values=None,\n ):\n self.method = method\n self.missing_values = missing_values\n self.value = value\n self.forecaster = forecaster\n self.random_state = random_state\n super().__init__()\n\n # these methods require self._X remembered in _fit and _update\n if method in [\"drift\", \"forecaster\", \"random\"]:\n self.set_tags(**{\"remember_data\": True})\n\n # these methods can be applied to multi-index frames without vectorization or\n # by using an efficient pandas native method\n if method in [\n \"constant\",\n \"mean\",\n \"median\",\n \"backfill\",\n \"bfill\",\n \"pad\",\n \"ffill\",\n ]:\n self.set_tags(\n **{\n \"X_inner_mtype\": [\n \"pd.DataFrame\",\n \"pd-multiindex\",\n \"pd_multiindex_hier\",\n ]\n }\n )\n\n if method in \"forecaster\":\n self.set_tags(**{\"y_inner_mtype\": [\"pd.DataFrame\"]})\n\n def _fit(self, X, y=None):\n \"\"\"Fit transformer to X and y.\n\n private _fit containing the core logic, called from fit\n\n Parameters\n ----------\n X : Series or Panel of mtype X_inner_mtype\n if X_inner_mtype is list, _fit must support all types in it\n Data to fit transform to\n y : Series or Panel of mtype y_inner_mtype, default=None\n Additional data, e.g., labels for transformation\n\n Returns\n -------\n self: reference to self\n \"\"\"\n self._check_method()\n # all methods of Imputer that are actually doing a fit are\n # implemented here. 
Some methods don't need to fit, so they are just\n # implemented in _transform\n\n index = X.index\n if isinstance(index, pd.MultiIndex):\n X_grouped = X.groupby(level=list(range(index.nlevels - 1)))\n if self.method == \"mean\":\n self._mean = X_grouped.mean()\n elif self.method == \"median\":\n self._median = X_grouped.median()\n else:\n if self.method in [\"drift\", \"forecaster\"]:\n self._y = y.copy() if y is not None else None\n if self.method == \"drift\":\n self._forecaster = PolynomialTrendForecaster(degree=1)\n elif self.method == \"forecaster\":\n self._forecaster = self.forecaster.clone()\n elif self.method == \"mean\":\n self._mean = X.mean()\n elif self.method == \"median\":\n self._median = X.median()\n\n def _transform(self, X, y=None):\n \"\"\"Transform X and return a transformed version.\n\n private _transform containing the core logic, called from transform\n\n Parameters\n ----------\n X : pd.Series or pd.DataFrame\n Data to be transformed\n y : ignored argument for interface compatibility\n Additional data, e.g., labels for transformation\n\n Returns\n -------\n X : pd.Series or pd.DataFrame, same type as X\n transformed version of X\n \"\"\"\n X = X.copy()\n\n # replace missing_values with np.nan\n if self.missing_values:\n X = X.replace(to_replace=self.missing_values, value=np.nan)\n\n if not _has_missing_values(X):\n return X\n\n index = X.index\n\n if self.method == \"random\":\n for col in X.columns:\n isna = X[col].isna()\n X.loc[isna, col] = self._create_random_distribution(X[col])(isna.sum())\n return X\n elif self.method == \"constant\":\n return X.fillna(value=self.value)\n elif isinstance(index, pd.MultiIndex):\n X_grouped = X.groupby(level=list(range(index.nlevels - 1)))\n\n if self.method in [\"backfill\", \"bfill\"]:\n X = X_grouped.fillna(method=\"bfill\")\n # fill trailing NAs of panel instances with reverse method\n return X.fillna(method=\"ffill\")\n elif self.method in [\"pad\", \"ffill\"]:\n X = X_grouped.fillna(method=\"ffill\")\n # fill leading NAs of panel instances with reverse method\n return X.fillna(method=\"bfill\")\n elif self.method == \"mean\":\n return X_grouped.fillna(value=self._mean)\n elif self.method == \"median\":\n return X_grouped.fillna(value=self._median)\n else:\n raise AssertionError(\"Code should not be reached\")\n else:\n if self.method in [\"backfill\", \"bfill\", \"pad\", \"ffill\"]:\n X = X.fillna(method=self.method)\n elif self.method == \"drift\":\n X = self._impute_with_forecaster(X, y)\n elif self.method == \"forecaster\":\n X = self._impute_with_forecaster(X, y)\n elif self.method == \"mean\":\n return X.fillna(value=self._mean)\n elif self.method == \"median\":\n return X.fillna(value=self._median)\n elif self.method in [\"nearest\", \"linear\"]:\n X = X.interpolate(method=self.method)\n else:\n raise ValueError(f\"`method`: {self.method} not available.\")\n\n # fill first/last elements of series,\n # as some methods (e.g. 
\"linear\") can't impute those\n X = X.fillna(method=\"ffill\").fillna(method=\"backfill\")\n\n return X\n\n def _check_method(self):\n method = self.method\n if method not in [\n \"mean\",\n \"drift\",\n \"linear\",\n \"nearest\",\n \"constant\",\n \"median\",\n \"backfill\",\n \"bfill\",\n \"pad\",\n \"ffill\",\n \"random\",\n \"forecaster\",\n ]:\n raise ValueError(f\"Given method {method} is not an allowed method.\")\n if (\n self.value is not None\n and method != \"constant\"\n or method == \"constant\"\n and self.value is None\n ):\n raise ValueError(\n \"\"\"Imputing with a value can only be\n used if method=\"constant\" and if parameter \"value\" is not None\"\"\"\n )\n elif (\n self.forecaster is not None\n and method != \"forecaster\"\n or method == \"forecaster\"\n and self.forecaster is None\n ):\n raise ValueError(\n \"\"\"Imputing with a forecaster can only be used if\n method=\\\"forecaster\\\" and if arg forecaster is not None\"\"\"\n )\n else:\n pass\n\n def _create_random_distribution(self, z: pd.Series):\n \"\"\"Create uniform distribution function within boundaries of given series.\n\n The distribution is discrete, if the series contains only int-like values.\n\n Parameters\n ----------\n z : pd.Series\n A series to create a random distribution from\n\n Returns\n -------\n Callable[[Optional[int]], float]\n Random (discrete) uniform distribution between min and max of series\n \"\"\"\n rng = check_random_state(self.random_state)\n if (z.dropna() % 1 == 0).all():\n return lambda size, low=z.min(), high=z.max(): rng.randint(\n low=low, high=high, size=size\n )\n else:\n return lambda size, low=z.min(), high=z.max(): rng.uniform(\n low=low, high=high, size=size\n )\n\n def _impute_with_forecaster(self, X, y):\n \"\"\"Use a given forecaster for imputation by in-sample predictions.\n\n Parameters\n ----------\n X : pd.DataFrame\n Series to impute.\n y : pd.DataFrame\n Exog data for forecaster.\n\n Returns\n -------\n Xt : pd.DataFrame\n Series with imputed values.\n \"\"\"\n for col in X.columns:\n if _has_missing_values(X[col]):\n # define fh based on index of missing values\n na_index = X[col].index[X[col].isna()]\n fh = ForecastingHorizon(values=na_index, is_relative=False)\n\n # fill NaN before fitting with ffill and backfill (heuristic)\n\n self._forecaster.fit(\n y=self._X[col].fillna(method=\"ffill\").fillna(method=\"backfill\"),\n X=self._y[col].fillna(method=\"ffill\").fillna(method=\"backfill\")\n if self._y is not None\n else None,\n fh=fh,\n )\n\n # replace missing values with predicted values\n X[col][na_index] = self._forecaster.predict(fh=fh, X=y)\n return X\n\n @classmethod\n def get_test_params(cls, parameter_set=\"default\"):\n \"\"\"Return testing parameter settings for the estimator.\n\n Parameters\n ----------\n parameter_set : str, default=\"default\"\n Name of the set of test parameters to return, for use in tests. 
If no\n special parameters are defined for a value, will return `\"default\"` set.\n\n\n Returns\n -------\n params : dict or list of dict, default = {}\n Parameters to create testing instances of the class\n Each dict are parameters to construct an \"interesting\" test instance, i.e.,\n `MyClass(**params)` or `MyClass(**params[i])` creates a valid test instance.\n `create_test_instance` uses the first (or only) dictionary in `params`\n \"\"\"\n from sklearn.linear_model import LinearRegression\n\n from sktime.forecasting.compose import make_reduction\n from sktime.forecasting.trend import TrendForecaster\n\n linear_forecaster = make_reduction(LinearRegression(), strategy=\"multioutput\")\n\n return [\n {\"method\": \"drift\"},\n {\"method\": \"linear\"},\n {\"method\": \"nearest\"},\n {\"method\": \"constant\", \"value\": 1},\n {\"method\": \"median\"},\n {\"method\": \"backfill\"},\n {\"method\": \"bfill\"},\n {\"method\": \"pad\"},\n {\"method\": \"random\"},\n {\"method\": \"forecaster\", \"forecaster\": TrendForecaster()},\n {\"method\": \"forecaster\", \"forecaster\": linear_forecaster},\n ]\n\n\ndef _has_missing_values(X):\n return X.isnull().to_numpy().any()\n", "path": "sktime/transformations/series/impute.py" } ]
diff --git a/sktime/transformations/series/impute.py b/sktime/transformations/series/impute.py
index ce3a26bd73a..da623f0f81b 100644
--- a/sktime/transformations/series/impute.py
+++ b/sktime/transformations/series/impute.py
@@ -137,6 +137,9 @@ def __init__(
                 }
             )
 
+        if method in "forecaster":
+            self.set_tags(**{"y_inner_mtype": ["pd.DataFrame"]})
+
     def _fit(self, X, y=None):
         """Fit transformer to X and y.
 
diff --git a/sktime/transformations/series/tests/test_imputer.py b/sktime/transformations/series/tests/test_imputer.py
index 4f38c13201d..d0275454e02 100644
--- a/sktime/transformations/series/tests/test_imputer.py
+++ b/sktime/transformations/series/tests/test_imputer.py
@@ -54,3 +54,24 @@ def test_imputer(method, Z):
     t = Imputer(method=method, forecaster=forecaster, value=value)
     y_hat = t.fit_transform(Z)
     assert not y_hat.isnull().to_numpy().any()
+
+
+def test_imputer_forecaster_y():
+    """Test that forecaster imputer works with y.
+
+    Failure case in bug #5284.
+    """
+    from sklearn.linear_model import LinearRegression
+
+    from sktime.datasets import load_airline
+    from sktime.forecasting.compose import YfromX
+
+    X = load_airline()
+    y = load_airline()
+
+    model_reg = YfromX(LinearRegression())
+    model_reg.fit(X, y)
+    transformer = Imputer(method="forecaster", forecaster=model_reg)
+
+    transformer.fit(X=X, y=y)
+    transformer.transform(X=X, y=y)
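For reference, a usage sketch adapted from the regression test added in the diff above: it exercises `Imputer(method="forecaster")` together with exogenous data `y`, which is the failure case reported as bug #5284. The `YfromX` forecaster and `load_airline` dataset are taken directly from that test.

```python
# Usage sketch adapted from test_imputer_forecaster_y in the diff above:
# fit/transform an Imputer that delegates imputation to a forecaster while
# passing exogenous data y alongside X (the scenario that previously failed).
from sklearn.linear_model import LinearRegression

from sktime.datasets import load_airline
from sktime.forecasting.compose import YfromX
from sktime.transformations.series.impute import Imputer

X = load_airline()
y = load_airline()

model_reg = YfromX(LinearRegression())
model_reg.fit(X, y)

transformer = Imputer(method="forecaster", forecaster=model_reg)
transformer.fit(X=X, y=y)
Xt = transformer.transform(X=X, y=y)  # should run without raising
```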
doccano__doccano-1531
TemplateDoesNotExist Error on start from README instructions

How to reproduce the behaviour
---------
I was following the instructions on the main README to install and start doccano with pip (copied here)

```
pip install doccano
doccano init
doccano createuser --username admin --password pass
doccano webserver --port 8000
```

and then in another terminal

```
doccano task
```

This all looks fine, until I try the next step (visiting in the browser). I get the following error (included the last line of the non-error log for reference)

```
[2021-06-10 09:56:42 -0700] [1046] [INFO] Handling signal: winch
Internal Server Error: /
Traceback (most recent call last):
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/core/handlers/exception.py", line 47, in inner
    response = get_response(request)
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/core/handlers/base.py", line 204, in _get_response
    response = response.render()
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/template/response.py", line 105, in render
    self.content = self.rendered_content
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/template/response.py", line 81, in rendered_content
    template = self.resolve_template(self.template_name)
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/template/response.py", line 63, in resolve_template
    return select_template(template, using=self.using)
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/template/loader.py", line 47, in select_template
    raise TemplateDoesNotExist(', '.join(template_name_list), chain=chain)
django.template.exceptions.TemplateDoesNotExist: index.html
Internal Server Error: /favicon.ico
Traceback (most recent call last):
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/core/handlers/exception.py", line 47, in inner
    response = get_response(request)
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/core/handlers/base.py", line 204, in _get_response
    response = response.render()
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/template/response.py", line 105, in render
    self.content = self.rendered_content
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/template/response.py", line 81, in rendered_content
    template = self.resolve_template(self.template_name)
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/template/response.py", line 63, in resolve_template
    return select_template(template, using=self.using)
  File "/projects/creisle_prj/git/doccano/venv_pipenv/lib/python3.7/site-packages/django/template/loader.py", line 47, in select_template
    raise TemplateDoesNotExist(', '.join(template_name_list), chain=chain)
django.template.exceptions.TemplateDoesNotExist: index.html
```

Your Environment
---------
* Operating System: centos07
* Python Version Used: 3.7.2 (virtual environment)
* When you install doccano: 2021-Jun-10 (Today)
* How did you install doccano (Heroku button etc): pip
[ { "content": "import argparse\nimport multiprocessing\nimport os\nimport platform\nimport subprocess\nimport sys\n\nfrom .app.celery import app\nbase = os.path.abspath(os.path.dirname(__file__))\nsys.path.append(base)\nmanage_path = os.path.join(base, 'manage.py')\nparser = argparse.ArgumentParser(description='doccano, text annotation for machine learning practitioners.')\n\n\ndef number_of_workers():\n return (multiprocessing.cpu_count() * 2) + 1\n\n\ndef run_on_nix(args):\n import gunicorn.app.base\n import gunicorn.util\n\n class StandaloneApplication(gunicorn.app.base.BaseApplication):\n\n def __init__(self, options=None):\n self.options = options or {}\n super().__init__()\n\n def load_config(self):\n config = {key: value for key, value in self.options.items()\n if key in self.cfg.settings and value is not None}\n for key, value in config.items():\n self.cfg.set(key.lower(), value)\n\n def load(self):\n return gunicorn.util.import_app('app.wsgi')\n\n options = {\n 'bind': '%s:%s' % ('0.0.0.0', args.port),\n 'workers': number_of_workers(),\n 'chdir': base\n }\n StandaloneApplication(options).run()\n\n\ndef run_on_windows(args):\n from waitress import serve\n from app.wsgi import application\n serve(application, port=args.port)\n\n\ndef command_db_init(args):\n print('Setup Database.')\n subprocess.call([sys.executable, manage_path, 'wait_for_db'], shell=False)\n subprocess.call([sys.executable, manage_path, 'migrate'], shell=False)\n subprocess.call([sys.executable, manage_path, 'create_roles'], shell=False)\n\n\ndef command_user_create(args):\n print('Create admin user.')\n subprocess.call([sys.executable, manage_path, 'create_admin',\n '--username', args.username,\n '--password', args.password,\n '--email', args.email,\n '--noinput'], shell=False)\n\n\ndef command_run_webserver(args):\n print(f'Starting server with port {args.port}.')\n if platform.system() == 'Windows':\n run_on_windows(args)\n else:\n run_on_nix(args)\n\n\ndef command_run_task_queue(args):\n print('Starting task queue.')\n app.worker_main(\n argv=[\n '--app=app',\n '--workdir={}'.format(base),\n 'worker',\n '--loglevel=info',\n '--concurrency={}'.format(args.concurrency),\n ]\n )\n\n\ndef command_help(args):\n print(parser.parse_args([args.command, '--help']))\n\n\ndef main():\n # Create a command line parser.\n subparsers = parser.add_subparsers()\n\n # Create a parser for db initialization.\n parser_init = subparsers.add_parser('init', help='see `init -h`')\n\n parser_init.set_defaults(handler=command_db_init)\n\n # Create a parser for user creation.\n parser_create_user = subparsers.add_parser('createuser', help='see `createuser -h`')\n parser_create_user.add_argument('--username', type=str, default='admin', help='admin username')\n parser_create_user.add_argument('--password', type=str, default='password', help='admin password')\n parser_create_user.add_argument('--email', type=str, default='[email protected]', help='admin email')\n parser_create_user.set_defaults(handler=command_user_create)\n\n # Create a parser for web server.\n parser_server = subparsers.add_parser('webserver', help='see `webserver -h`')\n parser_server.add_argument('--port', type=int, default=8000, help='port number')\n parser_server.set_defaults(handler=command_run_webserver)\n\n # Create a parser for task queue.\n parser_queue = subparsers.add_parser('task', help='see `task -h`')\n parser_queue.add_argument('--concurrency', type=int, default=2, help='concurrency')\n parser_queue.set_defaults(handler=command_run_task_queue)\n\n # Create 
a parser for help.\n parser_help = subparsers.add_parser('help', help='see `help -h`')\n parser_help.add_argument('command', help='command name which help is shown')\n parser_help.set_defaults(handler=command_help)\n\n # Dispatch handler.\n args = parser.parse_args()\n if hasattr(args, 'handler'):\n args.handler(args)\n else:\n # If specified unknown command, show help.\n parser.print_help()\n\n\nif __name__ == '__main__':\n main()\n", "path": "backend/cli.py" } ]
[ { "content": "import argparse\nimport multiprocessing\nimport os\nimport platform\nimport subprocess\nimport sys\n\nfrom .app.celery import app\nos.environ['DEBUG'] = 'False'\nbase = os.path.abspath(os.path.dirname(__file__))\nsys.path.append(base)\nmanage_path = os.path.join(base, 'manage.py')\nparser = argparse.ArgumentParser(description='doccano, text annotation for machine learning practitioners.')\n\n\ndef number_of_workers():\n return (multiprocessing.cpu_count() * 2) + 1\n\n\ndef run_on_nix(args):\n import gunicorn.app.base\n import gunicorn.util\n\n class StandaloneApplication(gunicorn.app.base.BaseApplication):\n\n def __init__(self, options=None):\n self.options = options or {}\n super().__init__()\n\n def load_config(self):\n config = {key: value for key, value in self.options.items()\n if key in self.cfg.settings and value is not None}\n for key, value in config.items():\n self.cfg.set(key.lower(), value)\n\n def load(self):\n return gunicorn.util.import_app('app.wsgi')\n\n options = {\n 'bind': '%s:%s' % ('0.0.0.0', args.port),\n 'workers': number_of_workers(),\n 'chdir': base\n }\n StandaloneApplication(options).run()\n\n\ndef run_on_windows(args):\n from waitress import serve\n from app.wsgi import application\n serve(application, port=args.port)\n\n\ndef command_db_init(args):\n print('Setup Database.')\n subprocess.call([sys.executable, manage_path, 'wait_for_db'], shell=False)\n subprocess.call([sys.executable, manage_path, 'migrate'], shell=False)\n subprocess.call([sys.executable, manage_path, 'create_roles'], shell=False)\n\n\ndef command_user_create(args):\n print('Create admin user.')\n subprocess.call([sys.executable, manage_path, 'create_admin',\n '--username', args.username,\n '--password', args.password,\n '--email', args.email,\n '--noinput'], shell=False)\n\n\ndef command_run_webserver(args):\n print(f'Starting server with port {args.port}.')\n if platform.system() == 'Windows':\n run_on_windows(args)\n else:\n run_on_nix(args)\n\n\ndef command_run_task_queue(args):\n print('Starting task queue.')\n app.worker_main(\n argv=[\n '--app=app',\n '--workdir={}'.format(base),\n 'worker',\n '--loglevel=info',\n '--concurrency={}'.format(args.concurrency),\n ]\n )\n\n\ndef command_help(args):\n print(parser.parse_args([args.command, '--help']))\n\n\ndef main():\n # Create a command line parser.\n subparsers = parser.add_subparsers()\n\n # Create a parser for db initialization.\n parser_init = subparsers.add_parser('init', help='see `init -h`')\n\n parser_init.set_defaults(handler=command_db_init)\n\n # Create a parser for user creation.\n parser_create_user = subparsers.add_parser('createuser', help='see `createuser -h`')\n parser_create_user.add_argument('--username', type=str, default='admin', help='admin username')\n parser_create_user.add_argument('--password', type=str, default='password', help='admin password')\n parser_create_user.add_argument('--email', type=str, default='[email protected]', help='admin email')\n parser_create_user.set_defaults(handler=command_user_create)\n\n # Create a parser for web server.\n parser_server = subparsers.add_parser('webserver', help='see `webserver -h`')\n parser_server.add_argument('--port', type=int, default=8000, help='port number')\n parser_server.set_defaults(handler=command_run_webserver)\n\n # Create a parser for task queue.\n parser_queue = subparsers.add_parser('task', help='see `task -h`')\n parser_queue.add_argument('--concurrency', type=int, default=2, help='concurrency')\n 
parser_queue.set_defaults(handler=command_run_task_queue)\n\n # Create a parser for help.\n parser_help = subparsers.add_parser('help', help='see `help -h`')\n parser_help.add_argument('command', help='command name which help is shown')\n parser_help.set_defaults(handler=command_help)\n\n # Dispatch handler.\n args = parser.parse_args()\n if hasattr(args, 'handler'):\n args.handler(args)\n else:\n # If specified unknown command, show help.\n parser.print_help()\n\n\nif __name__ == '__main__':\n main()\n", "path": "backend/cli.py" } ]
diff --git a/backend/cli.py b/backend/cli.py
index b6ada48835..90c23915b8 100644
--- a/backend/cli.py
+++ b/backend/cli.py
@@ -6,6 +6,7 @@
 import sys
 
 from .app.celery import app
+os.environ['DEBUG'] = 'False'
 base = os.path.abspath(os.path.dirname(__file__))
 sys.path.append(base)
 manage_path = os.path.join(base, 'manage.py')
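As a rough sketch of what the one-line fix above achieves (assuming, as the patch implies, that doccano's `app/settings.py` reads `DEBUG` from the environment): forcing `DEBUG` off before any Django code is imported lets the template loader find the prebuilt `index.html` shipped in the pip package.

```python
# Minimal sketch of the fix, not doccano's documented API: the variable must be
# set before Django settings are loaded (i.e. before app.wsgi / manage.py is
# imported), which is why the patch places it at the very top of cli.py.
import os

os.environ['DEBUG'] = 'False'
```

On an unpatched install, something like `DEBUG=False doccano webserver --port 8000` from the shell should have the same effect, again assuming the settings module reads that variable.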
huggingface__optimum-425
AttributeError: type object 'ORTModelForCustomTasks' has no attribute 'export_feature'

### System Info

```shell
Mac OS X
Python 3.9.10
transformers 4.22.2
onnxruntime 1.12.1
onnx 1.12.0
torch 1.12.1
```

### Who can help?

@lewtun, @michaelbenayoun @JingyaHuang, @echarlaix

### Information

- [ ] The official example scripts
- [X] My own modified scripts

### Tasks

- [ ] An officially supported task in the `examples` folder (such as GLUE/SQuAD, ...)
- [ ] My own task or dataset (give details below)

### Reproduction

```python
from optimum.onnxruntime import ORTModelForCustomTasks

ort_model = ORTModelForCustomTasks.from_pretrained('microsoft/mdeberta-v3-base', from_transformers=True)
```

### Expected behavior

### Possible solution

It works if we add `export_feature`:

```python
from optimum.onnxruntime import ORTModelForCustomTasks

class ORTModelFixed(ORTModelForCustomTasks):
    export_feature = 'default'

    def __init__(self, model=None, config=None, **kwargs):
        super().__init__(model=model, config=config, **kwargs)

ort_model = ORTModelFixed.from_pretrained('microsoft/mdeberta-v3-base', from_transformers=True)
```

It will then work without the above-mentioned AttributeError.
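A minimal, self-contained sketch of why the reported error occurs (the class and method names below are illustrative stand-ins, not optimum's actual API): `_from_transformers` in `modeling_ort.py`, shown further down, guards on `cls.export_feature is not None`, so a subclass that never defines the class attribute raises `AttributeError` before the Hub fallback can run.

```python
# Illustrative sketch only: mirrors the `if cls.export_feature is not None`
# guard from modeling_ort.py using hypothetical class names.
class ORTModelSketch:
    @classmethod
    def _from_transformers(cls, model_id):
        if cls.export_feature is not None:  # AttributeError if never defined
            task = cls.export_feature
        else:
            task = "pipeline-tag-looked-up-from-the-hub"
        return task


class ORTModelForCustomTasksSketch(ORTModelSketch):
    pass  # like the reported class: no export_feature attribute defined


try:
    ORTModelForCustomTasksSketch._from_transformers("microsoft/mdeberta-v3-base")
except AttributeError as err:
    # type object 'ORTModelForCustomTasksSketch' has no attribute 'export_feature'
    print(err)
```

This also explains why the reporter's workaround, defining `export_feature = 'default'` on a subclass, avoids the error.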
[ { "content": "# Copyright 2022 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nimport os\nimport shutil\nfrom pathlib import Path\nfrom typing import Any, Dict, List, Optional, Union\n\nimport torch\nfrom transformers import (\n AutoConfig,\n AutoModel,\n AutoModelForCausalLM,\n AutoModelForImageClassification,\n AutoModelForMultipleChoice,\n AutoModelForQuestionAnswering,\n AutoModelForSequenceClassification,\n AutoModelForTokenClassification,\n PretrainedConfig,\n)\nfrom transformers.file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, default_cache_path\nfrom transformers.generation_utils import GenerationMixin\nfrom transformers.modeling_outputs import (\n BaseModelOutput,\n CausalLMOutputWithCrossAttentions,\n ImageClassifierOutput,\n ModelOutput,\n MultipleChoiceModelOutput,\n QuestionAnsweringModelOutput,\n SequenceClassifierOutput,\n TokenClassifierOutput,\n)\nfrom transformers.onnx import FeaturesManager, export\nfrom transformers.onnx.utils import get_preprocessor\n\nimport onnxruntime as ort\nfrom huggingface_hub import HfApi, hf_hub_download\n\nfrom ..modeling_base import FROM_PRETRAINED_START_DOCSTRING, OptimizedModel\nfrom .utils import ONNX_WEIGHTS_NAME, get_device_for_provider, get_provider_for_device\n\n\nlogger = logging.getLogger(__name__)\n\n\n_TOKENIZER_FOR_DOC = \"AutoTokenizer\"\n_FEATURE_EXTRACTOR_FOR_DOC = \"AutoFeatureExtractor\"\n\nONNX_MODEL_START_DOCSTRING = r\"\"\"\n This model inherits from [~`onnxruntime.modeling_ort.ORTModel`]. Check the superclass documentation for the generic methods the\n library implements for all its model (such as downloading or saving)\n Parameters:\n config (`transformers.PretrainedConfig`): [PretrainedConfig](https://huggingface.co/docs/transformers/main_classes/configuration#transformers.PretrainedConfig) is the Model configuration class with all the parameters of the model.\n Initializing with a config file does not load the weights associated with the model, only the\n configuration. Check out the [`~onnxruntime.modeling_ort.ORTModel.from_pretrained`] method to load the model weights.\n model (`onnxruntime.InferenceSession`): [onnxruntime.InferenceSession](https://onnxruntime.ai/docs/api/python/api_summary.html#inferencesession) is the main class used to run a model. 
Check out the [`~onnxruntime.modeling_ort.ORTModel.load_model`] method for more information.\n\"\"\"\n\nONNX_TEXT_INPUTS_DOCSTRING = r\"\"\"\n Args:\n input_ids (`torch.Tensor` of shape `({0})`):\n Indices of input sequence tokens in the vocabulary.\n Indices can be obtained using [`AutoTokenizer`](https://huggingface.co/docs/transformers/autoclass_tutorial#autotokenizer).\n See [`PreTrainedTokenizer.encode`](https://huggingface.co/docs/transformers/main_classes/tokenizer#transformers.PreTrainedTokenizerBase.encode) and\n [`PreTrainedTokenizer.__call__`](https://huggingface.co/docs/transformers/main_classes/tokenizer#transformers.PreTrainedTokenizerBase.__call__) for details.\n [What are input IDs?](https://huggingface.co/docs/transformers/glossary#input-ids)\n attention_mask (`torch.Tensor` of shape `({0})`, *optional*):\n Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:\n - 1 for tokens that are **not masked**,\n - 0 for tokens that are **masked**.\n [What are attention masks?](https://huggingface.co/docs/transformers/glossary#attention-mask)\n token_type_ids (`torch.Tensor` of shape `({0})`, *optional*):\n Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`:\n - 1 for tokens that are **sentence A**,\n - 0 for tokens that are **sentence B**.\n [What are token type IDs?](https://huggingface.co/docs/transformers/glossary#token-type-ids)\n\"\"\"\n\nONNX_IMAGE_INPUTS_DOCSTRING = r\"\"\"\n Args:\n pixel_values (`torch.Tensor` of shape `({0})`):\n Pixel values corresponding to the images in the current batch.\n Pixel values can be obtained from encoded images using [`AutoFeatureExtractor`](https://huggingface.co/docs/transformers/autoclass_tutorial#autofeatureextractor).\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Base ORTModel class for implementing models using ONNX Runtime. 
The ORTModel implements generic methods for interacting\n with the Hugging Face Hub as well as exporting vanilla transformers models to ONNX using `transformers.onnx` toolchain.\n The ORTModel implements additionally generic methods for optimizing and quantizing Onnx models.\n \"\"\",\n)\nclass ORTModel(OptimizedModel):\n base_model_prefix = \"onnx_model\"\n auto_model_class = AutoModel\n\n def __init__(self, model: ort.InferenceSession = None, config=None, **kwargs):\n self.model = model\n self.config = config\n self.model_save_dir = kwargs.get(\"model_save_dir\", None)\n self.latest_model_name = kwargs.get(\"latest_model_name\", \"model.onnx\")\n self.providers = model.get_providers()\n self._device = get_device_for_provider(self.providers[0])\n\n if self._device == None:\n logger.warning(\n f\"ORTModel outputs will be sent to CPU as the device could not be inferred from the execution provider {self.providers[0]}.\"\n f\" Use `ort_model.to()` to send the outputs to the wanted device.\"\n )\n\n # registers the ORTModelForXXX classes into the transformers AutoModel classes\n # to avoid warnings when create a pipeline https://github.com/huggingface/transformers/blob/cad61b68396a1a387287a8e2e2fef78a25b79383/src/transformers/pipelines/base.py#L863\n AutoConfig.register(self.base_model_prefix, AutoConfig)\n self.auto_model_class.register(AutoConfig, self.__class__)\n\n @property\n def device(self) -> torch.device:\n \"\"\"\n `torch.device`: The device on which the module is (assuming that all the module parameters are on the same\n device).\n \"\"\"\n return self._device\n\n @device.setter\n def device(self, value: torch.device):\n self._device = value\n\n def to(self, device: torch.device):\n \"\"\"\n Changes the ONNX Runtime provider according to the device.\n \"\"\"\n # convert string device input (ie. \"cuda\") to torch.device\n if type(device) == str:\n device = torch.device(device)\n\n self.device = device\n provider = get_provider_for_device(self.device)\n self.model.set_providers([provider])\n self.providers = self.model.get_providers()\n return self\n\n def forward(self, *args, **kwargs):\n raise NotImplementedError\n\n @staticmethod\n def load_model(\n path: Union[str, Path],\n provider: Optional[str] = \"CPUExecutionProvider\",\n session_options: Optional[ort.SessionOptions] = None,\n provider_options: Optional[Dict] = None,\n **kwargs\n ):\n \"\"\"\n Loads an ONNX Inference session with a given provider. Default provider is `CPUExecutionProvider` to match the default behaviour in PyTorch/TensorFlow/JAX.\n\n Arguments:\n path (`str` or `Path`):\n Directory from which to load the model.\n provider (`str`, *optional*):\n ONNX Runtime provider to use for loading the model. See https://onnxruntime.ai/docs/execution-providers/\n for possible providers. Defaults to `CPUExecutionProvider`.\n session_options (`onnxruntime.SessionOptions`, *optional*):\n ONNX Runtime session options to use for loading the model. Defaults to `None`.\n provider_options (`Dict`, **optional**):\n Provider option dictionaries corresponding to the provider used. See available options\n for each provider: https://onnxruntime.ai/docs/api/c/group___global.html . 
Defaults to `None`.\n \"\"\"\n available_providers = ort.get_available_providers()\n if provider not in available_providers:\n raise ValueError(\n f\"Asked to use {provider} as an ONNX Runtime execution provider, but the available execution providers are {available_providers}.\"\n )\n\n # `providers` list must of be of the same length as `provider_options` list\n return ort.InferenceSession(\n path,\n providers=[provider],\n sess_options=session_options,\n provider_options=None if provider_options is None else [provider_options],\n )\n\n def _save_pretrained(self, save_directory: Union[str, Path], file_name: Optional[str] = None, **kwargs):\n \"\"\"\n Saves a model and its configuration file to a directory, so that it can be re-loaded using the\n [`~optimum.onnxruntime.modeling_ort.ORTModel.from_pretrained`] class method. It will always save the latest_model_name.\n Arguments:\n save_directory (`str` or `Path`):\n Directory where to save the model file.\n file_name(`str`, *optional*):\n Overwrites the default model file name from `\"model.onnx\"` to `file_name`. This allows you to save the model with\n a different name.\n \"\"\"\n model_file_name = file_name if file_name is not None else ONNX_WEIGHTS_NAME\n\n src_path = self.model_save_dir.joinpath(self.latest_model_name)\n dst_path = Path(save_directory).joinpath(model_file_name)\n shutil.copyfile(src_path, dst_path)\n\n @classmethod\n @add_start_docstrings(FROM_PRETRAINED_START_DOCSTRING)\n def from_pretrained(\n cls,\n model_id: Union[str, Path],\n from_transformers: bool = False,\n force_download: bool = False,\n use_auth_token: Optional[str] = None,\n cache_dir: Optional[str] = None,\n provider: Optional[str] = \"CPUExecutionProvider\",\n session_options: Optional[ort.SessionOptions] = None,\n provider_options: Optional[Dict] = None,\n *args,\n **kwargs\n ):\n \"\"\"\n provider (`str`, *optional*):\n ONNX Runtime providers to use for loading the model. See https://onnxruntime.ai/docs/execution-providers/ for\n possible providers. Defaults to `CPUExecutionProvider`.\n session_options (`onnxruntime.SessionOptions`, *optional*),:\n ONNX Runtime session options to use for loading the model. Defaults to `None`.\n\n Returns:\n `ORTModel`: The loaded ORTModel model.\n \"\"\"\n return super().from_pretrained(\n model_id,\n from_transformers,\n force_download,\n use_auth_token,\n cache_dir,\n provider=provider,\n session_options=session_options,\n provider_options=provider_options,\n *args,\n **kwargs,\n )\n\n @classmethod\n def _from_pretrained(\n cls,\n model_id: Union[str, Path],\n use_auth_token: Optional[Union[bool, str, None]] = None,\n revision: Optional[Union[str, None]] = None,\n force_download: bool = False,\n cache_dir: Optional[str] = None,\n file_name: Optional[str] = None,\n **kwargs,\n ):\n \"\"\"\n Loads a model and its configuration file from a directory or the HF Hub.\n Implements: https://github.com/huggingface/huggingface_hub/blob/e67de48368bc1843e40afc1cc9d236402b9609ee/src/huggingface_hub/hub_mixin.py#L73\n Arguments:\n model_id (`str` or `Path`):\n Directory from which to load\n use_auth_token (`str` or `bool`):\n Is needed to load models from a private repository\n revision (`str`):\n Revision is the specific model version to use. 
It can be a branch name, a tag name, or a commit id\n cache_dir (`Union[str, Path]`, *optional*):\n Path to a directory in which a downloaded pretrained model configuration should be cached if the\n standard cache should not be used.\n force_download (`bool`, *optional*, defaults to `False`):\n Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n cached versions if they exist.\n file_name(`str`):\n Overwrites the default model file name from `\"model.onnx\"` to `file_name`. This allows you to load different model files from the same\n repository or directory.\n local_files_only(`bool`, *optional*, defaults to `False`):\n Whether or not to only look at local files (i.e., do not try to download the model).\n kwargs (`Dict`, *optional*):\n kwargs will be passed to the model during initialization\n \"\"\"\n local_files_only = kwargs.pop(\"local_files_only\", False)\n config_dict = kwargs.pop(\"config\", {})\n model_file_name = file_name if file_name is not None else ONNX_WEIGHTS_NAME\n # load model from local directory\n if os.path.isdir(model_id):\n config = PretrainedConfig.from_dict(config_dict)\n model = ORTModel.load_model(os.path.join(model_id, model_file_name), **kwargs)\n kwargs[\"model_save_dir\"] = Path(model_id)\n kwargs[\"latest_model_name\"] = model_file_name\n # load model from hub\n else:\n # download model\n model_cache_path = hf_hub_download(\n repo_id=model_id,\n filename=model_file_name,\n use_auth_token=use_auth_token,\n revision=revision,\n cache_dir=cache_dir,\n force_download=force_download,\n local_files_only=local_files_only,\n )\n kwargs[\"model_save_dir\"] = Path(model_cache_path).parent\n kwargs[\"latest_model_name\"] = Path(model_cache_path).name\n model = ORTModel.load_model(model_cache_path, **kwargs)\n config = PretrainedConfig.from_dict(config_dict)\n\n return cls(model=model, config=config, **kwargs)\n\n @classmethod\n def _from_transformers(\n cls,\n model_id: str,\n save_dir: Union[str, Path] = default_cache_path,\n use_auth_token: Optional[Union[bool, str, None]] = None,\n revision: Optional[Union[str, None]] = None,\n force_download: bool = False,\n cache_dir: Optional[str] = None,\n **kwargs,\n ):\n \"\"\"\n Converts a vanilla Transformers model into an optimized model using `transformers.onnx.export_onnx`.\n Arguments:\n model_id (`str` or `Path`):\n Directory from which to load\n save_dir (`str` or `Path`):\n Directory where the onnx model should be saved, default to `transformers.file_utils.default_cache_path`, which is the cache dir for\n transformers.\n use_auth_token (`str` or `bool`):\n Is needed to load models from a private repository\n revision (`str`):\n Revision is the specific model version to use. 
It can be a branch name, a tag name, or a commit id\n cache_dir (`Union[str, Path]`, *optional*):\n Path to a directory in which a downloaded pretrained model configuration should be cached if the\n standard cache should not be used.\n force_download (`bool`, *optional*, defaults to `False`):\n Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n cached versions if they exist.\n kwargs (`Dict`, *optional*):\n kwargs will be passed to the model during initialization\n \"\"\"\n # create local save dir in cache dir\n save_dir = Path(save_dir).joinpath(model_id)\n save_dir.mkdir(parents=True, exist_ok=True)\n kwargs[\"model_save_dir\"] = save_dir\n\n # reads pipeline task from ORTModelForXXX class if available else tries to extract from hub\n if cls.export_feature is not None:\n task = cls.export_feature\n else:\n task = HfApi().model_info(model_id, revision=revision).pipeline_tag\n if task in [\"sentiment-analysis\", \"text-classification\", \"zero-shot-classification\"]:\n task = \"sequence-classification\"\n elif task in [\"feature-extraction\", \"fill-mask\"]:\n task = \"default\"\n # 2. convert to temp dir\n # FIXME: transformers.onnx conversion doesn't support private models\n preprocessor = get_preprocessor(model_id)\n model = FeaturesManager.get_model_from_feature(task, model_id)\n _, model_onnx_config = FeaturesManager.check_supported_model_or_raise(model, feature=task)\n onnx_config = model_onnx_config(model.config)\n\n # export model\n export(\n preprocessor=preprocessor,\n model=model,\n config=onnx_config,\n opset=onnx_config.default_onnx_opset,\n output=save_dir.joinpath(ONNX_WEIGHTS_NAME),\n )\n kwargs[\"config\"] = model.config.__dict__\n # 3. load normal model\n return cls._from_pretrained(save_dir.as_posix(), **kwargs)\n\n\nFEATURE_EXTRACTION_EXAMPLE = r\"\"\"\n Example of feature extraction:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"My name is Philipp and I live in Germany.\", return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n >>> list(logits.shape)\n ```\n\n Example using `transformers.pipeline`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_extractor = pipeline(\"feature-extraction\", model=model, tokenizer=tokenizer)\n\n >>> text = \"My name is Philipp and I live in Germany.\"\n >>> pred = onnx_extractor(text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a MaskedLMOutput for feature-extraction tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForFeatureExtraction(ORTModel):\n \"\"\"\n Feature Extraction model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"default\"\n auto_model_class = AutoModel\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n @add_start_docstrings_to_model_forward(\n 
ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + FEATURE_EXTRACTION_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForFeatureExtraction\",\n checkpoint=\"optimum/all-MiniLM-L6-v2\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n last_hidden_state = torch.from_numpy(outputs[self.model_outputs[\"last_hidden_state\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return BaseModelOutput(last_hidden_state=last_hidden_state)\n\n\nQUESTION_ANSWERING_EXAMPLE = r\"\"\"\n Example of question answering:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> question, text = \"Who was Jim Henson?\", \"Jim Henson was a nice puppet\"\n >>> inputs = tokenizer(question, text, return_tensors=\"pt\")\n >>> start_positions = torch.tensor([1])\n >>> end_positions = torch.tensor([3])\n\n >>> outputs = model(**inputs, start_positions=start_positions, end_positions=end_positions)\n >>> start_scores = outputs.start_logits\n >>> end_scores = outputs.end_logits\n ```\n Example using `transformers.pipeline`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_qa = pipeline(\"question-answering\", model=model, tokenizer=tokenizer)\n\n >>> question, text = \"Who was Jim Henson?\", \"Jim Henson was a nice puppet\"\n >>> pred = onnx_qa(question, text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a QuestionAnsweringModelOutput for extractive question-answering tasks like SQuAD.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForQuestionAnswering(ORTModel):\n \"\"\"\n Question Answering model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"question-answering\"\n auto_model_class = AutoModelForQuestionAnswering\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n @add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + QUESTION_ANSWERING_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForQuestionAnswering\",\n checkpoint=\"optimum/roberta-base-squad2\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": 
input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n start_logits = torch.from_numpy(outputs[self.model_outputs[\"start_logits\"]]).to(self.device)\n end_logits = torch.from_numpy(outputs[self.model_outputs[\"end_logits\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return QuestionAnsweringModelOutput(start_logits=start_logits, end_logits=end_logits)\n\n\nSEQUENCE_CLASSIFICATION_EXAMPLE = r\"\"\"\n Example of single-label classification:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"Hello, my dog is cute\", return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n >>> list(logits.shape)\n ```\n\n Example using `transformers.pipelines`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_classifier = pipeline(\"text-classification\", model=model, tokenizer=tokenizer)\n\n >>> text = \"Hello, my dog is cute\"\n >>> pred = onnx_classifier(text)\n ```\n\n Example using zero-shot-classification `transformers.pipelines`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"optimum/distilbert-base-uncased-mnli\")\n >>> model = {model_class}.from_pretrained(\"optimum/distilbert-base-uncased-mnli\")\n >>> onnx_z0 = pipeline(\"zero-shot-classification\", model=model, tokenizer=tokenizer)\n\n >>> sequence_to_classify = \"Who are you voting for in 2020?\"\n >>> candidate_labels = [\"Europe\", \"public health\", \"politics\", \"elections\"]\n >>> pred = onnx_z0(sequence_to_classify, candidate_labels, multi_class=True)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a sequence classification/regression head on top (a linear layer on top of the\n pooled output) e.g. 
for GLUE tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForSequenceClassification(ORTModel):\n \"\"\"\n Sequence Classification model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"sequence-classification\"\n auto_model_class = AutoModelForSequenceClassification\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n self.model_inputs = {input_key.name: idx for idx, input_key in enumerate(self.model.get_inputs())}\n\n @add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + SEQUENCE_CLASSIFICATION_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForSequenceClassification\",\n checkpoint=\"optimum/distilbert-base-uncased-finetuned-sst-2-english\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n logits = torch.from_numpy(outputs[self.model_outputs[\"logits\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return SequenceClassifierOutput(logits=logits)\n\n\nTOKEN_CLASSIFICATION_EXAMPLE = r\"\"\"\n Example of token classification:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"My name is Philipp and I live in Germany.\", return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n >>> list(logits.shape)\n ```\n\n Example using `transformers.pipelines`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_ner = pipeline(\"token-classification\", model=model, tokenizer=tokenizer)\n\n >>> text = \"My name is Philipp and I live in Germany.\"\n >>> pred = onnx_ner(text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g.\n for Named-Entity-Recognition (NER) tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForTokenClassification(ORTModel):\n \"\"\"\n Token Classification model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"token-classification\"\n auto_model_class = AutoModelForTokenClassification\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n 
@add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + TOKEN_CLASSIFICATION_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForTokenClassification\",\n checkpoint=\"optimum/bert-base-NER\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n logits = torch.from_numpy(outputs[self.model_outputs[\"logits\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return TokenClassifierOutput(logits=logits)\n\n\nMULTIPLE_CHOICE_EXAMPLE = r\"\"\"\n Example of mutliple choice:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\", from_transformers=True)\n\n >>> num_choices = 4\n >>> first_sentence = [\"Members of the procession walk down the street holding small horn brass instruments.\"] * num_choices\n >>> second_sentence = [\n \"A drum line passes by walking down the street playing their instruments.\",\n \"A drum line has heard approaching them.\",\n \"A drum line arrives and they're outside dancing and asleep.\",\n \"A drum line turns the lead singer watches the performance.\"\n]\n >>> inputs = tokenizer(first_sentence, second_sentence, truncation=True, padding=True)\n # Unflatten the inputs values expanding it to the shape [batch_size, num_choices, seq_length]\n >>> for k, v in inputs.items():\n >>> inputs[k] = [v[i: i + num_choices] for i in range(0, len(v), num_choices)]\n >>> inputs = dict(inputs.convert_to_tensors(tensor_type=\"pt\"))\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a multiple choice classification head on top (a linear layer on top of the pooled output and a\n softmax) e.g. 
for RocStories/SWAG tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForMultipleChoice(ORTModel):\n \"\"\"\n Multiple choice model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"multiple-choice\"\n auto_model_class = AutoModelForMultipleChoice\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n @add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + MULTIPLE_CHOICE_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForMultipleChoice\",\n checkpoint=\"ehdwns1516/bert-base-uncased_SWAG\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # Converts pytorch inputs into numpy inputs\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n\n # Run inference\n outputs = self.model.run(None, onnx_inputs)\n logits = torch.from_numpy(outputs[self.model_outputs[\"logits\"]]).to(self.device)\n\n return MultipleChoiceModelOutput(logits=logits)\n\n\nTEXT_GENERATION_EXAMPLE = r\"\"\"\n Example of text generation:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"My name is Philipp and I live in Germany.\", return_tensors=\"pt\")\n\n >>> gen_tokens = model.generate(**inputs,do_sample=True,temperature=0.9, min_length=20,max_length=20)\n >>> tokenizer.batch_decode(gen_tokens)\n ```\n\n Example using `transformers.pipelines`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_gen = pipeline(\"text-generation\", model=model, tokenizer=tokenizer)\n\n >>> text = \"My name is Philipp and I live in Germany.\"\n >>> gen = onnx_gen(text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a causal language modeling head on top (linear layer with weights tied to the input\n embeddings).\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForCausalLM(ORTModel, GenerationMixin):\n \"\"\"\n Causal LM model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"causal-lm\"\n auto_model_class = AutoModelForCausalLM\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.main_input_name = \"input_ids\"\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n def prepare_inputs_for_generation(self, input_ids: torch.LongTensor, **kwargs) -> Dict[str, Any]:\n \"\"\"\n Implement in subclasses of [`PreTrainedModel`] for custom behavior to prepare inputs in the generate method.\n \"\"\"\n inputs = {\"input_ids\": input_ids}\n if 
kwargs.get(\"attention_mask\", None) is not None:\n inputs[\"attention_mask\"] = kwargs[\"attention_mask\"]\n return inputs\n\n @add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + TEXT_GENERATION_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForCausalLM\",\n checkpoint=\"optimum/gpt2\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n logits = torch.from_numpy(outputs[self.model_outputs[\"logits\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return CausalLMOutputWithCrossAttentions(logits=logits)\n\n # Adapted from https://github.com/huggingface/transformers/blob/99289c08a1b16a805dd4ee46de029e9fd23cba3d/src/transformers/generation_utils.py#L490\n def _prepare_attention_mask_for_generation(\n self,\n inputs: torch.Tensor,\n pad_token_id: int,\n eos_token_id: int,\n ) -> torch.LongTensor:\n \"\"\"\n Overrides the base method of `GenerationMixin` to ensure input IDs and\n attention mask are on the same device.\n \"\"\"\n is_input_ids = len(inputs.shape) == 2 and inputs.dtype in [torch.int, torch.long]\n is_pad_token_in_inputs = (pad_token_id is not None) and (pad_token_id in inputs)\n is_pad_token_not_equal_to_eos_token_id = (eos_token_id is None) or (\n (eos_token_id is not None) and (pad_token_id != eos_token_id)\n )\n # Check if input is input_ids and padded -> only then is attention_mask defined\n if is_input_ids and is_pad_token_in_inputs and is_pad_token_not_equal_to_eos_token_id:\n return inputs.ne(pad_token_id).long()\n else:\n # Ensure attention mask is on the same device as the input IDs\n return torch.ones(inputs.shape[:2], dtype=torch.long, device=inputs.device)\n\n\nIMAGE_CLASSIFICATION_EXAMPLE = r\"\"\"\n Example of image classification:\n\n ```python\n >>> import requests\n >>> from PIL import Image\n >>> from optimum.onnxruntime import {model_class}\n >>> from transformers import {processor_class}\n\n >>> url = \"http://images.cocodataset.org/val2017/000000039769.jpg\"\n >>> image = Image.open(requests.get(url, stream=True).raw)\n\n >>> preprocessor = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = preprocessor(images=image, return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n ```\n\n Example using `transformers.pipeline`:\n\n ```python\n >>> import requests\n >>> from PIL import Image\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> preprocessor = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_image_classifier = pipeline(\"image-classification\", model=model, feature_extractor=preprocessor)\n\n >>> url = \"http://images.cocodataset.org/val2017/000000039769.jpg\"\n >>> pred = onnx_image_classifier(url)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model for image-classification tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForImageClassification(ORTModel):\n \"\"\"\n Image Classification model for ONNX.\n 
\"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"image-classification\"\n auto_model_class = AutoModelForImageClassification\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n @add_start_docstrings_to_model_forward(\n ONNX_IMAGE_INPUTS_DOCSTRING.format(\"batch_size, num_channels, height, width\")\n + IMAGE_CLASSIFICATION_EXAMPLE.format(\n processor_class=_FEATURE_EXTRACTOR_FOR_DOC,\n model_class=\"ORTModelForImageClassification\",\n checkpoint=\"optimum/vit-base-patch16-224\",\n )\n )\n def forward(\n self,\n pixel_values: torch.Tensor,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"pixel_values\": pixel_values.cpu().detach().numpy(),\n }\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n # converts output to namedtuple for pipelines post-processing\n return ImageClassifierOutput(\n logits=torch.from_numpy(outputs[self.model_outputs[\"logits\"]]),\n )\n\n\nCUSTOM_TASKS_EXAMPLE = r\"\"\"\n Example of custom tasks(e.g. a sentence transformers taking `pooler_output` as output):\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"I love burritos!\", return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> last_hidden_state = outputs.last_hidden_state\n >>> pooler_output = outputs.pooler_output\n ```\n\n Example using `transformers.pipelines`(only if the task is supported):\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_extractor = pipeline(\"feature-extraction\", model=model, tokenizer=tokenizer)\n\n >>> text = \"I love burritos!\"\n >>> pred = onnx_extractor(text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model for any custom tasks. 
It can be used to leverage the inference acceleration with any custom exported ONNX model.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForCustomTasks(ORTModel):\n \"\"\"\n Onnx Model for any custom tasks.\n \"\"\"\n\n auto_model_class = AutoModel\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n\n @add_start_docstrings_to_model_forward(\n CUSTOM_TASKS_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForCustomTasks\",\n checkpoint=\"optimum/sbert-all-MiniLM-L6-with-pooler\",\n )\n )\n def forward(self, **kwargs):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = self._prepare_onnx_inputs(**kwargs)\n # run inference\n onnx_outputs = self.model.run(None, onnx_inputs)\n outputs = self._prepare_onnx_outputs(onnx_outputs)\n # converts outputs to namedtuple for pipelines post-processing if applicable\n return ModelOutput(outputs)\n\n def _prepare_onnx_inputs(self, **kwargs):\n model_inputs = {input_key.name: idx for idx, input_key in enumerate(self.model.get_inputs())}\n onnx_inputs = {}\n # converts pytorch inputs into numpy inputs for onnx\n for input in model_inputs.keys():\n onnx_inputs[input] = kwargs.pop(input).cpu().detach().numpy()\n\n return onnx_inputs\n\n def _prepare_onnx_outputs(self, onnx_outputs):\n model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n outputs = {}\n # converts onnxruntime outputs into tensor for standard outputs\n for output, idx in model_outputs.items():\n outputs[output] = torch.from_numpy(onnx_outputs[idx]).to(self.device)\n\n return outputs\n", "path": "optimum/onnxruntime/modeling_ort.py" } ]
[ { "content": "# Copyright 2022 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nimport os\nimport shutil\nfrom pathlib import Path\nfrom typing import Any, Dict, List, Optional, Union\n\nimport torch\nfrom transformers import (\n AutoConfig,\n AutoModel,\n AutoModelForCausalLM,\n AutoModelForImageClassification,\n AutoModelForMultipleChoice,\n AutoModelForQuestionAnswering,\n AutoModelForSequenceClassification,\n AutoModelForTokenClassification,\n PretrainedConfig,\n)\nfrom transformers.file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, default_cache_path\nfrom transformers.generation_utils import GenerationMixin\nfrom transformers.modeling_outputs import (\n BaseModelOutput,\n CausalLMOutputWithCrossAttentions,\n ImageClassifierOutput,\n ModelOutput,\n MultipleChoiceModelOutput,\n QuestionAnsweringModelOutput,\n SequenceClassifierOutput,\n TokenClassifierOutput,\n)\nfrom transformers.onnx import FeaturesManager, export\nfrom transformers.onnx.utils import get_preprocessor\n\nimport onnxruntime as ort\nfrom huggingface_hub import HfApi, hf_hub_download\n\nfrom ..modeling_base import FROM_PRETRAINED_START_DOCSTRING, OptimizedModel\nfrom .utils import ONNX_WEIGHTS_NAME, get_device_for_provider, get_provider_for_device\n\n\nlogger = logging.getLogger(__name__)\n\n\n_TOKENIZER_FOR_DOC = \"AutoTokenizer\"\n_FEATURE_EXTRACTOR_FOR_DOC = \"AutoFeatureExtractor\"\n\nONNX_MODEL_START_DOCSTRING = r\"\"\"\n This model inherits from [~`onnxruntime.modeling_ort.ORTModel`]. Check the superclass documentation for the generic methods the\n library implements for all its model (such as downloading or saving)\n Parameters:\n config (`transformers.PretrainedConfig`): [PretrainedConfig](https://huggingface.co/docs/transformers/main_classes/configuration#transformers.PretrainedConfig) is the Model configuration class with all the parameters of the model.\n Initializing with a config file does not load the weights associated with the model, only the\n configuration. Check out the [`~onnxruntime.modeling_ort.ORTModel.from_pretrained`] method to load the model weights.\n model (`onnxruntime.InferenceSession`): [onnxruntime.InferenceSession](https://onnxruntime.ai/docs/api/python/api_summary.html#inferencesession) is the main class used to run a model. 
Check out the [`~onnxruntime.modeling_ort.ORTModel.load_model`] method for more information.\n\"\"\"\n\nONNX_TEXT_INPUTS_DOCSTRING = r\"\"\"\n Args:\n input_ids (`torch.Tensor` of shape `({0})`):\n Indices of input sequence tokens in the vocabulary.\n Indices can be obtained using [`AutoTokenizer`](https://huggingface.co/docs/transformers/autoclass_tutorial#autotokenizer).\n See [`PreTrainedTokenizer.encode`](https://huggingface.co/docs/transformers/main_classes/tokenizer#transformers.PreTrainedTokenizerBase.encode) and\n [`PreTrainedTokenizer.__call__`](https://huggingface.co/docs/transformers/main_classes/tokenizer#transformers.PreTrainedTokenizerBase.__call__) for details.\n [What are input IDs?](https://huggingface.co/docs/transformers/glossary#input-ids)\n attention_mask (`torch.Tensor` of shape `({0})`, *optional*):\n Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:\n - 1 for tokens that are **not masked**,\n - 0 for tokens that are **masked**.\n [What are attention masks?](https://huggingface.co/docs/transformers/glossary#attention-mask)\n token_type_ids (`torch.Tensor` of shape `({0})`, *optional*):\n Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`:\n - 1 for tokens that are **sentence A**,\n - 0 for tokens that are **sentence B**.\n [What are token type IDs?](https://huggingface.co/docs/transformers/glossary#token-type-ids)\n\"\"\"\n\nONNX_IMAGE_INPUTS_DOCSTRING = r\"\"\"\n Args:\n pixel_values (`torch.Tensor` of shape `({0})`):\n Pixel values corresponding to the images in the current batch.\n Pixel values can be obtained from encoded images using [`AutoFeatureExtractor`](https://huggingface.co/docs/transformers/autoclass_tutorial#autofeatureextractor).\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Base ORTModel class for implementing models using ONNX Runtime. 
The ORTModel implements generic methods for interacting\n with the Hugging Face Hub as well as exporting vanilla transformers models to ONNX using `transformers.onnx` toolchain.\n The ORTModel implements additionally generic methods for optimizing and quantizing Onnx models.\n \"\"\",\n)\nclass ORTModel(OptimizedModel):\n base_model_prefix = \"onnx_model\"\n auto_model_class = AutoModel\n\n def __init__(self, model: ort.InferenceSession = None, config=None, **kwargs):\n self.model = model\n self.config = config\n self.model_save_dir = kwargs.get(\"model_save_dir\", None)\n self.latest_model_name = kwargs.get(\"latest_model_name\", \"model.onnx\")\n self.providers = model.get_providers()\n self._device = get_device_for_provider(self.providers[0])\n\n if self._device == None:\n logger.warning(\n f\"ORTModel outputs will be sent to CPU as the device could not be inferred from the execution provider {self.providers[0]}.\"\n f\" Use `ort_model.to()` to send the outputs to the wanted device.\"\n )\n\n # registers the ORTModelForXXX classes into the transformers AutoModel classes\n # to avoid warnings when create a pipeline https://github.com/huggingface/transformers/blob/cad61b68396a1a387287a8e2e2fef78a25b79383/src/transformers/pipelines/base.py#L863\n AutoConfig.register(self.base_model_prefix, AutoConfig)\n self.auto_model_class.register(AutoConfig, self.__class__)\n\n @property\n def device(self) -> torch.device:\n \"\"\"\n `torch.device`: The device on which the module is (assuming that all the module parameters are on the same\n device).\n \"\"\"\n return self._device\n\n @device.setter\n def device(self, value: torch.device):\n self._device = value\n\n def to(self, device: torch.device):\n \"\"\"\n Changes the ONNX Runtime provider according to the device.\n \"\"\"\n # convert string device input (ie. \"cuda\") to torch.device\n if type(device) == str:\n device = torch.device(device)\n\n self.device = device\n provider = get_provider_for_device(self.device)\n self.model.set_providers([provider])\n self.providers = self.model.get_providers()\n return self\n\n def forward(self, *args, **kwargs):\n raise NotImplementedError\n\n @staticmethod\n def load_model(\n path: Union[str, Path],\n provider: Optional[str] = \"CPUExecutionProvider\",\n session_options: Optional[ort.SessionOptions] = None,\n provider_options: Optional[Dict] = None,\n **kwargs\n ):\n \"\"\"\n Loads an ONNX Inference session with a given provider. Default provider is `CPUExecutionProvider` to match the default behaviour in PyTorch/TensorFlow/JAX.\n\n Arguments:\n path (`str` or `Path`):\n Directory from which to load the model.\n provider (`str`, *optional*):\n ONNX Runtime provider to use for loading the model. See https://onnxruntime.ai/docs/execution-providers/\n for possible providers. Defaults to `CPUExecutionProvider`.\n session_options (`onnxruntime.SessionOptions`, *optional*):\n ONNX Runtime session options to use for loading the model. Defaults to `None`.\n provider_options (`Dict`, **optional**):\n Provider option dictionaries corresponding to the provider used. See available options\n for each provider: https://onnxruntime.ai/docs/api/c/group___global.html . 
Defaults to `None`.\n \"\"\"\n available_providers = ort.get_available_providers()\n if provider not in available_providers:\n raise ValueError(\n f\"Asked to use {provider} as an ONNX Runtime execution provider, but the available execution providers are {available_providers}.\"\n )\n\n # `providers` list must of be of the same length as `provider_options` list\n return ort.InferenceSession(\n path,\n providers=[provider],\n sess_options=session_options,\n provider_options=None if provider_options is None else [provider_options],\n )\n\n def _save_pretrained(self, save_directory: Union[str, Path], file_name: Optional[str] = None, **kwargs):\n \"\"\"\n Saves a model and its configuration file to a directory, so that it can be re-loaded using the\n [`~optimum.onnxruntime.modeling_ort.ORTModel.from_pretrained`] class method. It will always save the latest_model_name.\n Arguments:\n save_directory (`str` or `Path`):\n Directory where to save the model file.\n file_name(`str`, *optional*):\n Overwrites the default model file name from `\"model.onnx\"` to `file_name`. This allows you to save the model with\n a different name.\n \"\"\"\n model_file_name = file_name if file_name is not None else ONNX_WEIGHTS_NAME\n\n src_path = self.model_save_dir.joinpath(self.latest_model_name)\n dst_path = Path(save_directory).joinpath(model_file_name)\n shutil.copyfile(src_path, dst_path)\n\n @classmethod\n @add_start_docstrings(FROM_PRETRAINED_START_DOCSTRING)\n def from_pretrained(\n cls,\n model_id: Union[str, Path],\n from_transformers: bool = False,\n force_download: bool = False,\n use_auth_token: Optional[str] = None,\n cache_dir: Optional[str] = None,\n provider: Optional[str] = \"CPUExecutionProvider\",\n session_options: Optional[ort.SessionOptions] = None,\n provider_options: Optional[Dict] = None,\n *args,\n **kwargs\n ):\n \"\"\"\n provider (`str`, *optional*):\n ONNX Runtime providers to use for loading the model. See https://onnxruntime.ai/docs/execution-providers/ for\n possible providers. Defaults to `CPUExecutionProvider`.\n session_options (`onnxruntime.SessionOptions`, *optional*),:\n ONNX Runtime session options to use for loading the model. Defaults to `None`.\n\n Returns:\n `ORTModel`: The loaded ORTModel model.\n \"\"\"\n return super().from_pretrained(\n model_id,\n from_transformers,\n force_download,\n use_auth_token,\n cache_dir,\n provider=provider,\n session_options=session_options,\n provider_options=provider_options,\n *args,\n **kwargs,\n )\n\n @classmethod\n def _from_pretrained(\n cls,\n model_id: Union[str, Path],\n use_auth_token: Optional[Union[bool, str, None]] = None,\n revision: Optional[Union[str, None]] = None,\n force_download: bool = False,\n cache_dir: Optional[str] = None,\n file_name: Optional[str] = None,\n **kwargs,\n ):\n \"\"\"\n Loads a model and its configuration file from a directory or the HF Hub.\n Implements: https://github.com/huggingface/huggingface_hub/blob/e67de48368bc1843e40afc1cc9d236402b9609ee/src/huggingface_hub/hub_mixin.py#L73\n Arguments:\n model_id (`str` or `Path`):\n Directory from which to load\n use_auth_token (`str` or `bool`):\n Is needed to load models from a private repository\n revision (`str`):\n Revision is the specific model version to use. 
It can be a branch name, a tag name, or a commit id\n cache_dir (`Union[str, Path]`, *optional*):\n Path to a directory in which a downloaded pretrained model configuration should be cached if the\n standard cache should not be used.\n force_download (`bool`, *optional*, defaults to `False`):\n Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n cached versions if they exist.\n file_name(`str`):\n Overwrites the default model file name from `\"model.onnx\"` to `file_name`. This allows you to load different model files from the same\n repository or directory.\n local_files_only(`bool`, *optional*, defaults to `False`):\n Whether or not to only look at local files (i.e., do not try to download the model).\n kwargs (`Dict`, *optional*):\n kwargs will be passed to the model during initialization\n \"\"\"\n local_files_only = kwargs.pop(\"local_files_only\", False)\n config_dict = kwargs.pop(\"config\", {})\n model_file_name = file_name if file_name is not None else ONNX_WEIGHTS_NAME\n # load model from local directory\n if os.path.isdir(model_id):\n config = PretrainedConfig.from_dict(config_dict)\n model = ORTModel.load_model(os.path.join(model_id, model_file_name), **kwargs)\n kwargs[\"model_save_dir\"] = Path(model_id)\n kwargs[\"latest_model_name\"] = model_file_name\n # load model from hub\n else:\n # download model\n model_cache_path = hf_hub_download(\n repo_id=model_id,\n filename=model_file_name,\n use_auth_token=use_auth_token,\n revision=revision,\n cache_dir=cache_dir,\n force_download=force_download,\n local_files_only=local_files_only,\n )\n kwargs[\"model_save_dir\"] = Path(model_cache_path).parent\n kwargs[\"latest_model_name\"] = Path(model_cache_path).name\n model = ORTModel.load_model(model_cache_path, **kwargs)\n config = PretrainedConfig.from_dict(config_dict)\n\n return cls(model=model, config=config, **kwargs)\n\n @classmethod\n def _from_transformers(\n cls,\n model_id: str,\n save_dir: Union[str, Path] = default_cache_path,\n use_auth_token: Optional[Union[bool, str, None]] = None,\n revision: Optional[Union[str, None]] = None,\n force_download: bool = False,\n cache_dir: Optional[str] = None,\n **kwargs,\n ):\n \"\"\"\n Converts a vanilla Transformers model into an optimized model using `transformers.onnx.export_onnx`.\n Arguments:\n model_id (`str` or `Path`):\n Directory from which to load\n save_dir (`str` or `Path`):\n Directory where the onnx model should be saved, default to `transformers.file_utils.default_cache_path`, which is the cache dir for\n transformers.\n use_auth_token (`str` or `bool`):\n Is needed to load models from a private repository\n revision (`str`):\n Revision is the specific model version to use. 
It can be a branch name, a tag name, or a commit id\n cache_dir (`Union[str, Path]`, *optional*):\n Path to a directory in which a downloaded pretrained model configuration should be cached if the\n standard cache should not be used.\n force_download (`bool`, *optional*, defaults to `False`):\n Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n cached versions if they exist.\n kwargs (`Dict`, *optional*):\n kwargs will be passed to the model during initialization\n \"\"\"\n # create local save dir in cache dir\n save_dir = Path(save_dir).joinpath(model_id)\n save_dir.mkdir(parents=True, exist_ok=True)\n kwargs[\"model_save_dir\"] = save_dir\n\n # reads pipeline task from ORTModelForXXX class if available else tries to extract from hub\n if cls.export_feature is not None:\n task = cls.export_feature\n else:\n task = HfApi().model_info(model_id, revision=revision).pipeline_tag\n if task in [\"sentiment-analysis\", \"text-classification\", \"zero-shot-classification\"]:\n task = \"sequence-classification\"\n elif task in [\"feature-extraction\", \"fill-mask\"]:\n task = \"default\"\n # 2. convert to temp dir\n # FIXME: transformers.onnx conversion doesn't support private models\n preprocessor = get_preprocessor(model_id)\n model = FeaturesManager.get_model_from_feature(task, model_id)\n _, model_onnx_config = FeaturesManager.check_supported_model_or_raise(model, feature=task)\n onnx_config = model_onnx_config(model.config)\n\n # export model\n export(\n preprocessor=preprocessor,\n model=model,\n config=onnx_config,\n opset=onnx_config.default_onnx_opset,\n output=save_dir.joinpath(ONNX_WEIGHTS_NAME),\n )\n kwargs[\"config\"] = model.config.__dict__\n # 3. load normal model\n return cls._from_pretrained(save_dir.as_posix(), **kwargs)\n\n\nFEATURE_EXTRACTION_EXAMPLE = r\"\"\"\n Example of feature extraction:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"My name is Philipp and I live in Germany.\", return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n >>> list(logits.shape)\n ```\n\n Example using `transformers.pipeline`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_extractor = pipeline(\"feature-extraction\", model=model, tokenizer=tokenizer)\n\n >>> text = \"My name is Philipp and I live in Germany.\"\n >>> pred = onnx_extractor(text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a MaskedLMOutput for feature-extraction tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForFeatureExtraction(ORTModel):\n \"\"\"\n Feature Extraction model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"default\"\n auto_model_class = AutoModel\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n @add_start_docstrings_to_model_forward(\n 
ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + FEATURE_EXTRACTION_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForFeatureExtraction\",\n checkpoint=\"optimum/all-MiniLM-L6-v2\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n last_hidden_state = torch.from_numpy(outputs[self.model_outputs[\"last_hidden_state\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return BaseModelOutput(last_hidden_state=last_hidden_state)\n\n\nQUESTION_ANSWERING_EXAMPLE = r\"\"\"\n Example of question answering:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> question, text = \"Who was Jim Henson?\", \"Jim Henson was a nice puppet\"\n >>> inputs = tokenizer(question, text, return_tensors=\"pt\")\n >>> start_positions = torch.tensor([1])\n >>> end_positions = torch.tensor([3])\n\n >>> outputs = model(**inputs, start_positions=start_positions, end_positions=end_positions)\n >>> start_scores = outputs.start_logits\n >>> end_scores = outputs.end_logits\n ```\n Example using `transformers.pipeline`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_qa = pipeline(\"question-answering\", model=model, tokenizer=tokenizer)\n\n >>> question, text = \"Who was Jim Henson?\", \"Jim Henson was a nice puppet\"\n >>> pred = onnx_qa(question, text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a QuestionAnsweringModelOutput for extractive question-answering tasks like SQuAD.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForQuestionAnswering(ORTModel):\n \"\"\"\n Question Answering model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"question-answering\"\n auto_model_class = AutoModelForQuestionAnswering\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n @add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + QUESTION_ANSWERING_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForQuestionAnswering\",\n checkpoint=\"optimum/roberta-base-squad2\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": 
input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n start_logits = torch.from_numpy(outputs[self.model_outputs[\"start_logits\"]]).to(self.device)\n end_logits = torch.from_numpy(outputs[self.model_outputs[\"end_logits\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return QuestionAnsweringModelOutput(start_logits=start_logits, end_logits=end_logits)\n\n\nSEQUENCE_CLASSIFICATION_EXAMPLE = r\"\"\"\n Example of single-label classification:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"Hello, my dog is cute\", return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n >>> list(logits.shape)\n ```\n\n Example using `transformers.pipelines`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_classifier = pipeline(\"text-classification\", model=model, tokenizer=tokenizer)\n\n >>> text = \"Hello, my dog is cute\"\n >>> pred = onnx_classifier(text)\n ```\n\n Example using zero-shot-classification `transformers.pipelines`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"optimum/distilbert-base-uncased-mnli\")\n >>> model = {model_class}.from_pretrained(\"optimum/distilbert-base-uncased-mnli\")\n >>> onnx_z0 = pipeline(\"zero-shot-classification\", model=model, tokenizer=tokenizer)\n\n >>> sequence_to_classify = \"Who are you voting for in 2020?\"\n >>> candidate_labels = [\"Europe\", \"public health\", \"politics\", \"elections\"]\n >>> pred = onnx_z0(sequence_to_classify, candidate_labels, multi_class=True)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a sequence classification/regression head on top (a linear layer on top of the\n pooled output) e.g. 
for GLUE tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForSequenceClassification(ORTModel):\n \"\"\"\n Sequence Classification model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"sequence-classification\"\n auto_model_class = AutoModelForSequenceClassification\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n self.model_inputs = {input_key.name: idx for idx, input_key in enumerate(self.model.get_inputs())}\n\n @add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + SEQUENCE_CLASSIFICATION_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForSequenceClassification\",\n checkpoint=\"optimum/distilbert-base-uncased-finetuned-sst-2-english\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n logits = torch.from_numpy(outputs[self.model_outputs[\"logits\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return SequenceClassifierOutput(logits=logits)\n\n\nTOKEN_CLASSIFICATION_EXAMPLE = r\"\"\"\n Example of token classification:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"My name is Philipp and I live in Germany.\", return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n >>> list(logits.shape)\n ```\n\n Example using `transformers.pipelines`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_ner = pipeline(\"token-classification\", model=model, tokenizer=tokenizer)\n\n >>> text = \"My name is Philipp and I live in Germany.\"\n >>> pred = onnx_ner(text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g.\n for Named-Entity-Recognition (NER) tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForTokenClassification(ORTModel):\n \"\"\"\n Token Classification model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"token-classification\"\n auto_model_class = AutoModelForTokenClassification\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n 
@add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + TOKEN_CLASSIFICATION_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForTokenClassification\",\n checkpoint=\"optimum/bert-base-NER\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n logits = torch.from_numpy(outputs[self.model_outputs[\"logits\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return TokenClassifierOutput(logits=logits)\n\n\nMULTIPLE_CHOICE_EXAMPLE = r\"\"\"\n Example of mutliple choice:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\", from_transformers=True)\n\n >>> num_choices = 4\n >>> first_sentence = [\"Members of the procession walk down the street holding small horn brass instruments.\"] * num_choices\n >>> second_sentence = [\n \"A drum line passes by walking down the street playing their instruments.\",\n \"A drum line has heard approaching them.\",\n \"A drum line arrives and they're outside dancing and asleep.\",\n \"A drum line turns the lead singer watches the performance.\"\n]\n >>> inputs = tokenizer(first_sentence, second_sentence, truncation=True, padding=True)\n # Unflatten the inputs values expanding it to the shape [batch_size, num_choices, seq_length]\n >>> for k, v in inputs.items():\n >>> inputs[k] = [v[i: i + num_choices] for i in range(0, len(v), num_choices)]\n >>> inputs = dict(inputs.convert_to_tensors(tensor_type=\"pt\"))\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a multiple choice classification head on top (a linear layer on top of the pooled output and a\n softmax) e.g. 
for RocStories/SWAG tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForMultipleChoice(ORTModel):\n \"\"\"\n Multiple choice model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"multiple-choice\"\n auto_model_class = AutoModelForMultipleChoice\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n @add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + MULTIPLE_CHOICE_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForMultipleChoice\",\n checkpoint=\"ehdwns1516/bert-base-uncased_SWAG\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n token_type_ids: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # Converts pytorch inputs into numpy inputs\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n\n if token_type_ids is not None:\n onnx_inputs[\"token_type_ids\"] = token_type_ids.cpu().detach().numpy()\n\n # Run inference\n outputs = self.model.run(None, onnx_inputs)\n logits = torch.from_numpy(outputs[self.model_outputs[\"logits\"]]).to(self.device)\n\n return MultipleChoiceModelOutput(logits=logits)\n\n\nTEXT_GENERATION_EXAMPLE = r\"\"\"\n Example of text generation:\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n >>> import torch\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"My name is Philipp and I live in Germany.\", return_tensors=\"pt\")\n\n >>> gen_tokens = model.generate(**inputs,do_sample=True,temperature=0.9, min_length=20,max_length=20)\n >>> tokenizer.batch_decode(gen_tokens)\n ```\n\n Example using `transformers.pipelines`:\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_gen = pipeline(\"text-generation\", model=model, tokenizer=tokenizer)\n\n >>> text = \"My name is Philipp and I live in Germany.\"\n >>> gen = onnx_gen(text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model with a causal language modeling head on top (linear layer with weights tied to the input\n embeddings).\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForCausalLM(ORTModel, GenerationMixin):\n \"\"\"\n Causal LM model for ONNX.\n \"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"causal-lm\"\n auto_model_class = AutoModelForCausalLM\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.main_input_name = \"input_ids\"\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n def prepare_inputs_for_generation(self, input_ids: torch.LongTensor, **kwargs) -> Dict[str, Any]:\n \"\"\"\n Implement in subclasses of [`PreTrainedModel`] for custom behavior to prepare inputs in the generate method.\n \"\"\"\n inputs = {\"input_ids\": input_ids}\n if 
kwargs.get(\"attention_mask\", None) is not None:\n inputs[\"attention_mask\"] = kwargs[\"attention_mask\"]\n return inputs\n\n @add_start_docstrings_to_model_forward(\n ONNX_TEXT_INPUTS_DOCSTRING.format(\"batch_size, sequence_length\")\n + TEXT_GENERATION_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForCausalLM\",\n checkpoint=\"optimum/gpt2\",\n )\n )\n def forward(\n self,\n input_ids: Optional[torch.Tensor] = None,\n attention_mask: Optional[torch.Tensor] = None,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"input_ids\": input_ids.cpu().detach().numpy(),\n \"attention_mask\": attention_mask.cpu().detach().numpy(),\n }\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n logits = torch.from_numpy(outputs[self.model_outputs[\"logits\"]]).to(self.device)\n # converts output to namedtuple for pipelines post-processing\n return CausalLMOutputWithCrossAttentions(logits=logits)\n\n # Adapted from https://github.com/huggingface/transformers/blob/99289c08a1b16a805dd4ee46de029e9fd23cba3d/src/transformers/generation_utils.py#L490\n def _prepare_attention_mask_for_generation(\n self,\n inputs: torch.Tensor,\n pad_token_id: int,\n eos_token_id: int,\n ) -> torch.LongTensor:\n \"\"\"\n Overrides the base method of `GenerationMixin` to ensure input IDs and\n attention mask are on the same device.\n \"\"\"\n is_input_ids = len(inputs.shape) == 2 and inputs.dtype in [torch.int, torch.long]\n is_pad_token_in_inputs = (pad_token_id is not None) and (pad_token_id in inputs)\n is_pad_token_not_equal_to_eos_token_id = (eos_token_id is None) or (\n (eos_token_id is not None) and (pad_token_id != eos_token_id)\n )\n # Check if input is input_ids and padded -> only then is attention_mask defined\n if is_input_ids and is_pad_token_in_inputs and is_pad_token_not_equal_to_eos_token_id:\n return inputs.ne(pad_token_id).long()\n else:\n # Ensure attention mask is on the same device as the input IDs\n return torch.ones(inputs.shape[:2], dtype=torch.long, device=inputs.device)\n\n\nIMAGE_CLASSIFICATION_EXAMPLE = r\"\"\"\n Example of image classification:\n\n ```python\n >>> import requests\n >>> from PIL import Image\n >>> from optimum.onnxruntime import {model_class}\n >>> from transformers import {processor_class}\n\n >>> url = \"http://images.cocodataset.org/val2017/000000039769.jpg\"\n >>> image = Image.open(requests.get(url, stream=True).raw)\n\n >>> preprocessor = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = preprocessor(images=image, return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> logits = outputs.logits\n ```\n\n Example using `transformers.pipeline`:\n\n ```python\n >>> import requests\n >>> from PIL import Image\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> preprocessor = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_image_classifier = pipeline(\"image-classification\", model=model, feature_extractor=preprocessor)\n\n >>> url = \"http://images.cocodataset.org/val2017/000000039769.jpg\"\n >>> pred = onnx_image_classifier(url)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model for image-classification tasks.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForImageClassification(ORTModel):\n \"\"\"\n Image Classification model for ONNX.\n 
\"\"\"\n\n # used in from_transformers to export model to onnx\n export_feature = \"image-classification\"\n auto_model_class = AutoModelForImageClassification\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n # create {name:idx} dict for model outputs\n self.model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n\n @add_start_docstrings_to_model_forward(\n ONNX_IMAGE_INPUTS_DOCSTRING.format(\"batch_size, num_channels, height, width\")\n + IMAGE_CLASSIFICATION_EXAMPLE.format(\n processor_class=_FEATURE_EXTRACTOR_FOR_DOC,\n model_class=\"ORTModelForImageClassification\",\n checkpoint=\"optimum/vit-base-patch16-224\",\n )\n )\n def forward(\n self,\n pixel_values: torch.Tensor,\n **kwargs,\n ):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = {\n \"pixel_values\": pixel_values.cpu().detach().numpy(),\n }\n # run inference\n outputs = self.model.run(None, onnx_inputs)\n # converts output to namedtuple for pipelines post-processing\n return ImageClassifierOutput(\n logits=torch.from_numpy(outputs[self.model_outputs[\"logits\"]]),\n )\n\n\nCUSTOM_TASKS_EXAMPLE = r\"\"\"\n Example of custom tasks(e.g. a sentence transformers taking `pooler_output` as output):\n\n ```python\n >>> from transformers import {processor_class}\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n\n >>> inputs = tokenizer(\"I love burritos!\", return_tensors=\"pt\")\n\n >>> outputs = model(**inputs)\n >>> last_hidden_state = outputs.last_hidden_state\n >>> pooler_output = outputs.pooler_output\n ```\n\n Example using `transformers.pipelines`(only if the task is supported):\n\n ```python\n >>> from transformers import {processor_class}, pipeline\n >>> from optimum.onnxruntime import {model_class}\n\n >>> tokenizer = {processor_class}.from_pretrained(\"{checkpoint}\")\n >>> model = {model_class}.from_pretrained(\"{checkpoint}\")\n >>> onnx_extractor = pipeline(\"feature-extraction\", model=model, tokenizer=tokenizer)\n\n >>> text = \"I love burritos!\"\n >>> pred = onnx_extractor(text)\n ```\n\"\"\"\n\n\n@add_start_docstrings(\n \"\"\"\n Onnx Model for any custom tasks. 
It can be used to leverage the inference acceleration with any custom exported ONNX model.\n \"\"\",\n ONNX_MODEL_START_DOCSTRING,\n)\nclass ORTModelForCustomTasks(ORTModel):\n \"\"\"\n Onnx Model for any custom tasks.\n \"\"\"\n\n export_feature = \"default\"\n auto_model_class = AutoModel\n\n def __init__(self, model=None, config=None, **kwargs):\n super().__init__(model, config, **kwargs)\n\n @add_start_docstrings_to_model_forward(\n CUSTOM_TASKS_EXAMPLE.format(\n processor_class=_TOKENIZER_FOR_DOC,\n model_class=\"ORTModelForCustomTasks\",\n checkpoint=\"optimum/sbert-all-MiniLM-L6-with-pooler\",\n )\n )\n def forward(self, **kwargs):\n # converts pytorch inputs into numpy inputs for onnx\n onnx_inputs = self._prepare_onnx_inputs(**kwargs)\n # run inference\n onnx_outputs = self.model.run(None, onnx_inputs)\n outputs = self._prepare_onnx_outputs(onnx_outputs)\n # converts outputs to namedtuple for pipelines post-processing if applicable\n return ModelOutput(outputs)\n\n def _prepare_onnx_inputs(self, **kwargs):\n model_inputs = {input_key.name: idx for idx, input_key in enumerate(self.model.get_inputs())}\n onnx_inputs = {}\n # converts pytorch inputs into numpy inputs for onnx\n for input in model_inputs.keys():\n onnx_inputs[input] = kwargs.pop(input).cpu().detach().numpy()\n\n return onnx_inputs\n\n def _prepare_onnx_outputs(self, onnx_outputs):\n model_outputs = {output_key.name: idx for idx, output_key in enumerate(self.model.get_outputs())}\n outputs = {}\n # converts onnxruntime outputs into tensor for standard outputs\n for output, idx in model_outputs.items():\n outputs[output] = torch.from_numpy(onnx_outputs[idx]).to(self.device)\n\n return outputs\n", "path": "optimum/onnxruntime/modeling_ort.py" } ]
diff --git a/optimum/onnxruntime/modeling_ort.py b/optimum/onnxruntime/modeling_ort.py
index d46a891ab6..2ba6b103a8 100644
--- a/optimum/onnxruntime/modeling_ort.py
+++ b/optimum/onnxruntime/modeling_ort.py
@@ -1051,6 +1051,7 @@ class ORTModelForCustomTasks(ORTModel):
     Onnx Model for any custom tasks.
     """
 
+    export_feature = "default"
     auto_model_class = AutoModel
 
     def __init__(self, model=None, config=None, **kwargs):
sopel-irc__sopel-1774
db: get_uri() assumes SQLite @RustyBower This slipped by me completely in the whole DB-overhaul process. Obviously this function is useless for instances not using SQLite, in its current state. Not that there will be any of those among upgrade instances (at least, not immediately), but… https://github.com/sopel-irc/sopel/blob/b33173ca5f52905f9ca45c0890e9340779c0c06d/sopel/db.py#L189-L191 Honestly, is this function even still useful? Should we just deprecate it and move on?
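One possible direction for this (a minimal sketch, not necessarily the change the maintainers actually made): the `SopelDB` constructor in the file below already stores the computed connection URL in `self.url` for every backend — a `sqlite:///` string for SQLite and a `sqlalchemy.engine.url.URL` object otherwise — so `get_uri()` could return that value instead of rebuilding an SQLite-specific string from `self.filename`. Inside the `SopelDB` class, the method body could look like:

```python
def get_uri(self):
    """Return a URL for the database, usable to connect with SQLAlchemy."""
    # self.url is set in __init__ for every supported db_type, so this no
    # longer assumes SQLite; str() keeps the sqlite:/// string as-is and
    # renders a sqlalchemy.engine.url.URL object to its string form.
    return str(self.url)
```

The other option raised in the issue — deprecating `get_uri()` and pointing callers at the connection URL (or the engine) directly — would avoid keeping a thin wrapper around a single attribute.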
[ { "content": "# coding=utf-8\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport errno\nimport json\nimport os.path\nimport sys\n\nfrom sopel.tools import Identifier\n\nfrom sqlalchemy import create_engine, Column, ForeignKey, Integer, String\nfrom sqlalchemy.engine.url import URL\nfrom sqlalchemy.exc import OperationalError, SQLAlchemyError\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import scoped_session, sessionmaker\n\nif sys.version_info.major >= 3:\n unicode = str\n basestring = str\n\n\ndef _deserialize(value):\n if value is None:\n return None\n # sqlite likes to return ints for strings that look like ints, even though\n # the column type is string. That's how you do dynamic typing wrong.\n value = unicode(value)\n # Just in case someone's mucking with the DB in a way we can't account for,\n # ignore json parsing errors\n try:\n value = json.loads(value)\n except ValueError:\n pass\n return value\n\n\nBASE = declarative_base()\nMYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',\n 'mysql_charset': 'utf8mb4',\n 'mysql_collate': 'utf8mb4_unicode_ci'}\n\n\nclass NickIDs(BASE):\n \"\"\"\n NickIDs SQLAlchemy Class\n \"\"\"\n __tablename__ = 'nick_ids'\n nick_id = Column(Integer, primary_key=True)\n\n\nclass Nicknames(BASE):\n \"\"\"\n Nicknames SQLAlchemy Class\n \"\"\"\n __tablename__ = 'nicknames'\n __table_args__ = MYSQL_TABLE_ARGS\n nick_id = Column(Integer, ForeignKey('nick_ids.nick_id'), primary_key=True)\n slug = Column(String(255), primary_key=True)\n canonical = Column(String(255))\n\n\nclass NickValues(BASE):\n \"\"\"\n NickValues SQLAlchemy Class\n \"\"\"\n __tablename__ = 'nick_values'\n __table_args__ = MYSQL_TABLE_ARGS\n nick_id = Column(Integer, ForeignKey('nick_ids.nick_id'), primary_key=True)\n key = Column(String(255), primary_key=True)\n value = Column(String(255))\n\n\nclass ChannelValues(BASE):\n \"\"\"\n ChannelValues SQLAlchemy Class\n \"\"\"\n __tablename__ = 'channel_values'\n __table_args__ = MYSQL_TABLE_ARGS\n channel = Column(String(255), primary_key=True)\n key = Column(String(255), primary_key=True)\n value = Column(String(255))\n\n\nclass PluginValues(BASE):\n \"\"\"\n PluginValues SQLAlchemy Class\n \"\"\"\n __tablename__ = 'plugin_values'\n __table_args__ = MYSQL_TABLE_ARGS\n plugin = Column(String(255), primary_key=True)\n key = Column(String(255), primary_key=True)\n value = Column(String(255))\n\n\nclass SopelDB(object):\n \"\"\"*Availability: 5.0+*\n\n This defines an interface for basic, common operations on a sqlite\n database. It simplifies those common operations, and allows direct access\n to the database, wherever the user has configured it to be.\n\n When configured with a relative filename, it is assumed to be in the directory\n set (or defaulted to) in the core setting ``homedir``.\n \"\"\"\n\n def __init__(self, config):\n # MySQL - mysql://username:password@localhost/db\n # SQLite - sqlite:////home/sopel/.sopel/default.db\n db_type = config.core.db_type\n\n # Handle SQLite explicitly as a default\n if db_type == 'sqlite':\n path = config.core.db_filename\n if path is None:\n path = os.path.join(config.core.homedir, config.basename + '.db')\n path = os.path.expanduser(path)\n if not os.path.isabs(path):\n path = os.path.normpath(os.path.join(config.core.homedir, path))\n if not os.path.isdir(os.path.dirname(path)):\n raise OSError(\n errno.ENOENT,\n 'Cannot create database file. '\n 'No such directory: \"{}\". 
Check that configuration setting '\n 'core.db_filename is valid'.format(os.path.dirname(path)),\n path\n )\n self.filename = path\n self.url = 'sqlite:///%s' % path\n # Otherwise, handle all other database engines\n else:\n query = {}\n if db_type == 'mysql':\n drivername = config.core.db_driver or 'mysql'\n query = {'charset': 'utf8mb4'}\n elif db_type == 'postgres':\n drivername = config.core.db_driver or 'postgresql'\n elif db_type == 'oracle':\n drivername = config.core.db_driver or 'oracle'\n elif db_type == 'mssql':\n drivername = config.core.db_driver or 'mssql+pymssql'\n elif db_type == 'firebird':\n drivername = config.core.db_driver or 'firebird+fdb'\n elif db_type == 'sybase':\n drivername = config.core.db_driver or 'sybase+pysybase'\n else:\n raise Exception('Unknown db_type')\n\n db_user = config.core.db_user\n db_pass = config.core.db_pass\n db_host = config.core.db_host\n db_port = config.core.db_port # Optional\n db_name = config.core.db_name # Optional, depending on DB\n\n # Ensure we have all our variables defined\n if db_user is None or db_pass is None or db_host is None:\n raise Exception('Please make sure the following core '\n 'configuration values are defined: '\n 'db_user, db_pass, db_host')\n self.url = URL(drivername=drivername, username=db_user,\n password=db_pass, host=db_host, port=db_port,\n database=db_name, query=query)\n\n self.engine = create_engine(self.url, pool_recycle=3600)\n\n # Catch any errors connecting to database\n try:\n self.engine.connect()\n except OperationalError:\n print(\"OperationalError: Unable to connect to database.\")\n raise\n\n # Create our tables\n BASE.metadata.create_all(self.engine)\n\n self.ssession = scoped_session(sessionmaker(bind=self.engine))\n\n def connect(self):\n \"\"\"Return a raw database connection object.\"\"\"\n return self.engine.connect()\n\n def execute(self, *args, **kwargs):\n \"\"\"Execute an arbitrary SQL query against the database.\n\n Returns a cursor object, on which things like `.fetchall()` can be\n called per PEP 249.\"\"\"\n with self.connect() as conn:\n return conn.execute(*args, **kwargs)\n\n def get_uri(self):\n \"\"\"Returns a URL for the database, usable to connect with SQLAlchemy.\"\"\"\n return 'sqlite:///{}'.format(self.filename)\n\n # NICK FUNCTIONS\n\n def get_nick_id(self, nick, create=True):\n \"\"\"Return the internal identifier for a given nick.\n\n This identifier is unique to a user, and shared across all of that\n user's aliases. If create is True, a new ID will be created if one does\n not already exist\"\"\"\n session = self.ssession()\n slug = nick.lower()\n try:\n nickname = session.query(Nicknames) \\\n .filter(Nicknames.slug == slug) \\\n .one_or_none()\n\n if nickname is None:\n if not create:\n raise ValueError('No ID exists for the given nick')\n # Generate a new ID\n nick_id = NickIDs()\n session.add(nick_id)\n session.commit()\n\n # Create a new Nickname\n nickname = Nicknames(nick_id=nick_id.nick_id, slug=slug, canonical=nick)\n session.add(nickname)\n session.commit()\n return nickname.nick_id\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def alias_nick(self, nick, alias):\n \"\"\"Create an alias for a nick.\n\n Raises ValueError if the alias already exists. 
If nick does not already\n exist, it will be added along with the alias.\"\"\"\n nick = Identifier(nick)\n alias = Identifier(alias)\n nick_id = self.get_nick_id(nick)\n session = self.ssession()\n try:\n result = session.query(Nicknames) \\\n .filter(Nicknames.slug == alias.lower()) \\\n .filter(Nicknames.canonical == alias) \\\n .one_or_none()\n if result:\n raise ValueError('Given alias is the only entry in its group.')\n nickname = Nicknames(nick_id=nick_id, slug=alias.lower(), canonical=alias)\n session.add(nickname)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def set_nick_value(self, nick, key, value):\n \"\"\"Sets the value for a given key to be associated with the nick.\"\"\"\n nick = Identifier(nick)\n value = json.dumps(value, ensure_ascii=False)\n nick_id = self.get_nick_id(nick)\n session = self.ssession()\n try:\n result = session.query(NickValues) \\\n .filter(NickValues.nick_id == nick_id) \\\n .filter(NickValues.key == key) \\\n .one_or_none()\n # NickValue exists, update\n if result:\n result.value = value\n session.commit()\n # DNE - Insert\n else:\n new_nickvalue = NickValues(nick_id=nick_id, key=key, value=value)\n session.add(new_nickvalue)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def delete_nick_value(self, nick, key):\n \"\"\"Deletes the value for a given key associated with a nick.\"\"\"\n nick = Identifier(nick)\n nick_id = self.get_nick_id(nick)\n session = self.ssession()\n try:\n result = session.query(NickValues) \\\n .filter(NickValues.nick_id == nick_id) \\\n .filter(NickValues.key == key) \\\n .one_or_none()\n # NickValue exists, delete\n if result:\n session.delete(result)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def get_nick_value(self, nick, key, default=None):\n \"\"\"Retrieves the value for a given key associated with a nick.\"\"\"\n nick = Identifier(nick)\n session = self.ssession()\n try:\n result = session.query(NickValues) \\\n .filter(Nicknames.nick_id == NickValues.nick_id) \\\n .filter(Nicknames.slug == nick.lower()) \\\n .filter(NickValues.key == key) \\\n .one_or_none()\n if result is not None:\n result = result.value\n elif default is not None:\n result = default\n return _deserialize(result)\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def unalias_nick(self, alias):\n \"\"\"Removes an alias.\n\n Raises ValueError if there is not at least one other nick in the group.\n To delete an entire group, use `delete_group`.\n \"\"\"\n alias = Identifier(alias)\n nick_id = self.get_nick_id(alias, False)\n session = self.ssession()\n try:\n count = session.query(Nicknames) \\\n .filter(Nicknames.nick_id == nick_id) \\\n .count()\n if count <= 1:\n raise ValueError('Given alias is the only entry in its group.')\n session.query(Nicknames).filter(Nicknames.slug == alias.lower()).delete()\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def delete_nick_group(self, nick):\n \"\"\"Removes a nickname, and all associated aliases and settings.\"\"\"\n nick = Identifier(nick)\n nick_id = self.get_nick_id(nick, False)\n session = self.ssession()\n try:\n session.query(Nicknames).filter(Nicknames.nick_id == nick_id).delete()\n session.query(NickValues).filter(NickValues.nick_id == nick_id).delete()\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n 
finally:\n session.close()\n\n def merge_nick_groups(self, first_nick, second_nick):\n \"\"\"Merges the nick groups for the specified nicks.\n\n Takes two nicks, which may or may not be registered. Unregistered\n nicks will be registered. Keys which are set for only one of the given\n nicks will be preserved. Where multiple nicks have values for a given\n key, the value set for the first nick will be used.\n\n Note that merging of data only applies to the native key-value store.\n If modules define their own tables which rely on the nick table, they\n will need to have their merging done separately.\"\"\"\n first_id = self.get_nick_id(Identifier(first_nick))\n second_id = self.get_nick_id(Identifier(second_nick))\n session = self.ssession()\n try:\n # Get second_id's values\n res = session.query(NickValues).filter(NickValues.nick_id == second_id).all()\n # Update first_id with second_id values if first_id doesn't have that key\n for row in res:\n first_res = session.query(NickValues) \\\n .filter(NickValues.nick_id == first_id) \\\n .filter(NickValues.key == row.key) \\\n .one_or_none()\n if not first_res:\n self.set_nick_value(first_nick, row.key, _deserialize(row.value))\n session.query(NickValues).filter(NickValues.nick_id == second_id).delete()\n session.query(Nicknames) \\\n .filter(Nicknames.nick_id == second_id) \\\n .update({'nick_id': first_id})\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n # CHANNEL FUNCTIONS\n\n def set_channel_value(self, channel, key, value):\n \"\"\"Sets the value for a given key to be associated with the channel.\"\"\"\n channel = Identifier(channel).lower()\n value = json.dumps(value, ensure_ascii=False)\n session = self.ssession()\n try:\n result = session.query(ChannelValues) \\\n .filter(ChannelValues.channel == channel)\\\n .filter(ChannelValues.key == key) \\\n .one_or_none()\n # ChannelValue exists, update\n if result:\n result.value = value\n session.commit()\n # DNE - Insert\n else:\n new_channelvalue = ChannelValues(channel=channel, key=key, value=value)\n session.add(new_channelvalue)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def delete_channel_value(self, channel, key):\n \"\"\"Deletes the value for a given key associated with a channel.\"\"\"\n channel = Identifier(channel).lower()\n session = self.ssession()\n try:\n result = session.query(ChannelValues) \\\n .filter(ChannelValues.channel == channel)\\\n .filter(ChannelValues.key == key) \\\n .one_or_none()\n # ChannelValue exists, delete\n if result:\n session.delete(result)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def get_channel_value(self, channel, key, default=None):\n \"\"\"Retrieves the value for a given key associated with a channel.\"\"\"\n channel = Identifier(channel).lower()\n session = self.ssession()\n try:\n result = session.query(ChannelValues) \\\n .filter(ChannelValues.channel == channel)\\\n .filter(ChannelValues.key == key) \\\n .one_or_none()\n if result is not None:\n result = result.value\n elif default is not None:\n result = default\n return _deserialize(result)\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n # PLUGIN FUNCTIONS\n\n def set_plugin_value(self, plugin, key, value):\n \"\"\"Sets the value for a given key to be associated with a plugin.\"\"\"\n plugin = plugin.lower()\n value = json.dumps(value, ensure_ascii=False)\n session = 
self.ssession()\n try:\n result = session.query(PluginValues) \\\n .filter(PluginValues.plugin == plugin)\\\n .filter(PluginValues.key == key) \\\n .one_or_none()\n # PluginValue exists, update\n if result:\n result.value = value\n session.commit()\n # DNE - Insert\n else:\n new_pluginvalue = PluginValues(plugin=plugin, key=key, value=value)\n session.add(new_pluginvalue)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def delete_plugin_value(self, plugin, key):\n \"\"\"Deletes the value for a given key associated with a plugin.\"\"\"\n plugin = plugin.lower()\n session = self.ssession()\n try:\n result = session.query(PluginValues) \\\n .filter(PluginValues.plugin == plugin)\\\n .filter(PluginValues.key == key) \\\n .one_or_none()\n # PluginValue exists, update\n if result:\n session.delete(result)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def get_plugin_value(self, plugin, key, default=None):\n \"\"\"Retrieves the value for a given key associated with a plugin.\"\"\"\n plugin = plugin.lower()\n session = self.ssession()\n try:\n result = session.query(PluginValues) \\\n .filter(PluginValues.plugin == plugin)\\\n .filter(PluginValues.key == key) \\\n .one_or_none()\n if result is not None:\n result = result.value\n elif default is not None:\n result = default\n return _deserialize(result)\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n # NICK AND CHANNEL FUNCTIONS\n\n def get_nick_or_channel_value(self, name, key, default=None):\n \"\"\"Gets the value `key` associated to the nick or channel `name`.\"\"\"\n name = Identifier(name)\n if name.is_nick():\n return self.get_nick_value(name, key, default)\n else:\n return self.get_channel_value(name, key, default)\n\n def get_preferred_value(self, names, key):\n \"\"\"Gets the value for the first name which has it set.\n\n `names` is a list of channel and/or user names. Returns None if none of\n the names have the key set.\"\"\"\n for name in names:\n value = self.get_nick_or_channel_value(name, key)\n if value is not None:\n return value\n", "path": "sopel/db.py" } ]
[ { "content": "# coding=utf-8\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport errno\nimport json\nimport os.path\nimport sys\n\nfrom sopel.tools import Identifier\n\nfrom sqlalchemy import create_engine, Column, ForeignKey, Integer, String\nfrom sqlalchemy.engine.url import URL\nfrom sqlalchemy.exc import OperationalError, SQLAlchemyError\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import scoped_session, sessionmaker\n\nif sys.version_info.major >= 3:\n unicode = str\n basestring = str\n\n\ndef _deserialize(value):\n if value is None:\n return None\n # sqlite likes to return ints for strings that look like ints, even though\n # the column type is string. That's how you do dynamic typing wrong.\n value = unicode(value)\n # Just in case someone's mucking with the DB in a way we can't account for,\n # ignore json parsing errors\n try:\n value = json.loads(value)\n except ValueError:\n pass\n return value\n\n\nBASE = declarative_base()\nMYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',\n 'mysql_charset': 'utf8mb4',\n 'mysql_collate': 'utf8mb4_unicode_ci'}\n\n\nclass NickIDs(BASE):\n \"\"\"\n NickIDs SQLAlchemy Class\n \"\"\"\n __tablename__ = 'nick_ids'\n nick_id = Column(Integer, primary_key=True)\n\n\nclass Nicknames(BASE):\n \"\"\"\n Nicknames SQLAlchemy Class\n \"\"\"\n __tablename__ = 'nicknames'\n __table_args__ = MYSQL_TABLE_ARGS\n nick_id = Column(Integer, ForeignKey('nick_ids.nick_id'), primary_key=True)\n slug = Column(String(255), primary_key=True)\n canonical = Column(String(255))\n\n\nclass NickValues(BASE):\n \"\"\"\n NickValues SQLAlchemy Class\n \"\"\"\n __tablename__ = 'nick_values'\n __table_args__ = MYSQL_TABLE_ARGS\n nick_id = Column(Integer, ForeignKey('nick_ids.nick_id'), primary_key=True)\n key = Column(String(255), primary_key=True)\n value = Column(String(255))\n\n\nclass ChannelValues(BASE):\n \"\"\"\n ChannelValues SQLAlchemy Class\n \"\"\"\n __tablename__ = 'channel_values'\n __table_args__ = MYSQL_TABLE_ARGS\n channel = Column(String(255), primary_key=True)\n key = Column(String(255), primary_key=True)\n value = Column(String(255))\n\n\nclass PluginValues(BASE):\n \"\"\"\n PluginValues SQLAlchemy Class\n \"\"\"\n __tablename__ = 'plugin_values'\n __table_args__ = MYSQL_TABLE_ARGS\n plugin = Column(String(255), primary_key=True)\n key = Column(String(255), primary_key=True)\n value = Column(String(255))\n\n\nclass SopelDB(object):\n \"\"\"*Availability: 5.0+*\n\n This defines an interface for basic, common operations on a sqlite\n database. It simplifies those common operations, and allows direct access\n to the database, wherever the user has configured it to be.\n\n When configured with a relative filename, it is assumed to be in the directory\n set (or defaulted to) in the core setting ``homedir``.\n \"\"\"\n\n def __init__(self, config):\n # MySQL - mysql://username:password@localhost/db\n # SQLite - sqlite:////home/sopel/.sopel/default.db\n db_type = config.core.db_type\n\n # Handle SQLite explicitly as a default\n if db_type == 'sqlite':\n path = config.core.db_filename\n if path is None:\n path = os.path.join(config.core.homedir, config.basename + '.db')\n path = os.path.expanduser(path)\n if not os.path.isabs(path):\n path = os.path.normpath(os.path.join(config.core.homedir, path))\n if not os.path.isdir(os.path.dirname(path)):\n raise OSError(\n errno.ENOENT,\n 'Cannot create database file. '\n 'No such directory: \"{}\". 
Check that configuration setting '\n 'core.db_filename is valid'.format(os.path.dirname(path)),\n path\n )\n self.filename = path\n self.url = 'sqlite:///%s' % path\n # Otherwise, handle all other database engines\n else:\n query = {}\n if db_type == 'mysql':\n drivername = config.core.db_driver or 'mysql'\n query = {'charset': 'utf8mb4'}\n elif db_type == 'postgres':\n drivername = config.core.db_driver or 'postgresql'\n elif db_type == 'oracle':\n drivername = config.core.db_driver or 'oracle'\n elif db_type == 'mssql':\n drivername = config.core.db_driver or 'mssql+pymssql'\n elif db_type == 'firebird':\n drivername = config.core.db_driver or 'firebird+fdb'\n elif db_type == 'sybase':\n drivername = config.core.db_driver or 'sybase+pysybase'\n else:\n raise Exception('Unknown db_type')\n\n db_user = config.core.db_user\n db_pass = config.core.db_pass\n db_host = config.core.db_host\n db_port = config.core.db_port # Optional\n db_name = config.core.db_name # Optional, depending on DB\n\n # Ensure we have all our variables defined\n if db_user is None or db_pass is None or db_host is None:\n raise Exception('Please make sure the following core '\n 'configuration values are defined: '\n 'db_user, db_pass, db_host')\n self.url = URL(drivername=drivername, username=db_user,\n password=db_pass, host=db_host, port=db_port,\n database=db_name, query=query)\n\n self.engine = create_engine(self.url, pool_recycle=3600)\n\n # Catch any errors connecting to database\n try:\n self.engine.connect()\n except OperationalError:\n print(\"OperationalError: Unable to connect to database.\")\n raise\n\n # Create our tables\n BASE.metadata.create_all(self.engine)\n\n self.ssession = scoped_session(sessionmaker(bind=self.engine))\n\n def connect(self):\n \"\"\"Return a raw database connection object.\"\"\"\n return self.engine.connect()\n\n def execute(self, *args, **kwargs):\n \"\"\"Execute an arbitrary SQL query against the database.\n\n Returns a cursor object, on which things like `.fetchall()` can be\n called per PEP 249.\"\"\"\n with self.connect() as conn:\n return conn.execute(*args, **kwargs)\n\n def get_uri(self):\n \"\"\"Returns a URL for the database, usable to connect with SQLAlchemy.\"\"\"\n return self.url\n\n # NICK FUNCTIONS\n\n def get_nick_id(self, nick, create=True):\n \"\"\"Return the internal identifier for a given nick.\n\n This identifier is unique to a user, and shared across all of that\n user's aliases. If create is True, a new ID will be created if one does\n not already exist\"\"\"\n session = self.ssession()\n slug = nick.lower()\n try:\n nickname = session.query(Nicknames) \\\n .filter(Nicknames.slug == slug) \\\n .one_or_none()\n\n if nickname is None:\n if not create:\n raise ValueError('No ID exists for the given nick')\n # Generate a new ID\n nick_id = NickIDs()\n session.add(nick_id)\n session.commit()\n\n # Create a new Nickname\n nickname = Nicknames(nick_id=nick_id.nick_id, slug=slug, canonical=nick)\n session.add(nickname)\n session.commit()\n return nickname.nick_id\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def alias_nick(self, nick, alias):\n \"\"\"Create an alias for a nick.\n\n Raises ValueError if the alias already exists. 
If nick does not already\n exist, it will be added along with the alias.\"\"\"\n nick = Identifier(nick)\n alias = Identifier(alias)\n nick_id = self.get_nick_id(nick)\n session = self.ssession()\n try:\n result = session.query(Nicknames) \\\n .filter(Nicknames.slug == alias.lower()) \\\n .filter(Nicknames.canonical == alias) \\\n .one_or_none()\n if result:\n raise ValueError('Given alias is the only entry in its group.')\n nickname = Nicknames(nick_id=nick_id, slug=alias.lower(), canonical=alias)\n session.add(nickname)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def set_nick_value(self, nick, key, value):\n \"\"\"Sets the value for a given key to be associated with the nick.\"\"\"\n nick = Identifier(nick)\n value = json.dumps(value, ensure_ascii=False)\n nick_id = self.get_nick_id(nick)\n session = self.ssession()\n try:\n result = session.query(NickValues) \\\n .filter(NickValues.nick_id == nick_id) \\\n .filter(NickValues.key == key) \\\n .one_or_none()\n # NickValue exists, update\n if result:\n result.value = value\n session.commit()\n # DNE - Insert\n else:\n new_nickvalue = NickValues(nick_id=nick_id, key=key, value=value)\n session.add(new_nickvalue)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def delete_nick_value(self, nick, key):\n \"\"\"Deletes the value for a given key associated with a nick.\"\"\"\n nick = Identifier(nick)\n nick_id = self.get_nick_id(nick)\n session = self.ssession()\n try:\n result = session.query(NickValues) \\\n .filter(NickValues.nick_id == nick_id) \\\n .filter(NickValues.key == key) \\\n .one_or_none()\n # NickValue exists, delete\n if result:\n session.delete(result)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def get_nick_value(self, nick, key, default=None):\n \"\"\"Retrieves the value for a given key associated with a nick.\"\"\"\n nick = Identifier(nick)\n session = self.ssession()\n try:\n result = session.query(NickValues) \\\n .filter(Nicknames.nick_id == NickValues.nick_id) \\\n .filter(Nicknames.slug == nick.lower()) \\\n .filter(NickValues.key == key) \\\n .one_or_none()\n if result is not None:\n result = result.value\n elif default is not None:\n result = default\n return _deserialize(result)\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def unalias_nick(self, alias):\n \"\"\"Removes an alias.\n\n Raises ValueError if there is not at least one other nick in the group.\n To delete an entire group, use `delete_group`.\n \"\"\"\n alias = Identifier(alias)\n nick_id = self.get_nick_id(alias, False)\n session = self.ssession()\n try:\n count = session.query(Nicknames) \\\n .filter(Nicknames.nick_id == nick_id) \\\n .count()\n if count <= 1:\n raise ValueError('Given alias is the only entry in its group.')\n session.query(Nicknames).filter(Nicknames.slug == alias.lower()).delete()\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def delete_nick_group(self, nick):\n \"\"\"Removes a nickname, and all associated aliases and settings.\"\"\"\n nick = Identifier(nick)\n nick_id = self.get_nick_id(nick, False)\n session = self.ssession()\n try:\n session.query(Nicknames).filter(Nicknames.nick_id == nick_id).delete()\n session.query(NickValues).filter(NickValues.nick_id == nick_id).delete()\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n 
finally:\n session.close()\n\n def merge_nick_groups(self, first_nick, second_nick):\n \"\"\"Merges the nick groups for the specified nicks.\n\n Takes two nicks, which may or may not be registered. Unregistered\n nicks will be registered. Keys which are set for only one of the given\n nicks will be preserved. Where multiple nicks have values for a given\n key, the value set for the first nick will be used.\n\n Note that merging of data only applies to the native key-value store.\n If modules define their own tables which rely on the nick table, they\n will need to have their merging done separately.\"\"\"\n first_id = self.get_nick_id(Identifier(first_nick))\n second_id = self.get_nick_id(Identifier(second_nick))\n session = self.ssession()\n try:\n # Get second_id's values\n res = session.query(NickValues).filter(NickValues.nick_id == second_id).all()\n # Update first_id with second_id values if first_id doesn't have that key\n for row in res:\n first_res = session.query(NickValues) \\\n .filter(NickValues.nick_id == first_id) \\\n .filter(NickValues.key == row.key) \\\n .one_or_none()\n if not first_res:\n self.set_nick_value(first_nick, row.key, _deserialize(row.value))\n session.query(NickValues).filter(NickValues.nick_id == second_id).delete()\n session.query(Nicknames) \\\n .filter(Nicknames.nick_id == second_id) \\\n .update({'nick_id': first_id})\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n # CHANNEL FUNCTIONS\n\n def set_channel_value(self, channel, key, value):\n \"\"\"Sets the value for a given key to be associated with the channel.\"\"\"\n channel = Identifier(channel).lower()\n value = json.dumps(value, ensure_ascii=False)\n session = self.ssession()\n try:\n result = session.query(ChannelValues) \\\n .filter(ChannelValues.channel == channel)\\\n .filter(ChannelValues.key == key) \\\n .one_or_none()\n # ChannelValue exists, update\n if result:\n result.value = value\n session.commit()\n # DNE - Insert\n else:\n new_channelvalue = ChannelValues(channel=channel, key=key, value=value)\n session.add(new_channelvalue)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def delete_channel_value(self, channel, key):\n \"\"\"Deletes the value for a given key associated with a channel.\"\"\"\n channel = Identifier(channel).lower()\n session = self.ssession()\n try:\n result = session.query(ChannelValues) \\\n .filter(ChannelValues.channel == channel)\\\n .filter(ChannelValues.key == key) \\\n .one_or_none()\n # ChannelValue exists, delete\n if result:\n session.delete(result)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def get_channel_value(self, channel, key, default=None):\n \"\"\"Retrieves the value for a given key associated with a channel.\"\"\"\n channel = Identifier(channel).lower()\n session = self.ssession()\n try:\n result = session.query(ChannelValues) \\\n .filter(ChannelValues.channel == channel)\\\n .filter(ChannelValues.key == key) \\\n .one_or_none()\n if result is not None:\n result = result.value\n elif default is not None:\n result = default\n return _deserialize(result)\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n # PLUGIN FUNCTIONS\n\n def set_plugin_value(self, plugin, key, value):\n \"\"\"Sets the value for a given key to be associated with a plugin.\"\"\"\n plugin = plugin.lower()\n value = json.dumps(value, ensure_ascii=False)\n session = 
self.ssession()\n try:\n result = session.query(PluginValues) \\\n .filter(PluginValues.plugin == plugin)\\\n .filter(PluginValues.key == key) \\\n .one_or_none()\n # PluginValue exists, update\n if result:\n result.value = value\n session.commit()\n # DNE - Insert\n else:\n new_pluginvalue = PluginValues(plugin=plugin, key=key, value=value)\n session.add(new_pluginvalue)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def delete_plugin_value(self, plugin, key):\n \"\"\"Deletes the value for a given key associated with a plugin.\"\"\"\n plugin = plugin.lower()\n session = self.ssession()\n try:\n result = session.query(PluginValues) \\\n .filter(PluginValues.plugin == plugin)\\\n .filter(PluginValues.key == key) \\\n .one_or_none()\n # PluginValue exists, update\n if result:\n session.delete(result)\n session.commit()\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n def get_plugin_value(self, plugin, key, default=None):\n \"\"\"Retrieves the value for a given key associated with a plugin.\"\"\"\n plugin = plugin.lower()\n session = self.ssession()\n try:\n result = session.query(PluginValues) \\\n .filter(PluginValues.plugin == plugin)\\\n .filter(PluginValues.key == key) \\\n .one_or_none()\n if result is not None:\n result = result.value\n elif default is not None:\n result = default\n return _deserialize(result)\n except SQLAlchemyError:\n session.rollback()\n raise\n finally:\n session.close()\n\n # NICK AND CHANNEL FUNCTIONS\n\n def get_nick_or_channel_value(self, name, key, default=None):\n \"\"\"Gets the value `key` associated to the nick or channel `name`.\"\"\"\n name = Identifier(name)\n if name.is_nick():\n return self.get_nick_value(name, key, default)\n else:\n return self.get_channel_value(name, key, default)\n\n def get_preferred_value(self, names, key):\n \"\"\"Gets the value for the first name which has it set.\n\n `names` is a list of channel and/or user names. Returns None if none of\n the names have the key set.\"\"\"\n for name in names:\n value = self.get_nick_or_channel_value(name, key)\n if value is not None:\n return value\n", "path": "sopel/db.py" } ]
diff --git a/sopel/db.py b/sopel/db.py index 471d035d8b..028fdc98cb 100644 --- a/sopel/db.py +++ b/sopel/db.py @@ -188,7 +188,7 @@ def execute(self, *args, **kwargs): def get_uri(self): """Returns a URL for the database, usable to connect with SQLAlchemy.""" - return 'sqlite:///{}'.format(self.filename) + return self.url # NICK FUNCTIONS
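Note on the change above: in the original `SopelDB.__init__`, `self.filename` is only assigned in the sqlite branch, while every branch assigns `self.url`, so building the URI from `self.filename` breaks for any non-sqlite backend. The standalone sketch below (hypothetical class names, plain strings instead of Sopel's SQLAlchemy `URL` objects) illustrates why returning `self.url` is the safer choice:

```python
# Minimal sketch of the get_uri() change; these are not Sopel's actual classes.
class BeforeDB:
    def __init__(self, url, filename=None):
        self.url = url
        if filename is not None:      # only the sqlite branch ever sets this
            self.filename = filename

    def get_uri(self):
        # old behaviour: assumes a sqlite file even for MySQL/Postgres/... configs
        return 'sqlite:///{}'.format(self.filename)


class AfterDB(BeforeDB):
    def get_uri(self):
        # new behaviour: hand back whatever URL __init__ built
        return self.url


mysql_url = 'mysql://user:secret@localhost/sopel'
print(AfterDB(mysql_url).get_uri())          # mysql://user:secret@localhost/sopel
try:
    BeforeDB(mysql_url).get_uri()
except AttributeError as exc:                # no self.filename outside the sqlite branch
    print('old get_uri fails:', exc)
```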
streamlink__streamlink-5926
plugins.mangomolo: error: No plugin can handle URL ### Checklist - [X] This is a [plugin issue](https://streamlink.github.io/plugins.html) and not [a different kind of issue](https://github.com/streamlink/streamlink/issues/new/choose) - [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink) - [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22) - [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master) ### Streamlink version [cli][info] Your Streamlink version (6.7.2) is up to date! ### Description Unable to get stream for Kuwaiti channels.. error message: "error: No plugin can handle URL:" sample URLs: https://www.media.gov.kw/LiveTV.aspx https://www.media.gov.kw/LiveTV.aspx?PanChannel=Drama ### Debug log ```text user@desktop:~ $ streamlink https://www.media.gov.kw/LiveTV.aspx --loglevel=debug [cli][debug] OS: Linux-6.1.21+-armv6l-with-glibc2.31 [cli][debug] Python: 3.9.2 [cli][debug] OpenSSL: OpenSSL 1.1.1w 11 Sep 2023 [cli][debug] Streamlink: 6.7.2 [cli][debug] Dependencies: [cli][debug] certifi: 2023.7.22 [cli][debug] exceptiongroup: 1.1.3 [cli][debug] isodate: 0.6.1 [cli][debug] lxml: 4.9.3 [cli][debug] pycountry: 20.7.3 [cli][debug] pycryptodome: 3.18.0 [cli][debug] PySocks: 1.7.1 [cli][debug] requests: 2.31.0 [cli][debug] trio: 0.22.2 [cli][debug] trio-websocket: 0.10.3 [cli][debug] typing-extensions: 4.7.1 [cli][debug] urllib3: 2.0.4 [cli][debug] websocket-client: 1.6.2 [cli][debug] Arguments: [cli][debug] url=https://www.media.gov.kw/LiveTV.aspx [cli][debug] --loglevel=debug error: No plugin can handle URL: https://www.media.gov.kw/LiveTV.aspx ```
[ { "content": "\"\"\"\n$description OTT video platform owned by Alpha Technology Group\n$url player.mangomolo.com\n$url media.gov.kw\n$type live\n\"\"\"\n\nimport logging\nimport re\n\nfrom streamlink.exceptions import NoStreamsError\nfrom streamlink.plugin import Plugin, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream import HLSStream\nfrom streamlink.utils.url import update_scheme\n\n\nlog = logging.getLogger(__name__)\n\n\n@pluginmatcher(\n name=\"mangomoloplayer\",\n pattern=re.compile(r\"https?://player\\.mangomolo\\.com/v1/\"),\n)\n@pluginmatcher(\n name=\"mediagovkw\",\n pattern=re.compile(r\"https?://media\\.gov\\.kw/\"),\n)\nclass Mangomolo(Plugin):\n def _get_player_url(self):\n player_url = self.session.http.get(self.url, schema=validate.Schema(\n validate.parse_html(),\n validate.xml_xpath_string(\".//iframe[contains(@src,'//player.mangomolo.com/v1/')][1]/@src\"),\n ))\n if not player_url:\n log.error(\"Could not find embedded player\")\n raise NoStreamsError\n\n self.url = update_scheme(\"https://\", player_url)\n\n def _get_streams(self):\n headers = {}\n if not self.matches[\"mangomoloplayer\"]:\n headers[\"Referer\"] = self.url\n self._get_player_url()\n\n hls_url = self.session.http.get(self.url, headers=headers, schema=validate.Schema(\n re.compile(r\"src\\s*:\\s*(?P<q>[\\\"'])(?P<url>https?://\\S+?\\.m3u8\\S*?)(?P=q)\"),\n validate.none_or_all(validate.get(\"url\")),\n ))\n if hls_url:\n return HLSStream.parse_variant_playlist(self.session, hls_url)\n\n\n__plugin__ = Mangomolo\n", "path": "src/streamlink/plugins/mangomolo.py" } ]
[ { "content": "\"\"\"\n$description OTT video platform owned by Alpha Technology Group\n$url player.mangomolo.com\n$url media.gov.kw\n$type live\n\"\"\"\n\nimport logging\nimport re\n\nfrom streamlink.exceptions import NoStreamsError\nfrom streamlink.plugin import Plugin, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream import HLSStream\nfrom streamlink.utils.url import update_scheme\n\n\nlog = logging.getLogger(__name__)\n\n\n@pluginmatcher(\n name=\"mangomoloplayer\",\n pattern=re.compile(r\"https?://player\\.mangomolo\\.com/v1/\"),\n)\n@pluginmatcher(\n name=\"mediagovkw\",\n pattern=re.compile(r\"https?://(www\\.)?media\\.gov\\.kw/\"),\n)\nclass Mangomolo(Plugin):\n def _get_player_url(self):\n player_url = self.session.http.get(self.url, schema=validate.Schema(\n validate.parse_html(),\n validate.xml_xpath_string(\".//iframe[contains(@src,'//player.mangomolo.com/v1/')][1]/@src\"),\n ))\n if not player_url:\n log.error(\"Could not find embedded player\")\n raise NoStreamsError\n\n self.url = update_scheme(\"https://\", player_url)\n\n def _get_streams(self):\n headers = {}\n if not self.matches[\"mangomoloplayer\"]:\n headers[\"Referer\"] = self.url\n self._get_player_url()\n\n hls_url = self.session.http.get(self.url, headers=headers, schema=validate.Schema(\n re.compile(r\"src\\s*:\\s*(?P<q>[\\\"'])(?P<url>https?://\\S+?\\.m3u8\\S*?)(?P=q)\"),\n validate.none_or_all(validate.get(\"url\")),\n ))\n if hls_url:\n return HLSStream.parse_variant_playlist(self.session, hls_url)\n\n\n__plugin__ = Mangomolo\n", "path": "src/streamlink/plugins/mangomolo.py" } ]
diff --git a/src/streamlink/plugins/mangomolo.py b/src/streamlink/plugins/mangomolo.py index 186732b6c03..4f6e00dbfb7 100644 --- a/src/streamlink/plugins/mangomolo.py +++ b/src/streamlink/plugins/mangomolo.py @@ -24,7 +24,7 @@ ) @pluginmatcher( name="mediagovkw", - pattern=re.compile(r"https?://media\.gov\.kw/"), + pattern=re.compile(r"https?://(www\.)?media\.gov\.kw/"), ) class Mangomolo(Plugin): def _get_player_url(self): diff --git a/tests/plugins/test_mangomolo.py b/tests/plugins/test_mangomolo.py index d80e8f6f2a3..e34244d9e7e 100644 --- a/tests/plugins/test_mangomolo.py +++ b/tests/plugins/test_mangomolo.py @@ -15,8 +15,16 @@ class TestPluginCanHandleUrlMangomolo(PluginCanHandleUrl): "mediagovkw", "https://media.gov.kw/LiveTV.aspx?PanChannel=KTV1", ), + ( + "mediagovkw", + "https://www.media.gov.kw/LiveTV.aspx?PanChannel=KTV1", + ), ( "mediagovkw", "https://media.gov.kw/LiveTV.aspx?PanChannel=KTVSports", ), + ( + "mediagovkw", + "https://www.media.gov.kw/LiveTV.aspx?PanChannel=KTVSports", + ), ]
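Note on the patch above: the original `mediagovkw` matcher only accepts the bare `media.gov.kw` host, while the URLs in the report use `www.media.gov.kw`, which is why plugin resolution failed. A quick standalone check of the two patterns (regexes copied from the diff, no Streamlink import needed):

```python
import re

old = re.compile(r"https?://media\.gov\.kw/")
new = re.compile(r"https?://(www\.)?media\.gov\.kw/")

url = "https://www.media.gov.kw/LiveTV.aspx?PanChannel=Drama"
print(bool(old.match(url)))   # False -> "No plugin can handle URL"
print(bool(new.match(url)))   # True  -> plugin is selected
```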
psychopy__psychopy-2333
Demos -> Hardware -> testSoundLatency.py not working in v3.0.6 Running Demo -> Hardware -> testSoundLatency.py results in the following error message: ``` ##### Running: C:\Program Files (x86)\PsychoPy3\lib\site-packages\psychopy\demos\coder\hardware\testSoundLatency.py ##### pygame 1.9.4 Hello from the pygame community. https://www.pygame.org/contribute.html Traceback (most recent call last): File "C:\Program Files (x86)\PsychoPy3\lib\site-packages\psychopy\demos\coder\hardware\testSoundLatency.py", line 16, in <module> from labjack import u3 ModuleNotFoundError: No module named 'labjack' ``` Windows 7, 64 bit, PsychoPy 3.0.6 64 bit standalone
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nDemo for using labjack DAC devices\n\nSee also\n http: //labjack.com/support/labjackpython\nbut note that the version shipped with standalone PsychoPy\nhas u3 (and others below an umbrella called labjack) so the import\nline is slightly different to the documentation on LabJack's website\n\"\"\"\n\nfrom __future__ import absolute_import, division, print_function\n\nfrom builtins import range\nfrom psychopy import visual, core, event, sound\nfrom labjack import u3\n\n# sound.setAudioAPI('pyaudio')\n\nwin = visual.Window([800, 800])\nstim = visual.GratingStim(win, color=-1, sf=0)\nsnd = sound.Sound(880)\nprint(snd)\n# setup labjack U3\nports = u3.U3()\nFIO4 = 6004 # the address of line FIO4\n\nwhile True:\n # do this repeatedly for timing tests\n ports.writeRegister(FIO4, 0) # start low\n\n # draw black square\n stim.draw()\n win.flip()\n\n # wait for a key press\n if 'q' in event.waitKeys():\n break\n\n # set to white, flip window and raise level port FIO4\n stim.setColor(1)\n stim.draw()\n win.flip()\n ports.writeRegister(FIO4, 1)\n snd.play()\n for frameN in range(4):\n stim.draw()\n win.flip()\n\n # set color back to black and set FIO4 to low again\n stim.setColor(-1)\n stim.draw()\n win.flip()\n ports.writeRegister(FIO4, 0)\n\nwin.close()\ncore.quit()\n\n# The contents of this file are in the public domain.\n", "path": "psychopy/demos/coder/hardware/labjack_u3.py" } ]
[ { "content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nDemo for using labjack DAC devices\n\nSee also\n http: //labjack.com/support/labjackpython\nbut note that the version shipped with standalone PsychoPy\nhas u3 (and others below an umbrella called labjack) so the import\nline is slightly different to the documentation on LabJack's website\n\"\"\"\n\nfrom __future__ import absolute_import, division, print_function\n\nfrom builtins import range\nfrom psychopy import visual, core, event, sound\ntry:\n from labjack import u3\nexcept ImportError:\n import u3\n\n# sound.setAudioAPI('pyaudio')\n\nwin = visual.Window([800, 800])\nstim = visual.GratingStim(win, color=-1, sf=0)\nsnd = sound.Sound(880)\nprint(snd)\n# setup labjack U3\nports = u3.U3()\nFIO4 = 6004 # the address of line FIO4\n\nwhile True:\n # do this repeatedly for timing tests\n ports.writeRegister(FIO4, 0) # start low\n\n # draw black square\n stim.draw()\n win.flip()\n\n # wait for a key press\n if 'q' in event.waitKeys():\n break\n\n # set to white, flip window and raise level port FIO4\n stim.setColor(1)\n stim.draw()\n win.flip()\n ports.writeRegister(FIO4, 1)\n snd.play()\n for frameN in range(4):\n stim.draw()\n win.flip()\n\n # set color back to black and set FIO4 to low again\n stim.setColor(-1)\n stim.draw()\n win.flip()\n ports.writeRegister(FIO4, 0)\n\nwin.close()\ncore.quit()\n\n# The contents of this file are in the public domain.\n", "path": "psychopy/demos/coder/hardware/labjack_u3.py" } ]
diff --git a/psychopy/demos/coder/hardware/labjack_u3.py b/psychopy/demos/coder/hardware/labjack_u3.py index abb4c1af90..9294b41436 100644 --- a/psychopy/demos/coder/hardware/labjack_u3.py +++ b/psychopy/demos/coder/hardware/labjack_u3.py @@ -15,7 +15,10 @@ from builtins import range from psychopy import visual, core, event, sound -from labjack import u3 +try: + from labjack import u3 +except ImportError: + import u3 # sound.setAudioAPI('pyaudio')
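Note on the patch above: per the demo's own docstring, the standalone PsychoPy bundle ships `u3` under a `labjack` umbrella package, whereas a plain LabJackPython installation exposes it as a top-level `u3` module, so the demo now tries both import paths. A slightly more defensive variant of the same fallback (my own sketch, not part of the PR) fails with a readable message instead of a bare traceback when neither layout is available:

```python
try:
    from labjack import u3            # layout used by the PsychoPy standalone bundle
except ImportError:
    try:
        import u3                     # layout used by a plain LabJackPython install
    except ImportError as exc:
        raise SystemExit(
            "This demo needs the LabJack 'u3' module (LabJackPython); "
            "see http://labjack.com/support/labjackpython"
        ) from exc
```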
comic__grand-challenge.org-755
Handle NoneType comparison in _scores_to_ranks ``` TypeError: '<' not supported between instances of 'NoneType' and 'float' ```
[ { "content": "from collections import OrderedDict\nfrom typing import Tuple, NamedTuple, List, Callable, Iterable, Dict\n\nfrom grandchallenge.evaluation.models import Result\nfrom grandchallenge.evaluation.templatetags.evaluation_extras import (\n get_jsonpath\n)\n\n\nclass Metric(NamedTuple):\n path: str\n reverse: bool\n\n\nclass Positions(NamedTuple):\n ranks: Dict[str, float]\n rank_scores: Dict[str, float]\n rank_per_metric: Dict[str, Dict[str, float]]\n\n\ndef rank_results(\n *,\n results: Tuple[Result, ...],\n metrics: Tuple[Metric, ...],\n score_method: Callable,\n) -> Positions:\n \"\"\"\n Calculates the overall rank for each result, along with the rank_score\n and the rank per metric.\n \"\"\"\n\n results = _filter_valid_results(results=results, metrics=metrics)\n\n rank_per_metric = _get_rank_per_metric(results=results, metrics=metrics)\n\n rank_scores = {\n pk: score_method([m for m in metrics.values()])\n for pk, metrics in rank_per_metric.items()\n }\n\n return Positions(\n ranks=_scores_to_ranks(scores=rank_scores, reverse=False),\n rank_scores=rank_scores,\n rank_per_metric=rank_per_metric,\n )\n\n\ndef _filter_valid_results(\n *, results: Iterable[Result], metrics: Tuple[Metric, ...]\n) -> List[Result]:\n \"\"\" Ensure that all of the metrics are in every result \"\"\"\n return [\n res\n for res in results\n if all(get_jsonpath(res.metrics, m.path) != \"\" for m in metrics)\n ]\n\n\ndef _get_rank_per_metric(\n *, results: Iterable[Result], metrics: Tuple[Metric, ...]\n) -> Dict[str, Dict[str, float]]:\n \"\"\"\n Takes results and calculates the rank for each of the individual metrics\n\n Returns a dictionary where the key is the pk of the result, and the\n values is another dictionary where the key is the path of the metric and\n the value is the rank of this result for this metric\n \"\"\"\n metric_rank = {}\n for metric in metrics:\n # Extract the value of the metric for this primary key and sort on the\n # value of the metric\n metric_scores = {\n res.pk: get_jsonpath(res.metrics, metric.path) for res in results\n }\n metric_rank[metric.path] = _scores_to_ranks(\n scores=metric_scores, reverse=metric.reverse\n )\n\n return {\n res.pk: {\n metric_path: ranks[res.pk]\n for metric_path, ranks in metric_rank.items()\n }\n for res in results\n }\n\n\ndef _scores_to_ranks(\n *, scores: Dict, reverse: bool = False\n) -> Dict[str, float]:\n \"\"\"\n Go from a score (a scalar) to a rank (integer). If two scalars are the\n same then they will have the same rank.\n\n Takes a dictionary where the keys are the pk of the results and the values\n are the scores.\n\n Outputs a dictionary where they keys are the pk of the results and the\n values are the ranks.\n \"\"\"\n scores = OrderedDict(\n sorted(scores.items(), key=lambda t: t[1], reverse=reverse)\n )\n\n ranks = {}\n current_score = current_rank = None\n\n for idx, (pk, score) in enumerate(scores.items()):\n if score != current_score:\n current_score = score\n current_rank = idx + 1\n\n ranks[pk] = current_rank\n\n return ranks\n", "path": "app/grandchallenge/evaluation/utils.py" } ]
[ { "content": "from collections import OrderedDict\nfrom typing import Tuple, NamedTuple, List, Callable, Iterable, Dict\n\nfrom grandchallenge.evaluation.models import Result\nfrom grandchallenge.evaluation.templatetags.evaluation_extras import (\n get_jsonpath\n)\n\n\nclass Metric(NamedTuple):\n path: str\n reverse: bool\n\n\nclass Positions(NamedTuple):\n ranks: Dict[str, float]\n rank_scores: Dict[str, float]\n rank_per_metric: Dict[str, Dict[str, float]]\n\n\ndef rank_results(\n *,\n results: Tuple[Result, ...],\n metrics: Tuple[Metric, ...],\n score_method: Callable,\n) -> Positions:\n \"\"\"\n Calculates the overall rank for each result, along with the rank_score\n and the rank per metric.\n \"\"\"\n\n results = _filter_valid_results(results=results, metrics=metrics)\n\n rank_per_metric = _get_rank_per_metric(results=results, metrics=metrics)\n\n rank_scores = {\n pk: score_method([m for m in metrics.values()])\n for pk, metrics in rank_per_metric.items()\n }\n\n return Positions(\n ranks=_scores_to_ranks(scores=rank_scores, reverse=False),\n rank_scores=rank_scores,\n rank_per_metric=rank_per_metric,\n )\n\n\ndef _filter_valid_results(\n *, results: Iterable[Result], metrics: Tuple[Metric, ...]\n) -> List[Result]:\n \"\"\" Ensure that all of the metrics are in every result \"\"\"\n return [\n res\n for res in results\n if all(\n get_jsonpath(res.metrics, m.path) not in [\"\", None]\n for m in metrics\n )\n ]\n\n\ndef _get_rank_per_metric(\n *, results: Iterable[Result], metrics: Tuple[Metric, ...]\n) -> Dict[str, Dict[str, float]]:\n \"\"\"\n Takes results and calculates the rank for each of the individual metrics\n\n Returns a dictionary where the key is the pk of the result, and the\n values is another dictionary where the key is the path of the metric and\n the value is the rank of this result for this metric\n \"\"\"\n metric_rank = {}\n for metric in metrics:\n # Extract the value of the metric for this primary key and sort on the\n # value of the metric\n metric_scores = {\n res.pk: get_jsonpath(res.metrics, metric.path) for res in results\n }\n metric_rank[metric.path] = _scores_to_ranks(\n scores=metric_scores, reverse=metric.reverse\n )\n\n return {\n res.pk: {\n metric_path: ranks[res.pk]\n for metric_path, ranks in metric_rank.items()\n }\n for res in results\n }\n\n\ndef _scores_to_ranks(\n *, scores: Dict, reverse: bool = False\n) -> Dict[str, float]:\n \"\"\"\n Go from a score (a scalar) to a rank (integer). If two scalars are the\n same then they will have the same rank.\n\n Takes a dictionary where the keys are the pk of the results and the values\n are the scores.\n\n Outputs a dictionary where they keys are the pk of the results and the\n values are the ranks.\n \"\"\"\n scores = OrderedDict(\n sorted(scores.items(), key=lambda t: t[1], reverse=reverse)\n )\n\n ranks = {}\n current_score = current_rank = None\n\n for idx, (pk, score) in enumerate(scores.items()):\n if score != current_score:\n current_score = score\n current_rank = idx + 1\n\n ranks[pk] = current_rank\n\n return ranks\n", "path": "app/grandchallenge/evaluation/utils.py" } ]
diff --git a/app/grandchallenge/evaluation/utils.py b/app/grandchallenge/evaluation/utils.py index e76a7f87a4..1366b997b9 100644 --- a/app/grandchallenge/evaluation/utils.py +++ b/app/grandchallenge/evaluation/utils.py @@ -52,7 +52,10 @@ def _filter_valid_results( return [ res for res in results - if all(get_jsonpath(res.metrics, m.path) != "" for m in metrics) + if all( + get_jsonpath(res.metrics, m.path) not in ["", None] + for m in metrics + ) ] diff --git a/app/tests/evaluation_tests/test_utils.py b/app/tests/evaluation_tests/test_utils.py index 87245d0050..a9d7023b0a 100644 --- a/app/tests/evaluation_tests/test_utils.py +++ b/app/tests/evaluation_tests/test_utils.py @@ -231,6 +231,39 @@ def test_results_display(settings): assert_ranks(queryset, expected_ranks) [email protected]_db +def test_null_results(settings): + # Override the celery settings + settings.task_eager_propagates = (True,) + settings.task_always_eager = (True,) + settings.broker_url = ("memory://",) + settings.backend = "memory" + + challenge = ChallengeFactory() + + with mute_signals(post_save): + user1 = UserFactory() + queryset = ( + ResultFactory( + job__submission__challenge=challenge, + metrics={"a": 0.6}, + job__submission__creator=user1, + ), + ResultFactory( + job__submission__challenge=challenge, + metrics={"a": None}, + job__submission__creator=user1, + ), + ) + + challenge.evaluation_config.score_jsonpath = "a" + challenge.evaluation_config.result_display_choice = Config.ALL + challenge.evaluation_config.save() + + expected_ranks = [1, 0] + assert_ranks(queryset, expected_ranks) + + def assert_ranks(queryset, expected_ranks, expected_rank_scores=None): for r in queryset: r.refresh_from_db()
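Note on the patch above: `_scores_to_ranks` sorts the raw metric values, and Python 3 refuses to order `None` against a `float`, so any result whose metric is `None` has to be filtered out (alongside the existing `""` check) before ranking. A standalone reproduction of the failure and of the filtering idea (hypothetical result keys, no Django models involved):

```python
scores = {"result-a": 0.6, "result-b": None}

try:
    sorted(scores.items(), key=lambda t: t[1])
except TypeError as exc:
    print(exc)   # '<' not supported between instances of 'NoneType' and 'float'

valid = {pk: s for pk, s in scores.items() if s not in ["", None]}
print(sorted(valid.items(), key=lambda t: t[1]))   # [('result-a', 0.6)]
```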
ESMCI__cime-4035
cheyenne needs a module load python Now that we require python 3.5+, we need to do a module load python on cheyenne. The lack of this module load is responsible for a failure in `J_TestCreateNewcase.test_f_createnewcase_with_user_compset` if you run the whole `J_TestCreateNewcase` suite, and may cause other problems as well. I'll get a fix in shortly.
[ { "content": "\"\"\"\nEncapsulate the importing of python utils and logging setup, things\nthat every script should do.\n\"\"\"\n# pylint: disable=unused-import\n\nimport sys, os\nimport __main__ as main\n_CIMEROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"..\",\"..\")\n_LIB_DIR = os.path.join(_CIMEROOT, \"scripts\", \"lib\")\nsys.path.append(_LIB_DIR)\n\n# Important: Allows external tools to link up with CIME\nos.environ[\"CIMEROOT\"] = _CIMEROOT\n\nimport CIME.utils\nCIME.utils.check_minimum_python_version(2, 7)\nCIME.utils.stop_buffering_output()\nimport logging, argparse\n", "path": "scripts/Tools/standard_script_setup.py" } ]
[ { "content": "\"\"\"\nEncapsulate the importing of python utils and logging setup, things\nthat every script should do.\n\"\"\"\n# pylint: disable=unused-import\n\nimport sys, os\nimport __main__ as main\n_CIMEROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"..\",\"..\")\n_LIB_DIR = os.path.join(_CIMEROOT, \"scripts\", \"lib\")\nsys.path.append(_LIB_DIR)\n\n# Important: Allows external tools to link up with CIME\nos.environ[\"CIMEROOT\"] = _CIMEROOT\n\nimport CIME.utils\nCIME.utils.check_minimum_python_version(3, 6)\nCIME.utils.stop_buffering_output()\nimport logging, argparse\n", "path": "scripts/Tools/standard_script_setup.py" } ]
diff --git a/config/cesm/machines/config_machines.xml b/config/cesm/machines/config_machines.xml index da05b3f4a59..70d92bcd2f2 100644 --- a/config/cesm/machines/config_machines.xml +++ b/config/cesm/machines/config_machines.xml @@ -561,6 +561,7 @@ This allows using a different mpirun command to launch unit tests <modules> <command name="purge"/> <command name="load">ncarenv/1.3</command> + <command name="load">python/3.7.9</command> <command name="load">cmake</command> </modules> <modules compiler="intel"> diff --git a/scripts/Tools/standard_script_setup.py b/scripts/Tools/standard_script_setup.py index a3f4801faf0..46a48078cf5 100644 --- a/scripts/Tools/standard_script_setup.py +++ b/scripts/Tools/standard_script_setup.py @@ -14,6 +14,6 @@ os.environ["CIMEROOT"] = _CIMEROOT import CIME.utils -CIME.utils.check_minimum_python_version(2, 7) +CIME.utils.check_minimum_python_version(3, 6) CIME.utils.stop_buffering_output() import logging, argparse
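Note on the patch above: besides adding `module load python/3.7.9` to the cheyenne environment, the diff raises the floor checked in `standard_script_setup.py` from (2, 7) to (3, 6). The helper's real implementation lives in `CIME.utils`; the sketch below is only an illustration of what such a guard amounts to, not CIME's actual code:

```python
import sys

def check_minimum_python_version(major, minor):
    """Abort early with a readable message when the interpreter is too old."""
    if sys.version_info[:2] < (major, minor):
        raise SystemExit(
            "Python {}.{}+ is required, found {}.{}".format(
                major, minor, sys.version_info[0], sys.version_info[1]
            )
        )

check_minimum_python_version(3, 6)   # passes on any Python >= 3.6
```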
python-poetry__poetry-277
Discrepancy regarding license between doc and poetry init
<!-- Hi there! Thank you for discovering and submitting an issue. Before you submit this; let's make sure of a few things. Please make sure the following boxes are ticked if they are correct. If not, please try and fulfill these first. -->

<!-- Checked checkbox should look like this: [x] -->
- [x] I am on the [latest](https://github.com/sdispater/poetry/releases/latest) Poetry version.
- [x] I have searched the [issues](https://github.com/sdispater/poetry/issues) of this repo and believe that this is not a duplicate.
- [x] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option).

<!-- Once those are done, if you're able to fill in the following list with your information, it'd be very helpful to whoever handles the issue. -->

- **OS version and name**: Manjaro Linux
- **Poetry version**: 0.11.1
- **Link of a [Gist](https://gist.github.com/) with the contents of your pyproject.toml file**:

## Issue
<!-- Now feel free to write your issue, but please be descriptive! Thanks again 🙌 ❤️ -->

During the `license` prompt of `poetry init`, a valid license is required as input. According to the documentation, a license is highly recommended, but not actually required. This discrepancy should be removed by updating either the documentation or the code.
[ { "content": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nimport re\n\nfrom typing import List\nfrom typing import Tuple\n\nfrom .command import Command\nfrom .venv_command import VenvCommand\n\n\nclass InitCommand(Command):\n \"\"\"\n Creates a basic <comment>pyproject.toml</> file in the current directory.\n\n init\n {--name= : Name of the package}\n {--description= : Description of the package}\n {--author= : Author name of the package}\n {--dependency=* : Package to require with an optional version constraint,\n e.g. requests:^2.10.0 or requests=2.11.1}\n {--dev-dependency=* : Package to require for development with an optional version constraint,\n e.g. requests:^2.10.0 or requests=2.11.1}\n {--l|license= : License of the package}\n \"\"\"\n\n help = \"\"\"\\\nThe <info>init</info> command creates a basic <comment>pyproject.toml</> file in the current directory.\n\"\"\"\n\n def __init__(self):\n super(InitCommand, self).__init__()\n\n self._pool = None\n\n def handle(self):\n from poetry.layouts import layout\n from poetry.utils._compat import Path\n from poetry.vcs.git import GitConfig\n\n if (Path.cwd() / \"pyproject.toml\").exists():\n self.error(\"A pyproject.toml file already exists.\")\n return 1\n\n vcs_config = GitConfig()\n\n self.line(\n [\n \"\",\n \"This command will guide you through creating your <info>poetry.toml</> config.\",\n \"\",\n ]\n )\n\n name = self.option(\"name\")\n if not name:\n name = Path.cwd().name.lower()\n\n question = self.create_question(\n \"Package name [<comment>{}</comment>]: \".format(name), default=name\n )\n name = self.ask(question)\n\n version = \"0.1.0\"\n question = self.create_question(\n \"Version [<comment>{}</comment>]: \".format(version), default=version\n )\n version = self.ask(question)\n\n description = self.option(\"description\") or \"\"\n question = self.create_question(\n \"Description [<comment>{}</comment>]: \".format(description),\n default=description,\n )\n description = self.ask(question)\n\n author = self.option(\"author\")\n if not author and vcs_config and vcs_config.get(\"user.name\"):\n author = vcs_config[\"user.name\"]\n author_email = vcs_config.get(\"user.email\")\n if author_email:\n author += \" <{}>\".format(author_email)\n\n question = self.create_question(\n \"Author [<comment>{}</comment>, n to skip]: \".format(author), default=author\n )\n question.validator = lambda v: self._validate_author(v, author)\n author = self.ask(question)\n\n if not author:\n authors = []\n else:\n authors = [author]\n\n license = self.option(\"license\") or \"\"\n\n question = self.create_question(\n \"License [<comment>{}</comment>]: \".format(license), default=license\n )\n question.validator = self._validate_license\n license = self.ask(question)\n\n question = self.create_question(\"Compatible Python versions [*]: \", default=\"*\")\n python = self.ask(question)\n\n self.line(\"\")\n\n requirements = {}\n\n question = \"Would you like to define your dependencies\" \" (require) interactively?\"\n if self.confirm(question, True):\n requirements = self._format_requirements(\n self._determine_requirements(self.option(\"dependency\"))\n )\n\n dev_requirements = {}\n\n question = \"Would you like to define your dev dependencies\" \" (require-dev) interactively\"\n if self.confirm(question, True):\n dev_requirements = self._format_requirements(\n self._determine_requirements(self.option(\"dev-dependency\"))\n )\n\n layout_ = layout(\"standard\")(\n name,\n version,\n description=description,\n 
author=authors[0] if authors else None,\n license=license,\n python=python,\n dependencies=requirements,\n dev_dependencies=dev_requirements,\n )\n\n content = layout_.generate_poetry_content()\n if self.input.is_interactive():\n self.line(\"<info>Generated file</info>\")\n self.line([\"\", content, \"\"])\n\n if not self.confirm(\"Do you confirm generation?\", True):\n self.line(\"<error>Command aborted</error>\")\n\n return 1\n\n with (Path.cwd() / \"pyproject.toml\").open(\"w\") as f:\n f.write(content)\n\n def _determine_requirements(\n self, requires, allow_prereleases=False # type: List[str] # type: bool\n ): # type: (...) -> List[str]\n if not requires:\n requires = []\n\n package = self.ask(\"Search for package:\")\n while package is not None:\n matches = self._get_pool().search(package)\n\n if not matches:\n self.line(\"<error>Unable to find package</error>\")\n package = False\n else:\n choices = []\n\n for found_package in matches:\n choices.append(found_package.pretty_name)\n\n self.line(\n \"Found <info>{}</info> packages matching <info>{}</info>\".format(\n len(matches), package\n )\n )\n\n package = self.choice(\n \"\\nEnter package # to add, or the complete package name if it is not listed\",\n choices,\n attempts=3,\n )\n\n # no constraint yet, determine the best version automatically\n if package is not False and \" \" not in package:\n question = self.create_question(\n \"Enter the version constraint to require \"\n \"(or leave blank to use the latest version):\"\n )\n question.attempts = 3\n question.validator = lambda x: (x or \"\").strip() or False\n\n constraint = self.ask(question)\n\n if constraint is False:\n _, constraint = self._find_best_version_for_package(package)\n\n self.line(\n \"Using version <info>{}</info> for <info>{}</info>\".format(\n constraint, package\n )\n )\n\n package += \" {}\".format(constraint)\n\n if package is not False:\n requires.append(package)\n\n package = self.ask(\"\\nSearch for a package:\")\n\n return requires\n\n requires = self._parse_name_version_pairs(requires)\n result = []\n for requirement in requires:\n if \"version\" not in requirement:\n # determine the best version automatically\n name, version = self._find_best_version_for_package(\n requirement[\"name\"], allow_prereleases=allow_prereleases\n )\n requirement[\"version\"] = version\n requirement[\"name\"] = name\n\n self.line(\n \"Using version <info>{}</> for <info>{}</>\".format(version, name)\n )\n else:\n # check that the specified version/constraint exists\n # before we proceed\n name, _ = self._find_best_version_for_package(\n requirement[\"name\"],\n requirement[\"version\"],\n allow_prereleases=allow_prereleases,\n )\n\n requirement[\"name\"] = name\n\n result.append(\"{} {}\".format(requirement[\"name\"], requirement[\"version\"]))\n\n return result\n\n def _find_best_version_for_package(\n self, name, required_version=None, allow_prereleases=False\n ): # type: (...) 
-> Tuple[str, str]\n from poetry.version.version_selector import VersionSelector\n\n selector = VersionSelector(self._get_pool())\n package = selector.find_best_candidate(\n name, required_version, allow_prereleases=allow_prereleases\n )\n\n if not package:\n # TODO: find similar\n raise ValueError(\n \"Could not find a matching version of package {}\".format(name)\n )\n\n return (package.pretty_name, selector.find_recommended_require_version(package))\n\n def _parse_name_version_pairs(self, pairs): # type: (list) -> list\n result = []\n\n for i in range(len(pairs)):\n pair = re.sub(\"^([^=: ]+)[=: ](.*)$\", \"\\\\1 \\\\2\", pairs[i].strip())\n pair = pair.strip()\n\n if \" \" in pair:\n name, version = pair.split(\" \", 2)\n result.append({\"name\": name, \"version\": version})\n else:\n result.append({\"name\": pair})\n\n return result\n\n def _format_requirements(self, requirements): # type: (List[str]) -> dict\n requires = {}\n requirements = self._parse_name_version_pairs(requirements)\n for requirement in requirements:\n requires[requirement[\"name\"]] = requirement[\"version\"]\n\n return requires\n\n def _validate_author(self, author, default):\n from poetry.packages.package import AUTHOR_REGEX\n\n author = author or default\n\n if author in [\"n\", \"no\"]:\n return\n\n m = AUTHOR_REGEX.match(author)\n if not m:\n raise ValueError(\n \"Invalid author string. Must be in the format: \"\n \"John Smith <[email protected]>\"\n )\n\n return author\n\n def _validate_license(self, license):\n from poetry.spdx import license_by_id\n\n license_by_id(license)\n\n return license\n\n def _get_pool(self):\n from poetry.repositories import Pool\n from poetry.repositories.pypi_repository import PyPiRepository\n\n if isinstance(self, VenvCommand):\n return self.poetry.pool\n\n if self._pool is None:\n self._pool = Pool()\n self._pool.add_repository(PyPiRepository())\n\n return self._pool\n", "path": "poetry/console/commands/init.py" } ]
[ { "content": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nimport re\n\nfrom typing import List\nfrom typing import Tuple\n\nfrom .command import Command\nfrom .venv_command import VenvCommand\n\n\nclass InitCommand(Command):\n \"\"\"\n Creates a basic <comment>pyproject.toml</> file in the current directory.\n\n init\n {--name= : Name of the package}\n {--description= : Description of the package}\n {--author= : Author name of the package}\n {--dependency=* : Package to require with an optional version constraint,\n e.g. requests:^2.10.0 or requests=2.11.1}\n {--dev-dependency=* : Package to require for development with an optional version constraint,\n e.g. requests:^2.10.0 or requests=2.11.1}\n {--l|license= : License of the package}\n \"\"\"\n\n help = \"\"\"\\\nThe <info>init</info> command creates a basic <comment>pyproject.toml</> file in the current directory.\n\"\"\"\n\n def __init__(self):\n super(InitCommand, self).__init__()\n\n self._pool = None\n\n def handle(self):\n from poetry.layouts import layout\n from poetry.utils._compat import Path\n from poetry.vcs.git import GitConfig\n\n if (Path.cwd() / \"pyproject.toml\").exists():\n self.error(\"A pyproject.toml file already exists.\")\n return 1\n\n vcs_config = GitConfig()\n\n self.line(\n [\n \"\",\n \"This command will guide you through creating your <info>poetry.toml</> config.\",\n \"\",\n ]\n )\n\n name = self.option(\"name\")\n if not name:\n name = Path.cwd().name.lower()\n\n question = self.create_question(\n \"Package name [<comment>{}</comment>]: \".format(name), default=name\n )\n name = self.ask(question)\n\n version = \"0.1.0\"\n question = self.create_question(\n \"Version [<comment>{}</comment>]: \".format(version), default=version\n )\n version = self.ask(question)\n\n description = self.option(\"description\") or \"\"\n question = self.create_question(\n \"Description [<comment>{}</comment>]: \".format(description),\n default=description,\n )\n description = self.ask(question)\n\n author = self.option(\"author\")\n if not author and vcs_config and vcs_config.get(\"user.name\"):\n author = vcs_config[\"user.name\"]\n author_email = vcs_config.get(\"user.email\")\n if author_email:\n author += \" <{}>\".format(author_email)\n\n question = self.create_question(\n \"Author [<comment>{}</comment>, n to skip]: \".format(author), default=author\n )\n question.validator = lambda v: self._validate_author(v, author)\n author = self.ask(question)\n\n if not author:\n authors = []\n else:\n authors = [author]\n\n license = self.option(\"license\") or \"\"\n\n question = self.create_question(\n \"License [<comment>{}</comment>]: \".format(license), default=license\n )\n question.validator = self._validate_license\n license = self.ask(question)\n\n question = self.create_question(\"Compatible Python versions [*]: \", default=\"*\")\n python = self.ask(question)\n\n self.line(\"\")\n\n requirements = {}\n\n question = \"Would you like to define your dependencies\" \" (require) interactively?\"\n if self.confirm(question, True):\n requirements = self._format_requirements(\n self._determine_requirements(self.option(\"dependency\"))\n )\n\n dev_requirements = {}\n\n question = \"Would you like to define your dev dependencies\" \" (require-dev) interactively\"\n if self.confirm(question, True):\n dev_requirements = self._format_requirements(\n self._determine_requirements(self.option(\"dev-dependency\"))\n )\n\n layout_ = layout(\"standard\")(\n name,\n version,\n description=description,\n 
author=authors[0] if authors else None,\n license=license,\n python=python,\n dependencies=requirements,\n dev_dependencies=dev_requirements,\n )\n\n content = layout_.generate_poetry_content()\n if self.input.is_interactive():\n self.line(\"<info>Generated file</info>\")\n self.line([\"\", content, \"\"])\n\n if not self.confirm(\"Do you confirm generation?\", True):\n self.line(\"<error>Command aborted</error>\")\n\n return 1\n\n with (Path.cwd() / \"pyproject.toml\").open(\"w\") as f:\n f.write(content)\n\n def _determine_requirements(\n self, requires, allow_prereleases=False # type: List[str] # type: bool\n ): # type: (...) -> List[str]\n if not requires:\n requires = []\n\n package = self.ask(\"Search for package:\")\n while package is not None:\n matches = self._get_pool().search(package)\n\n if not matches:\n self.line(\"<error>Unable to find package</error>\")\n package = False\n else:\n choices = []\n\n for found_package in matches:\n choices.append(found_package.pretty_name)\n\n self.line(\n \"Found <info>{}</info> packages matching <info>{}</info>\".format(\n len(matches), package\n )\n )\n\n package = self.choice(\n \"\\nEnter package # to add, or the complete package name if it is not listed\",\n choices,\n attempts=3,\n )\n\n # no constraint yet, determine the best version automatically\n if package is not False and \" \" not in package:\n question = self.create_question(\n \"Enter the version constraint to require \"\n \"(or leave blank to use the latest version):\"\n )\n question.attempts = 3\n question.validator = lambda x: (x or \"\").strip() or False\n\n constraint = self.ask(question)\n\n if constraint is False:\n _, constraint = self._find_best_version_for_package(package)\n\n self.line(\n \"Using version <info>{}</info> for <info>{}</info>\".format(\n constraint, package\n )\n )\n\n package += \" {}\".format(constraint)\n\n if package is not False:\n requires.append(package)\n\n package = self.ask(\"\\nSearch for a package:\")\n\n return requires\n\n requires = self._parse_name_version_pairs(requires)\n result = []\n for requirement in requires:\n if \"version\" not in requirement:\n # determine the best version automatically\n name, version = self._find_best_version_for_package(\n requirement[\"name\"], allow_prereleases=allow_prereleases\n )\n requirement[\"version\"] = version\n requirement[\"name\"] = name\n\n self.line(\n \"Using version <info>{}</> for <info>{}</>\".format(version, name)\n )\n else:\n # check that the specified version/constraint exists\n # before we proceed\n name, _ = self._find_best_version_for_package(\n requirement[\"name\"],\n requirement[\"version\"],\n allow_prereleases=allow_prereleases,\n )\n\n requirement[\"name\"] = name\n\n result.append(\"{} {}\".format(requirement[\"name\"], requirement[\"version\"]))\n\n return result\n\n def _find_best_version_for_package(\n self, name, required_version=None, allow_prereleases=False\n ): # type: (...) 
-> Tuple[str, str]\n from poetry.version.version_selector import VersionSelector\n\n selector = VersionSelector(self._get_pool())\n package = selector.find_best_candidate(\n name, required_version, allow_prereleases=allow_prereleases\n )\n\n if not package:\n # TODO: find similar\n raise ValueError(\n \"Could not find a matching version of package {}\".format(name)\n )\n\n return (package.pretty_name, selector.find_recommended_require_version(package))\n\n def _parse_name_version_pairs(self, pairs): # type: (list) -> list\n result = []\n\n for i in range(len(pairs)):\n pair = re.sub(\"^([^=: ]+)[=: ](.*)$\", \"\\\\1 \\\\2\", pairs[i].strip())\n pair = pair.strip()\n\n if \" \" in pair:\n name, version = pair.split(\" \", 2)\n result.append({\"name\": name, \"version\": version})\n else:\n result.append({\"name\": pair})\n\n return result\n\n def _format_requirements(self, requirements): # type: (List[str]) -> dict\n requires = {}\n requirements = self._parse_name_version_pairs(requirements)\n for requirement in requirements:\n requires[requirement[\"name\"]] = requirement[\"version\"]\n\n return requires\n\n def _validate_author(self, author, default):\n from poetry.packages.package import AUTHOR_REGEX\n\n author = author or default\n\n if author in [\"n\", \"no\"]:\n return\n\n m = AUTHOR_REGEX.match(author)\n if not m:\n raise ValueError(\n \"Invalid author string. Must be in the format: \"\n \"John Smith <[email protected]>\"\n )\n\n return author\n\n def _validate_license(self, license):\n from poetry.spdx import license_by_id\n\n if license:\n license_by_id(license)\n\n return license\n\n def _get_pool(self):\n from poetry.repositories import Pool\n from poetry.repositories.pypi_repository import PyPiRepository\n\n if isinstance(self, VenvCommand):\n return self.poetry.pool\n\n if self._pool is None:\n self._pool = Pool()\n self._pool.add_repository(PyPiRepository())\n\n return self._pool\n", "path": "poetry/console/commands/init.py" } ]
diff --git a/poetry/console/commands/init.py b/poetry/console/commands/init.py index e3550d623b2..5bc12869eef 100644 --- a/poetry/console/commands/init.py +++ b/poetry/console/commands/init.py @@ -296,7 +296,8 @@ def _validate_author(self, author, default): def _validate_license(self, license): from poetry.spdx import license_by_id - license_by_id(license) + if license: + license_by_id(license) return license diff --git a/tests/console/commands/test_init.py b/tests/console/commands/test_init.py index 4bfd25c4bf6..6079d723051 100644 --- a/tests/console/commands/test_init.py +++ b/tests/console/commands/test_init.py @@ -113,3 +113,44 @@ def test_interactive_with_dependencies(app, repo, mocker, poetry): """ assert expected in output + + +def test_empty_license(app, mocker, poetry): + command = app.find("init") + command._pool = poetry.pool + + mocker.patch("poetry.utils._compat.Path.open") + p = mocker.patch("poetry.utils._compat.Path.cwd") + p.return_value = Path(__file__) + + tester = CommandTester(command) + tester.set_inputs( + [ + "my-package", # Package name + "1.2.3", # Version + "", # Description + "n", # Author + "", # License + "", # Python + "n", # Interactive packages + "n", # Interactive dev packages + "\n", # Generate + ] + ) + tester.execute([("command", command.name)]) + + output = tester.get_display(True) + expected = """\ +[tool.poetry] +name = "my-package" +version = "1.2.3" +description = "" +authors = ["Your Name <[email protected]>"] + +[tool.poetry.dependencies] +python = "*" + +[tool.poetry.dev-dependencies] +""" + + assert expected in output
sublimelsp__LSP-920
Empty initializationOptions is not sent
# Problem

If `initializationOptions` is an empty dict, it won't be sent to the server.

```js
// this is not sent
"initializationOptions": {},
```

Some servers (such as [vscode-css-languageserver](https://github.com/vscode-langservers/vscode-css-languageserver)) need `initializationOptions` to be set even if it is an empty one, or a useless one (as a workaround) such as the following.

```js
// this will be sent
"initializationOptions": {"just_an_useless_key": 1},
```

# Reference

- https://github.com/sublimelsp/LSP-css/pull/2#discussion_r393881421
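For illustration, here is a minimal, self-contained sketch (not the plugin's actual code; the function names and the `processId` value are hypothetical) of why a plain truthiness check drops an empty dict while an explicit `None` check keeps it:

```python
def build_params_truthy(init_options):
    """Builds initialize params with a truthiness check (the reported behaviour)."""
    params = {"processId": 1234}
    if init_options:  # {} is falsy, so an explicitly empty dict is silently dropped
        params["initializationOptions"] = init_options
    return params


def build_params_explicit(init_options):
    """Builds initialize params, omitting the field only when it is unset (None)."""
    params = {"processId": 1234}
    if init_options is not None:  # {} is kept; only None means "not configured"
        params["initializationOptions"] = init_options
    return params


if __name__ == "__main__":
    # An empty dict disappears with the truthiness check ...
    assert "initializationOptions" not in build_params_truthy({})
    # ... but survives with the explicit None check.
    assert build_params_explicit({})["initializationOptions"] == {}
    # An unset value is omitted in both cases.
    assert "initializationOptions" not in build_params_explicit(None)
```

The distinction matters because the servers mentioned above treat "field absent" and "field present but empty" differently.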
[ { "content": "from .logging import debug\nfrom .process import start_server\nfrom .protocol import completion_item_kinds, symbol_kinds, WorkspaceFolder, Request, Notification\nfrom .protocol import TextDocumentSyncKindNone\nfrom .rpc import Client, attach_stdio_client, Response\nfrom .settings import settings as global_settings\nfrom .transports import start_tcp_transport, start_tcp_listener, TCPTransport, Transport\nfrom .types import ClientConfig, Settings\nfrom .typing import Callable, Dict, Any, Optional, List, Tuple, Generator\nfrom contextlib import contextmanager\nimport os\nimport threading\n\n\ndef get_initialize_params(workspace_folders: List[WorkspaceFolder], designated_folder: Optional[WorkspaceFolder],\n config: ClientConfig) -> dict:\n initializeParams = {\n \"processId\": os.getpid(),\n \"rootUri\": designated_folder.uri() if designated_folder else None,\n \"rootPath\": designated_folder.path if designated_folder else None,\n \"workspaceFolders\": [folder.to_lsp() for folder in workspace_folders] if workspace_folders else None,\n \"capabilities\": {\n \"textDocument\": {\n \"synchronization\": {\n \"didSave\": True,\n \"willSave\": True,\n \"willSaveWaitUntil\": True\n },\n \"hover\": {\n \"contentFormat\": [\"markdown\", \"plaintext\"]\n },\n \"completion\": {\n \"completionItem\": {\n \"snippetSupport\": True\n },\n \"completionItemKind\": {\n \"valueSet\": completion_item_kinds\n }\n },\n \"signatureHelp\": {\n \"signatureInformation\": {\n \"documentationFormat\": [\"markdown\", \"plaintext\"],\n \"parameterInformation\": {\n \"labelOffsetSupport\": True\n }\n }\n },\n \"references\": {},\n \"documentHighlight\": {},\n \"documentSymbol\": {\n \"symbolKind\": {\n \"valueSet\": symbol_kinds\n }\n },\n \"formatting\": {},\n \"rangeFormatting\": {},\n \"declaration\": {\"linkSupport\": True},\n \"definition\": {\"linkSupport\": True},\n \"typeDefinition\": {\"linkSupport\": True},\n \"implementation\": {\"linkSupport\": True},\n \"codeAction\": {\n \"codeActionLiteralSupport\": {\n \"codeActionKind\": {\n \"valueSet\": []\n }\n }\n },\n \"rename\": {},\n \"colorProvider\": {},\n \"publishDiagnostics\": {\n \"relatedInformation\": True\n }\n },\n \"workspace\": {\n \"applyEdit\": True,\n \"didChangeConfiguration\": {},\n \"executeCommand\": {},\n \"workspaceFolders\": True,\n \"symbol\": {\n \"symbolKind\": {\n \"valueSet\": symbol_kinds\n }\n },\n \"configuration\": True\n }\n }\n }\n if config.init_options:\n initializeParams['initializationOptions'] = config.init_options\n\n return initializeParams\n\n\ndef diff_folders(old: List[WorkspaceFolder],\n new: List[WorkspaceFolder]) -> Tuple[List[WorkspaceFolder], List[WorkspaceFolder]]:\n added = [] # type: List[WorkspaceFolder]\n removed = [] # type: List[WorkspaceFolder]\n for folder in old:\n if folder not in new:\n removed.append(folder)\n for folder in new:\n if folder not in old:\n added.append(folder)\n return added, removed\n\n\ndef get_dotted_value(current: Any, dotted: str) -> Any:\n keys = dotted.split('.')\n for key in keys:\n if isinstance(current, dict):\n current = current.get(key)\n else:\n return None\n return current\n\n\nclass InitializeError(Exception):\n\n def __init__(self, session: 'Session') -> None:\n super().__init__(\"{} did not respond to the initialize request within {} seconds\".format(\n session.config.name, global_settings.initialize_timeout))\n self.session = session\n\n\nclass Session(object):\n def __init__(self,\n config: ClientConfig,\n workspace_folders: List[WorkspaceFolder],\n 
designated_folder: Optional[WorkspaceFolder],\n client: Client,\n on_pre_initialize: 'Optional[Callable[[Session], None]]' = None,\n on_post_initialize: 'Optional[Callable[[Session, Optional[Dict[str, Any]]], None]]' = None,\n on_post_exit: Optional[Callable[[str], None]] = None) -> None:\n self.config = config\n self._on_post_initialize = on_post_initialize\n self._on_post_exit = on_post_exit\n self.capabilities = dict() # type: Dict[str, Any]\n self.client = client\n self.ready_lock = threading.Lock()\n self._workspace_folders = workspace_folders\n self.designated_folder = designated_folder\n if on_pre_initialize:\n on_pre_initialize(self)\n self._initialize()\n\n def has_capability(self, capability: str) -> bool:\n return capability in self.capabilities and self.capabilities[capability] is not False\n\n def get_capability(self, capability: str) -> Optional[Any]:\n return self.capabilities.get(capability)\n\n def should_notify_did_open(self) -> bool:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n return bool(textsync.get('openClose'))\n if isinstance(textsync, int):\n return textsync > TextDocumentSyncKindNone\n return False\n\n def text_sync_kind(self) -> int:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n return int(textsync.get('change', TextDocumentSyncKindNone))\n if isinstance(textsync, int):\n return textsync\n return TextDocumentSyncKindNone\n\n def should_notify_did_change(self) -> bool:\n return self.text_sync_kind() > TextDocumentSyncKindNone\n\n def should_notify_will_save(self) -> bool:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n return bool(textsync.get('willSave'))\n if isinstance(textsync, int):\n return textsync > TextDocumentSyncKindNone\n return False\n\n def should_request_will_save_wait_until(self) -> bool:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n return bool(textsync.get('willSaveWaitUntil'))\n return False\n\n def should_notify_did_save(self) -> Tuple[bool, bool]:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n options = textsync.get('save')\n return True, bool(options.get('includeText')) if isinstance(options, dict) else False\n if isinstance(textsync, int):\n return textsync > TextDocumentSyncKindNone, False\n return False, False\n\n def should_notify_did_close(self) -> bool:\n return self.should_notify_did_open()\n\n @contextmanager\n def acquire_timeout(self) -> Generator[None, None, None]:\n acquired = self.ready_lock.acquire(True, global_settings.initialize_timeout)\n if not acquired:\n raise InitializeError(self)\n yield\n self.ready_lock.release()\n\n def handles_path(self, file_path: Optional[str]) -> bool:\n if not file_path:\n return False\n with self.acquire_timeout():\n # If we're in a window with no folders, or we're a multi-folder session, then we handle any path.\n if not self._workspace_folders or self._unsafe_supports_workspace_folders():\n return True\n # We're in a window with folders, and we're a single-folder session.\n for folder in self._workspace_folders:\n if file_path.startswith(folder.path):\n return True\n return False\n\n def update_folders(self, folders: List[WorkspaceFolder]) -> None:\n with self.acquire_timeout():\n if self._unsafe_supports_workspace_folders():\n added, removed = diff_folders(self._workspace_folders, folders)\n params = {\n \"event\": {\n \"added\": [a.to_lsp() for a in added],\n \"removed\": 
[r.to_lsp() for r in removed]\n }\n }\n notification = Notification.didChangeWorkspaceFolders(params)\n self.client.send_notification(notification)\n self._workspace_folders = folders\n\n def _initialize(self) -> None:\n self.ready_lock.acquire() # released in _handle_initialize_result or _handle_initialize_error\n params = get_initialize_params(self._workspace_folders, self.designated_folder, self.config)\n self.client.send_request(\n Request.initialize(params),\n self._handle_initialize_result,\n self._handle_initialize_error)\n\n def _unsafe_supports_workspace_folders(self) -> bool:\n assert self.ready_lock.locked()\n workspace_cap = self.capabilities.get(\"workspace\", {})\n workspace_folder_cap = workspace_cap.get(\"workspaceFolders\", {})\n return workspace_folder_cap.get(\"supported\")\n\n def supports_workspace_folders(self) -> bool:\n with self.acquire_timeout():\n return self._unsafe_supports_workspace_folders()\n\n def on_request(self, method: str, handler: Callable) -> None:\n self.client.on_request(method, handler)\n\n def on_notification(self, method: str, handler: Callable) -> None:\n self.client.on_notification(method, handler)\n\n def _handle_initialize_error(self, error: Any) -> None:\n self.ready_lock.release() # acquired in _initialize\n if self._on_post_initialize:\n self._on_post_initialize(self, error)\n\n def _handle_initialize_result(self, result: Any) -> None:\n self.capabilities.update(result.get('capabilities', dict()))\n\n # only keep supported amount of folders\n if self._workspace_folders:\n if self._unsafe_supports_workspace_folders():\n debug('multi folder session:', self._workspace_folders)\n else:\n assert self.designated_folder # mypy\n self._workspace_folders = [self.designated_folder]\n debug('single folder session:', self._workspace_folders)\n else:\n debug(\"session with no workspace folders\")\n\n self.ready_lock.release() # acquired in _initialize\n\n self.on_request(\"workspace/workspaceFolders\", self._handle_request_workspace_folders)\n self.on_request(\"workspace/configuration\", self._handle_request_workspace_configuration)\n if self.config.settings:\n self.client.send_notification(Notification.didChangeConfiguration({'settings': self.config.settings}))\n\n if self._on_post_initialize:\n self._on_post_initialize(self, None)\n\n def _handle_request_workspace_folders(self, _: Any, request_id: Any) -> None:\n self.client.send_response(Response(request_id, [wf.to_lsp() for wf in self._workspace_folders]))\n\n def _handle_request_workspace_configuration(self, params: Dict[str, Any], request_id: Any) -> None:\n items = [] # type: List[Any]\n requested_items = params.get(\"items\") or []\n for requested_item in requested_items:\n if 'section' in requested_item:\n section = requested_item['section']\n if section:\n items.append(get_dotted_value(self.config.settings, section))\n else:\n items.append(self.config.settings)\n else:\n items.append(self.config.settings)\n self.client.send_response(Response(request_id, items))\n\n def end(self) -> None:\n self.client.send_request(\n Request.shutdown(),\n lambda result: self._handle_shutdown_result(),\n lambda error: self._handle_shutdown_result())\n\n def _handle_shutdown_result(self) -> None:\n self.client.exit()\n self.client = None # type: ignore\n self.capabilities.clear()\n if self._on_post_exit:\n self._on_post_exit(self.config.name)\n\n\ndef create_session(config: ClientConfig,\n workspace_folders: List[WorkspaceFolder],\n designated_folder: Optional[WorkspaceFolder],\n env: dict,\n settings: 
Settings,\n on_pre_initialize: Optional[Callable[[Session], None]] = None,\n on_post_initialize: Optional[Callable[[Session, Optional[Dict[str, Any]]], None]] = None,\n on_post_exit: Optional[Callable[[str], None]] = None,\n on_stderr_log: Optional[Callable[[str], None]] = None,\n bootstrap_client: Optional[Any] = None) -> Optional[Session]:\n\n def with_client(client: Client) -> Session:\n return Session(\n config=config,\n workspace_folders=workspace_folders,\n designated_folder=designated_folder,\n client=client,\n on_pre_initialize=on_pre_initialize,\n on_post_initialize=on_post_initialize,\n on_post_exit=on_post_exit)\n\n session = None\n if config.binary_args:\n tcp_port = config.tcp_port\n server_args = config.binary_args\n\n if config.tcp_mode == \"host\":\n socket = start_tcp_listener(tcp_port or 0)\n tcp_port = socket.getsockname()[1]\n server_args = list(s.replace(\"{port}\", str(tcp_port)) for s in config.binary_args)\n\n working_dir = workspace_folders[0].path if workspace_folders else None\n process = start_server(server_args, working_dir, env, on_stderr_log)\n if process:\n if config.tcp_mode == \"host\":\n client_socket, address = socket.accept()\n transport = TCPTransport(client_socket) # type: Transport\n session = with_client(Client(transport, settings))\n elif tcp_port:\n transport = start_tcp_transport(tcp_port, config.tcp_host)\n if transport:\n session = with_client(Client(transport, settings))\n else:\n # try to terminate the process\n try:\n process.terminate()\n except Exception:\n pass\n else:\n session = with_client(attach_stdio_client(process, settings))\n else:\n if config.tcp_port:\n transport = start_tcp_transport(config.tcp_port)\n session = with_client(Client(transport, settings))\n elif bootstrap_client:\n session = with_client(bootstrap_client)\n else:\n debug(\"No way to start session\")\n return session\n", "path": "plugin/core/sessions.py" } ]
[ { "content": "from .logging import debug\nfrom .process import start_server\nfrom .protocol import completion_item_kinds, symbol_kinds, WorkspaceFolder, Request, Notification\nfrom .protocol import TextDocumentSyncKindNone\nfrom .rpc import Client, attach_stdio_client, Response\nfrom .settings import settings as global_settings\nfrom .transports import start_tcp_transport, start_tcp_listener, TCPTransport, Transport\nfrom .types import ClientConfig, Settings\nfrom .typing import Callable, Dict, Any, Optional, List, Tuple, Generator\nfrom contextlib import contextmanager\nimport os\nimport threading\n\n\ndef get_initialize_params(workspace_folders: List[WorkspaceFolder], designated_folder: Optional[WorkspaceFolder],\n config: ClientConfig) -> dict:\n initializeParams = {\n \"processId\": os.getpid(),\n \"rootUri\": designated_folder.uri() if designated_folder else None,\n \"rootPath\": designated_folder.path if designated_folder else None,\n \"workspaceFolders\": [folder.to_lsp() for folder in workspace_folders] if workspace_folders else None,\n \"capabilities\": {\n \"textDocument\": {\n \"synchronization\": {\n \"didSave\": True,\n \"willSave\": True,\n \"willSaveWaitUntil\": True\n },\n \"hover\": {\n \"contentFormat\": [\"markdown\", \"plaintext\"]\n },\n \"completion\": {\n \"completionItem\": {\n \"snippetSupport\": True\n },\n \"completionItemKind\": {\n \"valueSet\": completion_item_kinds\n }\n },\n \"signatureHelp\": {\n \"signatureInformation\": {\n \"documentationFormat\": [\"markdown\", \"plaintext\"],\n \"parameterInformation\": {\n \"labelOffsetSupport\": True\n }\n }\n },\n \"references\": {},\n \"documentHighlight\": {},\n \"documentSymbol\": {\n \"symbolKind\": {\n \"valueSet\": symbol_kinds\n }\n },\n \"formatting\": {},\n \"rangeFormatting\": {},\n \"declaration\": {\"linkSupport\": True},\n \"definition\": {\"linkSupport\": True},\n \"typeDefinition\": {\"linkSupport\": True},\n \"implementation\": {\"linkSupport\": True},\n \"codeAction\": {\n \"codeActionLiteralSupport\": {\n \"codeActionKind\": {\n \"valueSet\": []\n }\n }\n },\n \"rename\": {},\n \"colorProvider\": {},\n \"publishDiagnostics\": {\n \"relatedInformation\": True\n }\n },\n \"workspace\": {\n \"applyEdit\": True,\n \"didChangeConfiguration\": {},\n \"executeCommand\": {},\n \"workspaceFolders\": True,\n \"symbol\": {\n \"symbolKind\": {\n \"valueSet\": symbol_kinds\n }\n },\n \"configuration\": True\n }\n }\n }\n if config.init_options is not None:\n initializeParams['initializationOptions'] = config.init_options\n\n return initializeParams\n\n\ndef diff_folders(old: List[WorkspaceFolder],\n new: List[WorkspaceFolder]) -> Tuple[List[WorkspaceFolder], List[WorkspaceFolder]]:\n added = [] # type: List[WorkspaceFolder]\n removed = [] # type: List[WorkspaceFolder]\n for folder in old:\n if folder not in new:\n removed.append(folder)\n for folder in new:\n if folder not in old:\n added.append(folder)\n return added, removed\n\n\ndef get_dotted_value(current: Any, dotted: str) -> Any:\n keys = dotted.split('.')\n for key in keys:\n if isinstance(current, dict):\n current = current.get(key)\n else:\n return None\n return current\n\n\nclass InitializeError(Exception):\n\n def __init__(self, session: 'Session') -> None:\n super().__init__(\"{} did not respond to the initialize request within {} seconds\".format(\n session.config.name, global_settings.initialize_timeout))\n self.session = session\n\n\nclass Session(object):\n def __init__(self,\n config: ClientConfig,\n workspace_folders: 
List[WorkspaceFolder],\n designated_folder: Optional[WorkspaceFolder],\n client: Client,\n on_pre_initialize: 'Optional[Callable[[Session], None]]' = None,\n on_post_initialize: 'Optional[Callable[[Session, Optional[Dict[str, Any]]], None]]' = None,\n on_post_exit: Optional[Callable[[str], None]] = None) -> None:\n self.config = config\n self._on_post_initialize = on_post_initialize\n self._on_post_exit = on_post_exit\n self.capabilities = dict() # type: Dict[str, Any]\n self.client = client\n self.ready_lock = threading.Lock()\n self._workspace_folders = workspace_folders\n self.designated_folder = designated_folder\n if on_pre_initialize:\n on_pre_initialize(self)\n self._initialize()\n\n def has_capability(self, capability: str) -> bool:\n return capability in self.capabilities and self.capabilities[capability] is not False\n\n def get_capability(self, capability: str) -> Optional[Any]:\n return self.capabilities.get(capability)\n\n def should_notify_did_open(self) -> bool:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n return bool(textsync.get('openClose'))\n if isinstance(textsync, int):\n return textsync > TextDocumentSyncKindNone\n return False\n\n def text_sync_kind(self) -> int:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n return int(textsync.get('change', TextDocumentSyncKindNone))\n if isinstance(textsync, int):\n return textsync\n return TextDocumentSyncKindNone\n\n def should_notify_did_change(self) -> bool:\n return self.text_sync_kind() > TextDocumentSyncKindNone\n\n def should_notify_will_save(self) -> bool:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n return bool(textsync.get('willSave'))\n if isinstance(textsync, int):\n return textsync > TextDocumentSyncKindNone\n return False\n\n def should_request_will_save_wait_until(self) -> bool:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n return bool(textsync.get('willSaveWaitUntil'))\n return False\n\n def should_notify_did_save(self) -> Tuple[bool, bool]:\n textsync = self.capabilities.get('textDocumentSync')\n if isinstance(textsync, dict):\n options = textsync.get('save')\n return True, bool(options.get('includeText')) if isinstance(options, dict) else False\n if isinstance(textsync, int):\n return textsync > TextDocumentSyncKindNone, False\n return False, False\n\n def should_notify_did_close(self) -> bool:\n return self.should_notify_did_open()\n\n @contextmanager\n def acquire_timeout(self) -> Generator[None, None, None]:\n acquired = self.ready_lock.acquire(True, global_settings.initialize_timeout)\n if not acquired:\n raise InitializeError(self)\n yield\n self.ready_lock.release()\n\n def handles_path(self, file_path: Optional[str]) -> bool:\n if not file_path:\n return False\n with self.acquire_timeout():\n # If we're in a window with no folders, or we're a multi-folder session, then we handle any path.\n if not self._workspace_folders or self._unsafe_supports_workspace_folders():\n return True\n # We're in a window with folders, and we're a single-folder session.\n for folder in self._workspace_folders:\n if file_path.startswith(folder.path):\n return True\n return False\n\n def update_folders(self, folders: List[WorkspaceFolder]) -> None:\n with self.acquire_timeout():\n if self._unsafe_supports_workspace_folders():\n added, removed = diff_folders(self._workspace_folders, folders)\n params = {\n \"event\": {\n \"added\": [a.to_lsp() for a in 
added],\n \"removed\": [r.to_lsp() for r in removed]\n }\n }\n notification = Notification.didChangeWorkspaceFolders(params)\n self.client.send_notification(notification)\n self._workspace_folders = folders\n\n def _initialize(self) -> None:\n self.ready_lock.acquire() # released in _handle_initialize_result or _handle_initialize_error\n params = get_initialize_params(self._workspace_folders, self.designated_folder, self.config)\n self.client.send_request(\n Request.initialize(params),\n self._handle_initialize_result,\n self._handle_initialize_error)\n\n def _unsafe_supports_workspace_folders(self) -> bool:\n assert self.ready_lock.locked()\n workspace_cap = self.capabilities.get(\"workspace\", {})\n workspace_folder_cap = workspace_cap.get(\"workspaceFolders\", {})\n return workspace_folder_cap.get(\"supported\")\n\n def supports_workspace_folders(self) -> bool:\n with self.acquire_timeout():\n return self._unsafe_supports_workspace_folders()\n\n def on_request(self, method: str, handler: Callable) -> None:\n self.client.on_request(method, handler)\n\n def on_notification(self, method: str, handler: Callable) -> None:\n self.client.on_notification(method, handler)\n\n def _handle_initialize_error(self, error: Any) -> None:\n self.ready_lock.release() # acquired in _initialize\n if self._on_post_initialize:\n self._on_post_initialize(self, error)\n\n def _handle_initialize_result(self, result: Any) -> None:\n self.capabilities.update(result.get('capabilities', dict()))\n\n # only keep supported amount of folders\n if self._workspace_folders:\n if self._unsafe_supports_workspace_folders():\n debug('multi folder session:', self._workspace_folders)\n else:\n assert self.designated_folder # mypy\n self._workspace_folders = [self.designated_folder]\n debug('single folder session:', self._workspace_folders)\n else:\n debug(\"session with no workspace folders\")\n\n self.ready_lock.release() # acquired in _initialize\n\n self.on_request(\"workspace/workspaceFolders\", self._handle_request_workspace_folders)\n self.on_request(\"workspace/configuration\", self._handle_request_workspace_configuration)\n if self.config.settings:\n self.client.send_notification(Notification.didChangeConfiguration({'settings': self.config.settings}))\n\n if self._on_post_initialize:\n self._on_post_initialize(self, None)\n\n def _handle_request_workspace_folders(self, _: Any, request_id: Any) -> None:\n self.client.send_response(Response(request_id, [wf.to_lsp() for wf in self._workspace_folders]))\n\n def _handle_request_workspace_configuration(self, params: Dict[str, Any], request_id: Any) -> None:\n items = [] # type: List[Any]\n requested_items = params.get(\"items\") or []\n for requested_item in requested_items:\n if 'section' in requested_item:\n section = requested_item['section']\n if section:\n items.append(get_dotted_value(self.config.settings, section))\n else:\n items.append(self.config.settings)\n else:\n items.append(self.config.settings)\n self.client.send_response(Response(request_id, items))\n\n def end(self) -> None:\n self.client.send_request(\n Request.shutdown(),\n lambda result: self._handle_shutdown_result(),\n lambda error: self._handle_shutdown_result())\n\n def _handle_shutdown_result(self) -> None:\n self.client.exit()\n self.client = None # type: ignore\n self.capabilities.clear()\n if self._on_post_exit:\n self._on_post_exit(self.config.name)\n\n\ndef create_session(config: ClientConfig,\n workspace_folders: List[WorkspaceFolder],\n designated_folder: Optional[WorkspaceFolder],\n env: 
dict,\n settings: Settings,\n on_pre_initialize: Optional[Callable[[Session], None]] = None,\n on_post_initialize: Optional[Callable[[Session, Optional[Dict[str, Any]]], None]] = None,\n on_post_exit: Optional[Callable[[str], None]] = None,\n on_stderr_log: Optional[Callable[[str], None]] = None,\n bootstrap_client: Optional[Any] = None) -> Optional[Session]:\n\n def with_client(client: Client) -> Session:\n return Session(\n config=config,\n workspace_folders=workspace_folders,\n designated_folder=designated_folder,\n client=client,\n on_pre_initialize=on_pre_initialize,\n on_post_initialize=on_post_initialize,\n on_post_exit=on_post_exit)\n\n session = None\n if config.binary_args:\n tcp_port = config.tcp_port\n server_args = config.binary_args\n\n if config.tcp_mode == \"host\":\n socket = start_tcp_listener(tcp_port or 0)\n tcp_port = socket.getsockname()[1]\n server_args = list(s.replace(\"{port}\", str(tcp_port)) for s in config.binary_args)\n\n working_dir = workspace_folders[0].path if workspace_folders else None\n process = start_server(server_args, working_dir, env, on_stderr_log)\n if process:\n if config.tcp_mode == \"host\":\n client_socket, address = socket.accept()\n transport = TCPTransport(client_socket) # type: Transport\n session = with_client(Client(transport, settings))\n elif tcp_port:\n transport = start_tcp_transport(tcp_port, config.tcp_host)\n if transport:\n session = with_client(Client(transport, settings))\n else:\n # try to terminate the process\n try:\n process.terminate()\n except Exception:\n pass\n else:\n session = with_client(attach_stdio_client(process, settings))\n else:\n if config.tcp_port:\n transport = start_tcp_transport(config.tcp_port)\n session = with_client(Client(transport, settings))\n elif bootstrap_client:\n session = with_client(bootstrap_client)\n else:\n debug(\"No way to start session\")\n return session\n", "path": "plugin/core/sessions.py" } ]
diff --git a/plugin/core/sessions.py b/plugin/core/sessions.py index 89a836dda..fae375fc6 100644 --- a/plugin/core/sessions.py +++ b/plugin/core/sessions.py @@ -85,7 +85,7 @@ def get_initialize_params(workspace_folders: List[WorkspaceFolder], designated_f } } } - if config.init_options: + if config.init_options is not None: initializeParams['initializationOptions'] = config.init_options return initializeParams diff --git a/tests/test_session.py b/tests/test_session.py index a5bfea29f..442b5cc23 100644 --- a/tests/test_session.py +++ b/tests/test_session.py @@ -1,6 +1,6 @@ from LSP.plugin.core.protocol import TextDocumentSyncKindFull, TextDocumentSyncKindNone, TextDocumentSyncKindIncremental from LSP.plugin.core.protocol import WorkspaceFolder -from LSP.plugin.core.sessions import create_session, Session, InitializeError +from LSP.plugin.core.sessions import create_session, Session, InitializeError, get_initialize_params from LSP.plugin.core.settings import settings as global_settings from LSP.plugin.core.types import ClientConfig from LSP.plugin.core.types import Settings @@ -44,6 +44,20 @@ def make_session(self, bootstrap_client, on_pre_initialize=None, on_post_initial on_post_initialize=on_post_initialize, on_post_exit=on_post_exit)) + def test_initialize_params(self) -> None: + wf = WorkspaceFolder.from_path("/foo/bar/baz") + params = get_initialize_params( + [wf], wf, ClientConfig(name="test", binary_args=[""], tcp_port=None, init_options=None)) + self.assertNotIn("initializationOptions", params) + params = get_initialize_params( + [wf], wf, ClientConfig(name="test", binary_args=[""], tcp_port=None, init_options={})) + self.assertIn("initializationOptions", params) + self.assertEqual(params["initializationOptions"], {}) + params = get_initialize_params( + [wf], wf, ClientConfig(name="test", binary_args=[""], tcp_port=None, init_options={"foo": "bar"})) + self.assertIn("initializationOptions", params) + self.assertEqual(params["initializationOptions"], {"foo": "bar"}) + # @unittest.skip("need an example config") def test_can_create_session(self): config = ClientConfig(
openfun__richie-290
Person plugin form lists every page, not only Person pages
## Bug Report

**Expected behavior/code**

The select box in the PersonPlugin form should list only pages extended with the Person model.

**Actual Behavior**

Currently the select box lists every CMS page.

**Steps to Reproduce**

1. Edit a page;
2. Try to add a Person plugin into a placeholder which allows it;
3. Click to open the select box in the form opened for the added Person plugin.

**Environment**

- Richie version: 0.1.0 (from my own branch synchronized from master 200c8a3)
- Platform: Ubuntu 18.04 LTS

**Possible Solution**

Add a filter inside the plugin form machinery to retain only pages extended with Person.
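The fix that appears in the after_files below relies on Django's `limit_choices_to` argument on the `page` foreign key. As a minimal, simplified sketch (assuming a configured Django project; these stripped-down models are illustrative and not Richie's real schema), this is how that argument narrows the select box to pages that have a related Person:

```python
# Simplified, illustrative models: limit_choices_to restricts the queryset
# that ModelForm/admin select widgets offer for the foreign key field.
from django.db import models


class Page(models.Model):
    title = models.CharField(max_length=200)


class Person(models.Model):
    # One-to-one link back to Page, mimicking a django CMS page extension;
    # it creates a reverse "person" relation on Page.
    extended_object = models.OneToOneField(
        Page, on_delete=models.CASCADE, related_name="person"
    )
    first_name = models.CharField(max_length=200)


class PersonPluginModel(models.Model):
    # Only pages whose reverse "person" relation exists are listed in the
    # plugin form's select box; plain CMS pages are filtered out.
    page = models.ForeignKey(
        Page,
        on_delete=models.CASCADE,
        limit_choices_to={"person__isnull": False},
    )
```

Django applies this filter both when building the form field's queryset and when validating a submitted value, so a page without a Person extension is rejected even if its id is posted directly.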
[ { "content": "\"\"\"\nDeclare and configure the model for the person application\n\"\"\"\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom cms.api import Page\nfrom cms.extensions import PageExtension\nfrom cms.models.pluginmodel import CMSPlugin\nfrom parler.models import TranslatableModel, TranslatedFields\n\n\nclass PersonTitle(TranslatableModel):\n \"\"\"\n PersonTitle define i18ned list of people titles and there abbreviations\n Instances of this models should only be created by CMS administrators\n \"\"\"\n\n translations = TranslatedFields(\n title=models.CharField(_(\"Title\"), max_length=200),\n abbreviation=models.CharField(_(\"Title abbreviation\"), max_length=10),\n )\n\n class Meta:\n verbose_name = _(\"person title\")\n\n def __str__(self):\n \"\"\"Human representation of a person title\"\"\"\n return \"{model}: {title} ({abbreviation})\".format(\n model=self._meta.verbose_name.title(),\n title=self.title,\n abbreviation=self.abbreviation,\n )\n\n\nclass Person(PageExtension):\n \"\"\"\n The person page extension represents and records people information.\n It could be a course or news article author.\n\n This model should be used to record structured data about the person whereas the\n associated page object is where we record the less structured information to display on the\n page to present the person.\n \"\"\"\n\n first_name = models.CharField(max_length=200, verbose_name=_(\"First name\"))\n last_name = models.CharField(max_length=200, verbose_name=_(\"Last name\"))\n\n person_title = models.ForeignKey(\"PersonTitle\", related_name=\"persons\")\n\n ROOT_REVERSE_ID = \"persons\"\n TEMPLATE_DETAIL = \"persons/cms/person_detail.html\"\n\n class Meta:\n verbose_name = _(\"person\")\n\n def __str__(self):\n \"\"\"Human representation of a person\"\"\"\n return \"{model}: {title} ({full_name})\".format(\n model=self._meta.verbose_name.title(),\n title=self.extended_object.get_title(),\n full_name=self.get_full_name(),\n )\n\n def save(self, *args, **kwargs):\n \"\"\"\n Enforce validation on each instance save\n \"\"\"\n self.full_clean()\n super().save(*args, **kwargs)\n\n def get_full_name(self):\n \"\"\"\n Return person's full name\n \"\"\"\n return \"{person_title} {first_name} {last_name}\".format(\n person_title=self.person_title.title,\n first_name=self.first_name,\n last_name=self.last_name,\n )\n\n\nclass PersonPluginModel(CMSPlugin):\n \"\"\"\n Person plugin model handles the relation from PersonPlugin\n to their Person instance\n \"\"\"\n\n page = models.ForeignKey(Page)\n\n class Meta:\n verbose_name = _(\"person plugin model\")\n\n def __str__(self):\n \"\"\"Human representation of a person plugin\"\"\"\n return \"{model:s}: {id:d}\".format(\n model=self._meta.verbose_name.title(), id=self.id\n )\n", "path": "src/richie/apps/persons/models.py" } ]
[ { "content": "\"\"\"\nDeclare and configure the model for the person application\n\"\"\"\nfrom django.db import models\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom cms.api import Page\nfrom cms.extensions import PageExtension\nfrom cms.models.pluginmodel import CMSPlugin\nfrom parler.models import TranslatableModel, TranslatedFields\n\n\nclass PersonTitle(TranslatableModel):\n \"\"\"\n PersonTitle define i18ned list of people titles and there abbreviations\n Instances of this models should only be created by CMS administrators\n \"\"\"\n\n translations = TranslatedFields(\n title=models.CharField(_(\"Title\"), max_length=200),\n abbreviation=models.CharField(_(\"Title abbreviation\"), max_length=10),\n )\n\n class Meta:\n verbose_name = _(\"person title\")\n\n def __str__(self):\n \"\"\"Human representation of a person title\"\"\"\n return \"{model}: {title} ({abbreviation})\".format(\n model=self._meta.verbose_name.title(),\n title=self.title,\n abbreviation=self.abbreviation,\n )\n\n\nclass Person(PageExtension):\n \"\"\"\n The person page extension represents and records people information.\n It could be a course or news article author.\n\n This model should be used to record structured data about the person whereas the\n associated page object is where we record the less structured information to display on the\n page to present the person.\n \"\"\"\n\n first_name = models.CharField(max_length=200, verbose_name=_(\"First name\"))\n last_name = models.CharField(max_length=200, verbose_name=_(\"Last name\"))\n\n person_title = models.ForeignKey(\"PersonTitle\", related_name=\"persons\")\n\n ROOT_REVERSE_ID = \"persons\"\n TEMPLATE_DETAIL = \"persons/cms/person_detail.html\"\n\n class Meta:\n verbose_name = _(\"person\")\n\n def __str__(self):\n \"\"\"Human representation of a person\"\"\"\n return \"{model}: {title} ({full_name})\".format(\n model=self._meta.verbose_name.title(),\n title=self.extended_object.get_title(),\n full_name=self.get_full_name(),\n )\n\n def save(self, *args, **kwargs):\n \"\"\"\n Enforce validation on each instance save\n \"\"\"\n self.full_clean()\n super().save(*args, **kwargs)\n\n def get_full_name(self):\n \"\"\"\n Return person's full name\n \"\"\"\n return \"{person_title} {first_name} {last_name}\".format(\n person_title=self.person_title.title,\n first_name=self.first_name,\n last_name=self.last_name,\n )\n\n\nclass PersonPluginModel(CMSPlugin):\n \"\"\"\n Person plugin model handles the relation from PersonPlugin\n to their Person instance\n \"\"\"\n\n page = models.ForeignKey(Page, limit_choices_to={\"person__isnull\": False})\n\n class Meta:\n verbose_name = _(\"person plugin model\")\n\n def __str__(self):\n \"\"\"Human representation of a person plugin\"\"\"\n return \"{model:s}: {id:d}\".format(\n model=self._meta.verbose_name.title(), id=self.id\n )\n", "path": "src/richie/apps/persons/models.py" } ]
diff --git a/sandbox/static/css/main.css b/sandbox/static/css/main.css new file mode 100644 index 0000000000..615a3b59b5 --- /dev/null +++ b/sandbox/static/css/main.css @@ -0,0 +1,2593 @@ +/* local */ +/*! + * Bootstrap Reboot v4.1.1 (https://getbootstrap.com/) + * Copyright 2011-2018 The Bootstrap Authors + * Copyright 2011-2018 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE) + * Forked from Normalize.css, licensed MIT (https://github.com/necolas/normalize.css/blob/master/LICENSE.md) + */ +*, +*::before, +*::after { + box-sizing: border-box; } + +html { + font-family: sans-serif; + line-height: 1.15; + -webkit-text-size-adjust: 100%; + -ms-text-size-adjust: 100%; + -ms-overflow-style: scrollbar; + -webkit-tap-highlight-color: rgba(0, 0, 0, 0); } + +@-ms-viewport { + width: device-width; } + +article, aside, figcaption, figure, footer, header, hgroup, main, nav, section { + display: block; } + +body { + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; + font-size: 1rem; + font-weight: 400; + line-height: 1.5; + color: #212529; + text-align: left; + background-color: #f8f9fa; } + +[tabindex="-1"]:focus { + outline: 0 !important; } + +hr { + box-sizing: content-box; + height: 0; + overflow: visible; } + +h1, h2, h3, h4, h5, h6 { + margin-top: 0; + margin-bottom: 0.5rem; } + +p { + margin-top: 0; + margin-bottom: 1rem; } + +abbr[title], +abbr[data-original-title] { + text-decoration: underline; + text-decoration: underline dotted; + cursor: help; + border-bottom: 0; } + +address { + margin-bottom: 1rem; + font-style: normal; + line-height: inherit; } + +ol, +ul, +dl { + margin-top: 0; + margin-bottom: 1rem; } + +ol ol, +ul ul, +ol ul, +ul ol { + margin-bottom: 0; } + +dt { + font-weight: 700; } + +dd { + margin-bottom: .5rem; + margin-left: 0; } + +blockquote { + margin: 0 0 1rem; } + +dfn { + font-style: italic; } + +b, +strong { + font-weight: bolder; } + +small { + font-size: 80%; } + +sub, +sup { + position: relative; + font-size: 75%; + line-height: 0; + vertical-align: baseline; } + +sub { + bottom: -.25em; } + +sup { + top: -.5em; } + +a { + color: #007bff; + text-decoration: none; + background-color: transparent; + -webkit-text-decoration-skip: objects; } + a:hover { + color: #0056b3; + text-decoration: underline; } + +a:not([href]):not([tabindex]) { + color: inherit; + text-decoration: none; } + a:not([href]):not([tabindex]):hover, a:not([href]):not([tabindex]):focus { + color: inherit; + text-decoration: none; } + a:not([href]):not([tabindex]):focus { + outline: 0; } + +pre, +code, +kbd, +samp { + font-family: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; + font-size: 1em; } + +pre { + margin-top: 0; + margin-bottom: 1rem; + overflow: auto; + -ms-overflow-style: scrollbar; } + +figure { + margin: 0 0 1rem; } + +img { + vertical-align: middle; + border-style: none; } + +svg:not(:root) { + overflow: hidden; } + +table { + border-collapse: collapse; } + +caption { + padding-top: 0.75rem; + padding-bottom: 0.75rem; + color: #6c757d; + text-align: left; + caption-side: bottom; } + +th { + text-align: inherit; } + +label { + display: inline-block; + margin-bottom: 0.5rem; } + +button { + border-radius: 0; } + +button:focus { + outline: 1px dotted; + outline: 5px auto -webkit-focus-ring-color; } + +input, +button, +select, +optgroup, +textarea { + margin: 0; + font-family: inherit; 
+ font-size: inherit; + line-height: inherit; } + +button, +input { + overflow: visible; } + +button, +select { + text-transform: none; } + +button, +html [type="button"], +[type="reset"], +[type="submit"] { + -webkit-appearance: button; } + +button::-moz-focus-inner, +[type="button"]::-moz-focus-inner, +[type="reset"]::-moz-focus-inner, +[type="submit"]::-moz-focus-inner { + padding: 0; + border-style: none; } + +input[type="radio"], +input[type="checkbox"] { + box-sizing: border-box; + padding: 0; } + +input[type="date"], +input[type="time"], +input[type="datetime-local"], +input[type="month"] { + -webkit-appearance: listbox; } + +textarea { + overflow: auto; + resize: vertical; } + +fieldset { + min-width: 0; + padding: 0; + margin: 0; + border: 0; } + +legend { + display: block; + width: 100%; + max-width: 100%; + padding: 0; + margin-bottom: .5rem; + font-size: 1.5rem; + line-height: inherit; + color: inherit; + white-space: normal; } + +progress { + vertical-align: baseline; } + +[type="number"]::-webkit-inner-spin-button, +[type="number"]::-webkit-outer-spin-button { + height: auto; } + +[type="search"] { + outline-offset: -2px; + -webkit-appearance: none; } + +[type="search"]::-webkit-search-cancel-button, +[type="search"]::-webkit-search-decoration { + -webkit-appearance: none; } + +::-webkit-file-upload-button { + font: inherit; + -webkit-appearance: button; } + +output { + display: inline-block; } + +summary { + display: list-item; + cursor: pointer; } + +template { + display: none; } + +[hidden] { + display: none !important; } + +.badge, .search-filter__count { + display: inline-block; + padding: 0.25em 0.4em; + font-size: 75%; + font-weight: 700; + line-height: 1; + text-align: center; + white-space: nowrap; + vertical-align: baseline; + border-radius: 0.25rem; } + .badge:empty, .search-filter__count:empty { + display: none; } + +.btn .badge, .btn .search-filter__count { + position: relative; + top: -1px; } + +.badge-pill, .search-filter__count { + padding-right: 0.6em; + padding-left: 0.6em; + border-radius: 10rem; } + +.badge-primary, .search-filter__count { + color: #fff; + background-color: #007bff; } + .badge-primary[href]:hover, .search-filter__count[href]:hover, .badge-primary[href]:focus, .search-filter__count[href]:focus { + color: #fff; + text-decoration: none; + background-color: #0062cc; } + +.badge-secondary { + color: #fff; + background-color: #6c757d; } + .badge-secondary[href]:hover, .badge-secondary[href]:focus { + color: #fff; + text-decoration: none; + background-color: #545b62; } + +.badge-success { + color: #fff; + background-color: #28a745; } + .badge-success[href]:hover, .badge-success[href]:focus { + color: #fff; + text-decoration: none; + background-color: #1e7e34; } + +.badge-info { + color: #fff; + background-color: #17a2b8; } + .badge-info[href]:hover, .badge-info[href]:focus { + color: #fff; + text-decoration: none; + background-color: #117a8b; } + +.badge-warning { + color: #212529; + background-color: #ffc107; } + .badge-warning[href]:hover, .badge-warning[href]:focus { + color: #212529; + text-decoration: none; + background-color: #d39e00; } + +.badge-danger { + color: #fff; + background-color: #dc3545; } + .badge-danger[href]:hover, .badge-danger[href]:focus { + color: #fff; + text-decoration: none; + background-color: #bd2130; } + +.badge-light { + color: #212529; + background-color: #f8f9fa; } + .badge-light[href]:hover, .badge-light[href]:focus { + color: #212529; + text-decoration: none; + background-color: #dae0e5; } + +.badge-dark { + 
color: #fff; + background-color: #343a40; } + .badge-dark[href]:hover, .badge-dark[href]:focus { + color: #fff; + text-decoration: none; + background-color: #1d2124; } + +.card, .course-glimpse { + position: relative; + display: flex; + flex-direction: column; + min-width: 0; + word-wrap: break-word; + background-color: #fff; + background-clip: border-box; + border: 1px solid rgba(0, 0, 0, 0.125); + border-radius: 0.25rem; } + .card > hr, .course-glimpse > hr { + margin-right: 0; + margin-left: 0; } + .card > .list-group:first-child .list-group-item:first-child, .course-glimpse > .list-group:first-child .list-group-item:first-child, .card > .search-filter-group__list:first-child .list-group-item:first-child, .course-glimpse > .search-filter-group__list:first-child .list-group-item:first-child, .card > .list-group:first-child .search-filter:first-child, .course-glimpse > .list-group:first-child .search-filter:first-child, .card > .search-filter-group__list:first-child .search-filter:first-child, .course-glimpse > .search-filter-group__list:first-child .search-filter:first-child { + border-top-left-radius: 0.25rem; + border-top-right-radius: 0.25rem; } + .card > .list-group:last-child .list-group-item:last-child, .course-glimpse > .list-group:last-child .list-group-item:last-child, .card > .search-filter-group__list:last-child .list-group-item:last-child, .course-glimpse > .search-filter-group__list:last-child .list-group-item:last-child, .card > .list-group:last-child .search-filter:last-child, .course-glimpse > .list-group:last-child .search-filter:last-child, .card > .search-filter-group__list:last-child .search-filter:last-child, .course-glimpse > .search-filter-group__list:last-child .search-filter:last-child { + border-bottom-right-radius: 0.25rem; + border-bottom-left-radius: 0.25rem; } + +.card-body, .course-glimpse__body { + flex: 1 1 auto; + padding: 1.25rem; } + +.card-title, .course-glimpse__body__title { + margin-bottom: 0.75rem; } + +.card-subtitle { + margin-top: -0.375rem; + margin-bottom: 0; } + +.card-text:last-child { + margin-bottom: 0; } + +.card-link:hover { + text-decoration: none; } + +.card-link + .card-link { + margin-left: 1.25rem; } + +.card-header { + padding: 0.75rem 1.25rem; + margin-bottom: 0; + background-color: rgba(0, 0, 0, 0.03); + border-bottom: 1px solid rgba(0, 0, 0, 0.125); } + .card-header:first-child { + border-radius: calc(0.25rem - 1px) calc(0.25rem - 1px) 0 0; } + .card-header + .list-group .list-group-item:first-child, .card-header + .search-filter-group__list .list-group-item:first-child, .card-header + .list-group .search-filter:first-child, .card-header + .search-filter-group__list .search-filter:first-child { + border-top: 0; } + +.card-footer, .course-glimpse__date { + padding: 0.75rem 1.25rem; + background-color: rgba(0, 0, 0, 0.03); + border-top: 1px solid rgba(0, 0, 0, 0.125); } + .card-footer:last-child, .course-glimpse__date:last-child { + border-radius: 0 0 calc(0.25rem - 1px) calc(0.25rem - 1px); } + +.card-header-tabs { + margin-right: -0.625rem; + margin-bottom: -0.75rem; + margin-left: -0.625rem; + border-bottom: 0; } + +.card-header-pills { + margin-right: -0.625rem; + margin-left: -0.625rem; } + +.card-img-overlay { + position: absolute; + top: 0; + right: 0; + bottom: 0; + left: 0; + padding: 1.25rem; } + +.card-img { + width: 100%; + border-radius: calc(0.25rem - 1px); } + +.card-img-top, .course-glimpse__image { + width: 100%; + border-top-left-radius: calc(0.25rem - 1px); + border-top-right-radius: calc(0.25rem - 1px); } + 
+.card-img-bottom { + width: 100%; + border-bottom-right-radius: calc(0.25rem - 1px); + border-bottom-left-radius: calc(0.25rem - 1px); } + +.card-deck { + display: flex; + flex-direction: column; } + .card-deck .card, .card-deck .course-glimpse { + margin-bottom: 15px; } + @media (min-width: 576px) { + .card-deck { + flex-flow: row wrap; + margin-right: -15px; + margin-left: -15px; } + .card-deck .card, .card-deck .course-glimpse { + display: flex; + flex: 1 0 0%; + flex-direction: column; + margin-right: 15px; + margin-bottom: 0; + margin-left: 15px; } } + +.card-group { + display: flex; + flex-direction: column; } + .card-group > .card, .card-group > .course-glimpse { + margin-bottom: 15px; } + @media (min-width: 576px) { + .card-group { + flex-flow: row wrap; } + .card-group > .card, .card-group > .course-glimpse { + flex: 1 0 0%; + margin-bottom: 0; } + .card-group > .card + .card, .card-group > .course-glimpse + .card, .card-group > .card + .course-glimpse, .card-group > .course-glimpse + .course-glimpse { + margin-left: 0; + border-left: 0; } + .card-group > .card:first-child, .card-group > .course-glimpse:first-child { + border-top-right-radius: 0; + border-bottom-right-radius: 0; } + .card-group > .card:first-child .card-img-top, .card-group > .course-glimpse:first-child .card-img-top, .card-group > .card:first-child .course-glimpse__image, .card-group > .course-glimpse:first-child .course-glimpse__image, + .card-group > .card:first-child .card-header, + .card-group > .course-glimpse:first-child .card-header { + border-top-right-radius: 0; } + .card-group > .card:first-child .card-img-bottom, .card-group > .course-glimpse:first-child .card-img-bottom, + .card-group > .card:first-child .card-footer, + .card-group > .course-glimpse:first-child .card-footer, + .card-group > .card:first-child .course-glimpse__date, + .card-group > .course-glimpse:first-child .course-glimpse__date { + border-bottom-right-radius: 0; } + .card-group > .card:last-child, .card-group > .course-glimpse:last-child { + border-top-left-radius: 0; + border-bottom-left-radius: 0; } + .card-group > .card:last-child .card-img-top, .card-group > .course-glimpse:last-child .card-img-top, .card-group > .card:last-child .course-glimpse__image, .card-group > .course-glimpse:last-child .course-glimpse__image, + .card-group > .card:last-child .card-header, + .card-group > .course-glimpse:last-child .card-header { + border-top-left-radius: 0; } + .card-group > .card:last-child .card-img-bottom, .card-group > .course-glimpse:last-child .card-img-bottom, + .card-group > .card:last-child .card-footer, + .card-group > .course-glimpse:last-child .card-footer, + .card-group > .card:last-child .course-glimpse__date, + .card-group > .course-glimpse:last-child .course-glimpse__date { + border-bottom-left-radius: 0; } + .card-group > .card:only-child, .card-group > .course-glimpse:only-child { + border-radius: 0.25rem; } + .card-group > .card:only-child .card-img-top, .card-group > .course-glimpse:only-child .card-img-top, .card-group > .card:only-child .course-glimpse__image, .card-group > .course-glimpse:only-child .course-glimpse__image, + .card-group > .card:only-child .card-header, + .card-group > .course-glimpse:only-child .card-header { + border-top-left-radius: 0.25rem; + border-top-right-radius: 0.25rem; } + .card-group > .card:only-child .card-img-bottom, .card-group > .course-glimpse:only-child .card-img-bottom, + .card-group > .card:only-child .card-footer, + .card-group > .course-glimpse:only-child .card-footer, + 
.card-group > .card:only-child .course-glimpse__date, + .card-group > .course-glimpse:only-child .course-glimpse__date { + border-bottom-right-radius: 0.25rem; + border-bottom-left-radius: 0.25rem; } + .card-group > .card:not(:first-child):not(:last-child):not(:only-child), .card-group > .course-glimpse:not(:first-child):not(:last-child):not(:only-child) { + border-radius: 0; } + .card-group > .card:not(:first-child):not(:last-child):not(:only-child) .card-img-top, .card-group > .course-glimpse:not(:first-child):not(:last-child):not(:only-child) .card-img-top, .card-group > .card:not(:first-child):not(:last-child):not(:only-child) .course-glimpse__image, .card-group > .course-glimpse:not(:first-child):not(:last-child):not(:only-child) .course-glimpse__image, + .card-group > .card:not(:first-child):not(:last-child):not(:only-child) .card-img-bottom, + .card-group > .course-glimpse:not(:first-child):not(:last-child):not(:only-child) .card-img-bottom, + .card-group > .card:not(:first-child):not(:last-child):not(:only-child) .card-header, + .card-group > .course-glimpse:not(:first-child):not(:last-child):not(:only-child) .card-header, + .card-group > .card:not(:first-child):not(:last-child):not(:only-child) .card-footer, + .card-group > .course-glimpse:not(:first-child):not(:last-child):not(:only-child) .card-footer, + .card-group > .card:not(:first-child):not(:last-child):not(:only-child) .course-glimpse__date, + .card-group > .course-glimpse:not(:first-child):not(:last-child):not(:only-child) .course-glimpse__date { + border-radius: 0; } } + +.card-columns .card, .card-columns .course-glimpse { + margin-bottom: 0.75rem; } + +@media (min-width: 576px) { + .card-columns { + column-count: 3; + column-gap: 1.25rem; + orphans: 1; + widows: 1; } + .card-columns .card, .card-columns .course-glimpse { + display: inline-block; + width: 100%; } } + +.accordion .card:not(:first-of-type):not(:last-of-type), .accordion .course-glimpse:not(:first-of-type):not(:last-of-type) { + border-bottom: 0; + border-radius: 0; } + +.accordion .card:not(:first-of-type) .card-header:first-child, .accordion .course-glimpse:not(:first-of-type) .card-header:first-child { + border-radius: 0; } + +.accordion .card:first-of-type, .accordion .course-glimpse:first-of-type { + border-bottom: 0; + border-bottom-right-radius: 0; + border-bottom-left-radius: 0; } + +.accordion .card:last-of-type, .accordion .course-glimpse:last-of-type { + border-top-left-radius: 0; + border-top-right-radius: 0; } + +.dropup, +.dropright, +.dropdown, +.dropleft { + position: relative; } + +.dropdown-toggle::after { + display: inline-block; + width: 0; + height: 0; + margin-left: 0.255em; + vertical-align: 0.255em; + content: ""; + border-top: 0.3em solid; + border-right: 0.3em solid transparent; + border-bottom: 0; + border-left: 0.3em solid transparent; } + +.dropdown-toggle:empty::after { + margin-left: 0; } + +.dropdown-menu, .react-autosuggest__suggestions-container { + position: absolute; + top: 100%; + left: 0; + z-index: 1000; + display: none; + float: left; + min-width: 10rem; + padding: 0.5rem 0; + margin: 0.125rem 0 0; + font-size: 1rem; + color: #212529; + text-align: left; + list-style: none; + background-color: #fff; + background-clip: padding-box; + border: 1px solid rgba(0, 0, 0, 0.15); + border-radius: 0.25rem; } + +.dropdown-menu-right { + right: 0; + left: auto; } + +.dropup .dropdown-menu, .dropup .react-autosuggest__suggestions-container { + top: auto; + bottom: 100%; + margin-top: 0; + margin-bottom: 0.125rem; } + +.dropup 
.dropdown-toggle::after { + display: inline-block; + width: 0; + height: 0; + margin-left: 0.255em; + vertical-align: 0.255em; + content: ""; + border-top: 0; + border-right: 0.3em solid transparent; + border-bottom: 0.3em solid; + border-left: 0.3em solid transparent; } + +.dropup .dropdown-toggle:empty::after { + margin-left: 0; } + +.dropright .dropdown-menu, .dropright .react-autosuggest__suggestions-container { + top: 0; + right: auto; + left: 100%; + margin-top: 0; + margin-left: 0.125rem; } + +.dropright .dropdown-toggle::after { + display: inline-block; + width: 0; + height: 0; + margin-left: 0.255em; + vertical-align: 0.255em; + content: ""; + border-top: 0.3em solid transparent; + border-right: 0; + border-bottom: 0.3em solid transparent; + border-left: 0.3em solid; } + +.dropright .dropdown-toggle:empty::after { + margin-left: 0; } + +.dropright .dropdown-toggle::after { + vertical-align: 0; } + +.dropleft .dropdown-menu, .dropleft .react-autosuggest__suggestions-container { + top: 0; + right: 100%; + left: auto; + margin-top: 0; + margin-right: 0.125rem; } + +.dropleft .dropdown-toggle::after { + display: inline-block; + width: 0; + height: 0; + margin-left: 0.255em; + vertical-align: 0.255em; + content: ""; } + +.dropleft .dropdown-toggle::after { + display: none; } + +.dropleft .dropdown-toggle::before { + display: inline-block; + width: 0; + height: 0; + margin-right: 0.255em; + vertical-align: 0.255em; + content: ""; + border-top: 0.3em solid transparent; + border-right: 0.3em solid; + border-bottom: 0.3em solid transparent; } + +.dropleft .dropdown-toggle:empty::after { + margin-left: 0; } + +.dropleft .dropdown-toggle::before { + vertical-align: 0; } + +.dropdown-menu[x-placement^="top"], .react-autosuggest__suggestions-container[x-placement^="top"], .dropdown-menu[x-placement^="right"], .react-autosuggest__suggestions-container[x-placement^="right"], .dropdown-menu[x-placement^="bottom"], .react-autosuggest__suggestions-container[x-placement^="bottom"], .dropdown-menu[x-placement^="left"], .react-autosuggest__suggestions-container[x-placement^="left"] { + right: auto; + bottom: auto; } + +.dropdown-divider { + height: 0; + margin: 0.5rem 0; + overflow: hidden; + border-top: 1px solid #e9ecef; } + +.dropdown-item, .react-autosuggest__suggestion { + display: block; + width: 100%; + padding: 0.25rem 1.5rem; + clear: both; + font-weight: 400; + color: #212529; + text-align: inherit; + white-space: nowrap; + background-color: transparent; + border: 0; } + .dropdown-item:hover, .react-autosuggest__suggestion:hover, .dropdown-item:focus, .react-autosuggest__suggestion:focus { + color: #16181b; + text-decoration: none; + background-color: #f8f9fa; } + .dropdown-item.active, .active.react-autosuggest__suggestion, .react-autosuggest__suggestion.react-autosuggest__suggestion--highlighted, .dropdown-item.react-autosuggest__suggestion--highlighted, .dropdown-item:active, .react-autosuggest__suggestion:active { + color: #fff; + text-decoration: none; + background-color: #007bff; } + .dropdown-item.disabled, .disabled.react-autosuggest__suggestion, .dropdown-item:disabled, .react-autosuggest__suggestion:disabled { + color: #6c757d; + background-color: transparent; } + +.dropdown-menu.show, .show.react-autosuggest__suggestions-container { + display: block; } + +.dropdown-header, .react-autosuggest__section-title { + display: block; + padding: 0.5rem 1.5rem; + margin-bottom: 0; + font-size: 0.875rem; + color: #6c757d; + white-space: nowrap; } + +.dropdown-item-text { + display: block; + 
padding: 0.25rem 1.5rem; + color: #212529; } + +.form-control, .react-autosuggest__input { + display: block; + width: 100%; + padding: 0.375rem 0.75rem; + font-size: 1rem; + line-height: 1.5; + color: #495057; + background-color: #fff; + background-clip: padding-box; + border: 1px solid #ced4da; + border-radius: 0.25rem; + transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out; } + @media screen and (prefers-reduced-motion: reduce) { + .form-control, .react-autosuggest__input { + transition: none; } } + .form-control::-ms-expand, .react-autosuggest__input::-ms-expand { + background-color: transparent; + border: 0; } + .form-control:focus, .react-autosuggest__input:focus { + color: #495057; + background-color: #fff; + border-color: #80bdff; + outline: 0; + box-shadow: 0 0 0 0.2rem rgba(0, 123, 255, 0.25); } + .form-control::placeholder, .react-autosuggest__input::placeholder { + color: #6c757d; + opacity: 1; } + .form-control:disabled, .react-autosuggest__input:disabled, .form-control[readonly], .react-autosuggest__input[readonly] { + background-color: #e9ecef; + opacity: 1; } + +select.form-control:not([size]):not([multiple]), select.react-autosuggest__input:not([size]):not([multiple]) { + height: calc(2.25rem + 2px); } + +select.form-control:focus::-ms-value, select.react-autosuggest__input:focus::-ms-value { + color: #495057; + background-color: #fff; } + +.form-control-file, +.form-control-range { + display: block; + width: 100%; } + +.col-form-label { + padding-top: calc(0.375rem + 1px); + padding-bottom: calc(0.375rem + 1px); + margin-bottom: 0; + font-size: inherit; + line-height: 1.5; } + +.col-form-label-lg { + padding-top: calc(0.5rem + 1px); + padding-bottom: calc(0.5rem + 1px); + font-size: 1.25rem; + line-height: 1.5; } + +.col-form-label-sm { + padding-top: calc(0.25rem + 1px); + padding-bottom: calc(0.25rem + 1px); + font-size: 0.875rem; + line-height: 1.5; } + +.form-control-plaintext { + display: block; + width: 100%; + padding-top: 0.375rem; + padding-bottom: 0.375rem; + margin-bottom: 0; + line-height: 1.5; + color: #212529; + background-color: transparent; + border: solid transparent; + border-width: 1px 0; } + .form-control-plaintext.form-control-sm, .form-control-plaintext.form-control-lg { + padding-right: 0; + padding-left: 0; } + +.form-control-sm { + padding: 0.25rem 0.5rem; + font-size: 0.875rem; + line-height: 1.5; + border-radius: 0.2rem; } + +select.form-control-sm:not([size]):not([multiple]) { + height: calc(1.8125rem + 2px); } + +.form-control-lg { + padding: 0.5rem 1rem; + font-size: 1.25rem; + line-height: 1.5; + border-radius: 0.3rem; } + +select.form-control-lg:not([size]):not([multiple]) { + height: calc(2.875rem + 2px); } + +.form-group { + margin-bottom: 1rem; } + +.form-text { + display: block; + margin-top: 0.25rem; } + +.form-row { + display: flex; + flex-wrap: wrap; + margin-right: -5px; + margin-left: -5px; } + .form-row > .col, + .form-row > [class*="col-"] { + padding-right: 5px; + padding-left: 5px; } + +.form-check { + position: relative; + display: block; + padding-left: 1.25rem; } + +.form-check-input { + position: absolute; + margin-top: 0.3rem; + margin-left: -1.25rem; } + .form-check-input:disabled ~ .form-check-label { + color: #6c757d; } + +.form-check-label { + margin-bottom: 0; } + +.form-check-inline { + display: inline-flex; + align-items: center; + padding-left: 0; + margin-right: 0.75rem; } + .form-check-inline .form-check-input { + position: static; + margin-top: 0; + margin-right: 0.3125rem; + margin-left: 0; } 
+ +.valid-feedback { + display: none; + width: 100%; + margin-top: 0.25rem; + font-size: 80%; + color: #28a745; } + +.valid-tooltip { + position: absolute; + top: 100%; + z-index: 5; + display: none; + max-width: 100%; + padding: .5rem; + margin-top: .1rem; + font-size: .875rem; + line-height: 1; + color: #fff; + background-color: rgba(40, 167, 69, 0.8); + border-radius: .2rem; } + +.was-validated .form-control:valid, .was-validated .react-autosuggest__input:valid, .form-control.is-valid, .is-valid.react-autosuggest__input, .was-validated +.custom-select:valid, +.custom-select.is-valid { + border-color: #28a745; } + .was-validated .form-control:valid:focus, .was-validated .react-autosuggest__input:valid:focus, .form-control.is-valid:focus, .is-valid.react-autosuggest__input:focus, .was-validated + .custom-select:valid:focus, + .custom-select.is-valid:focus { + border-color: #28a745; + box-shadow: 0 0 0 0.2rem rgba(40, 167, 69, 0.25); } + .was-validated .form-control:valid ~ .valid-feedback, .was-validated .react-autosuggest__input:valid ~ .valid-feedback, + .was-validated .form-control:valid ~ .valid-tooltip, + .was-validated .react-autosuggest__input:valid ~ .valid-tooltip, .form-control.is-valid ~ .valid-feedback, .is-valid.react-autosuggest__input ~ .valid-feedback, + .form-control.is-valid ~ .valid-tooltip, + .is-valid.react-autosuggest__input ~ .valid-tooltip, .was-validated + .custom-select:valid ~ .valid-feedback, + .was-validated + .custom-select:valid ~ .valid-tooltip, + .custom-select.is-valid ~ .valid-feedback, + .custom-select.is-valid ~ .valid-tooltip { + display: block; } + +.was-validated .form-control-file:valid ~ .valid-feedback, +.was-validated .form-control-file:valid ~ .valid-tooltip, .form-control-file.is-valid ~ .valid-feedback, +.form-control-file.is-valid ~ .valid-tooltip { + display: block; } + +.was-validated .form-check-input:valid ~ .form-check-label, .form-check-input.is-valid ~ .form-check-label { + color: #28a745; } + +.was-validated .form-check-input:valid ~ .valid-feedback, +.was-validated .form-check-input:valid ~ .valid-tooltip, .form-check-input.is-valid ~ .valid-feedback, +.form-check-input.is-valid ~ .valid-tooltip { + display: block; } + +.was-validated .custom-control-input:valid ~ .custom-control-label, .custom-control-input.is-valid ~ .custom-control-label { + color: #28a745; } + .was-validated .custom-control-input:valid ~ .custom-control-label::before, .custom-control-input.is-valid ~ .custom-control-label::before { + background-color: #71dd8a; } + +.was-validated .custom-control-input:valid ~ .valid-feedback, +.was-validated .custom-control-input:valid ~ .valid-tooltip, .custom-control-input.is-valid ~ .valid-feedback, +.custom-control-input.is-valid ~ .valid-tooltip { + display: block; } + +.was-validated .custom-control-input:valid:checked ~ .custom-control-label::before, .custom-control-input.is-valid:checked ~ .custom-control-label::before { + background-color: #34ce57; } + +.was-validated .custom-control-input:valid:focus ~ .custom-control-label::before, .custom-control-input.is-valid:focus ~ .custom-control-label::before { + box-shadow: 0 0 0 1px #f8f9fa, 0 0 0 0.2rem rgba(40, 167, 69, 0.25); } + +.was-validated .custom-file-input:valid ~ .custom-file-label, .custom-file-input.is-valid ~ .custom-file-label { + border-color: #28a745; } + .was-validated .custom-file-input:valid ~ .custom-file-label::before, .custom-file-input.is-valid ~ .custom-file-label::before { + border-color: inherit; } + +.was-validated .custom-file-input:valid ~ 
.valid-feedback, +.was-validated .custom-file-input:valid ~ .valid-tooltip, .custom-file-input.is-valid ~ .valid-feedback, +.custom-file-input.is-valid ~ .valid-tooltip { + display: block; } + +.was-validated .custom-file-input:valid:focus ~ .custom-file-label, .custom-file-input.is-valid:focus ~ .custom-file-label { + box-shadow: 0 0 0 0.2rem rgba(40, 167, 69, 0.25); } + +.invalid-feedback { + display: none; + width: 100%; + margin-top: 0.25rem; + font-size: 80%; + color: #dc3545; } + +.invalid-tooltip { + position: absolute; + top: 100%; + z-index: 5; + display: none; + max-width: 100%; + padding: .5rem; + margin-top: .1rem; + font-size: .875rem; + line-height: 1; + color: #fff; + background-color: rgba(220, 53, 69, 0.8); + border-radius: .2rem; } + +.was-validated .form-control:invalid, .was-validated .react-autosuggest__input:invalid, .form-control.is-invalid, .is-invalid.react-autosuggest__input, .was-validated +.custom-select:invalid, +.custom-select.is-invalid { + border-color: #dc3545; } + .was-validated .form-control:invalid:focus, .was-validated .react-autosuggest__input:invalid:focus, .form-control.is-invalid:focus, .is-invalid.react-autosuggest__input:focus, .was-validated + .custom-select:invalid:focus, + .custom-select.is-invalid:focus { + border-color: #dc3545; + box-shadow: 0 0 0 0.2rem rgba(220, 53, 69, 0.25); } + .was-validated .form-control:invalid ~ .invalid-feedback, .was-validated .react-autosuggest__input:invalid ~ .invalid-feedback, + .was-validated .form-control:invalid ~ .invalid-tooltip, + .was-validated .react-autosuggest__input:invalid ~ .invalid-tooltip, .form-control.is-invalid ~ .invalid-feedback, .is-invalid.react-autosuggest__input ~ .invalid-feedback, + .form-control.is-invalid ~ .invalid-tooltip, + .is-invalid.react-autosuggest__input ~ .invalid-tooltip, .was-validated + .custom-select:invalid ~ .invalid-feedback, + .was-validated + .custom-select:invalid ~ .invalid-tooltip, + .custom-select.is-invalid ~ .invalid-feedback, + .custom-select.is-invalid ~ .invalid-tooltip { + display: block; } + +.was-validated .form-control-file:invalid ~ .invalid-feedback, +.was-validated .form-control-file:invalid ~ .invalid-tooltip, .form-control-file.is-invalid ~ .invalid-feedback, +.form-control-file.is-invalid ~ .invalid-tooltip { + display: block; } + +.was-validated .form-check-input:invalid ~ .form-check-label, .form-check-input.is-invalid ~ .form-check-label { + color: #dc3545; } + +.was-validated .form-check-input:invalid ~ .invalid-feedback, +.was-validated .form-check-input:invalid ~ .invalid-tooltip, .form-check-input.is-invalid ~ .invalid-feedback, +.form-check-input.is-invalid ~ .invalid-tooltip { + display: block; } + +.was-validated .custom-control-input:invalid ~ .custom-control-label, .custom-control-input.is-invalid ~ .custom-control-label { + color: #dc3545; } + .was-validated .custom-control-input:invalid ~ .custom-control-label::before, .custom-control-input.is-invalid ~ .custom-control-label::before { + background-color: #efa2a9; } + +.was-validated .custom-control-input:invalid ~ .invalid-feedback, +.was-validated .custom-control-input:invalid ~ .invalid-tooltip, .custom-control-input.is-invalid ~ .invalid-feedback, +.custom-control-input.is-invalid ~ .invalid-tooltip { + display: block; } + +.was-validated .custom-control-input:invalid:checked ~ .custom-control-label::before, .custom-control-input.is-invalid:checked ~ .custom-control-label::before { + background-color: #e4606d; } + +.was-validated .custom-control-input:invalid:focus ~ 
.custom-control-label::before, .custom-control-input.is-invalid:focus ~ .custom-control-label::before { + box-shadow: 0 0 0 1px #f8f9fa, 0 0 0 0.2rem rgba(220, 53, 69, 0.25); } + +.was-validated .custom-file-input:invalid ~ .custom-file-label, .custom-file-input.is-invalid ~ .custom-file-label { + border-color: #dc3545; } + .was-validated .custom-file-input:invalid ~ .custom-file-label::before, .custom-file-input.is-invalid ~ .custom-file-label::before { + border-color: inherit; } + +.was-validated .custom-file-input:invalid ~ .invalid-feedback, +.was-validated .custom-file-input:invalid ~ .invalid-tooltip, .custom-file-input.is-invalid ~ .invalid-feedback, +.custom-file-input.is-invalid ~ .invalid-tooltip { + display: block; } + +.was-validated .custom-file-input:invalid:focus ~ .custom-file-label, .custom-file-input.is-invalid:focus ~ .custom-file-label { + box-shadow: 0 0 0 0.2rem rgba(220, 53, 69, 0.25); } + +.form-inline { + display: flex; + flex-flow: row wrap; + align-items: center; } + .form-inline .form-check { + width: 100%; } + @media (min-width: 576px) { + .form-inline label { + display: flex; + align-items: center; + justify-content: center; + margin-bottom: 0; } + .form-inline .form-group { + display: flex; + flex: 0 0 auto; + flex-flow: row wrap; + align-items: center; + margin-bottom: 0; } + .form-inline .form-control, .form-inline .react-autosuggest__input { + display: inline-block; + width: auto; + vertical-align: middle; } + .form-inline .form-control-plaintext { + display: inline-block; } + .form-inline .input-group, + .form-inline .custom-select { + width: auto; } + .form-inline .form-check { + display: flex; + align-items: center; + justify-content: center; + width: auto; + padding-left: 0; } + .form-inline .form-check-input { + position: relative; + margin-top: 0; + margin-right: 0.25rem; + margin-left: 0; } + .form-inline .custom-control { + align-items: center; + justify-content: center; } + .form-inline .custom-control-label { + margin-bottom: 0; } } + +.list-group, .search-filter-group__list { + display: flex; + flex-direction: column; + padding-left: 0; + margin-bottom: 0; } + +.list-group-item-action, .search-filter { + width: 100%; + color: #495057; + text-align: inherit; } + .list-group-item-action:hover, .search-filter:hover, .list-group-item-action:focus, .search-filter:focus { + color: #495057; + text-decoration: none; + background-color: #f8f9fa; } + .list-group-item-action:active, .search-filter:active { + color: #212529; + background-color: #e9ecef; } + +.list-group-item, .search-filter { + position: relative; + display: block; + padding: 0.75rem 1.25rem; + margin-bottom: -1px; + background-color: #fff; + border: 1px solid rgba(0, 0, 0, 0.125); } + .list-group-item:first-child, .search-filter:first-child { + border-top-left-radius: 0.25rem; + border-top-right-radius: 0.25rem; } + .list-group-item:last-child, .search-filter:last-child { + margin-bottom: 0; + border-bottom-right-radius: 0.25rem; + border-bottom-left-radius: 0.25rem; } + .list-group-item:hover, .search-filter:hover, .list-group-item:focus, .search-filter:focus { + z-index: 1; + text-decoration: none; } + .list-group-item.disabled, .disabled.search-filter, .list-group-item:disabled, .search-filter:disabled { + color: #6c757d; + background-color: #fff; } + .list-group-item.active, .list-group-item.react-autosuggest__suggestion--highlighted, .react-autosuggest__suggestion--highlighted.search-filter, .active.search-filter { + z-index: 2; + color: #fff; + background-color: #007bff; + 
border-color: #007bff; } + +.list-group-flush .list-group-item, .search-filter-group__list .list-group-item, .list-group-flush .search-filter, .search-filter-group__list .search-filter { + border-right: 0; + border-left: 0; + border-radius: 0; } + +.list-group-flush:first-child .list-group-item:first-child, .search-filter-group__list:first-child .list-group-item:first-child, .list-group-flush:first-child .search-filter:first-child, .search-filter-group__list:first-child .search-filter:first-child { + border-top: 0; } + +.list-group-flush:last-child .list-group-item:last-child, .search-filter-group__list:last-child .list-group-item:last-child, .list-group-flush:last-child .search-filter:last-child, .search-filter-group__list:last-child .search-filter:last-child { + border-bottom: 0; } + +.list-group-item-primary { + color: #004085; + background-color: #b8daff; } + .list-group-item-primary.list-group-item-action:hover, .list-group-item-primary.search-filter:hover, .list-group-item-primary.list-group-item-action:focus, .list-group-item-primary.search-filter:focus { + color: #004085; + background-color: #9fcdff; } + .list-group-item-primary.list-group-item-action.active, .list-group-item-primary.list-group-item-action.react-autosuggest__suggestion--highlighted, .list-group-item-primary.react-autosuggest__suggestion--highlighted.search-filter, .list-group-item-primary.active.search-filter { + color: #fff; + background-color: #004085; + border-color: #004085; } + +.list-group-item-secondary { + color: #383d41; + background-color: #d6d8db; } + .list-group-item-secondary.list-group-item-action:hover, .list-group-item-secondary.search-filter:hover, .list-group-item-secondary.list-group-item-action:focus, .list-group-item-secondary.search-filter:focus { + color: #383d41; + background-color: #c8cbcf; } + .list-group-item-secondary.list-group-item-action.active, .list-group-item-secondary.list-group-item-action.react-autosuggest__suggestion--highlighted, .list-group-item-secondary.react-autosuggest__suggestion--highlighted.search-filter, .list-group-item-secondary.active.search-filter { + color: #fff; + background-color: #383d41; + border-color: #383d41; } + +.list-group-item-success { + color: #155724; + background-color: #c3e6cb; } + .list-group-item-success.list-group-item-action:hover, .list-group-item-success.search-filter:hover, .list-group-item-success.list-group-item-action:focus, .list-group-item-success.search-filter:focus { + color: #155724; + background-color: #b1dfbb; } + .list-group-item-success.list-group-item-action.active, .list-group-item-success.list-group-item-action.react-autosuggest__suggestion--highlighted, .list-group-item-success.react-autosuggest__suggestion--highlighted.search-filter, .list-group-item-success.active.search-filter { + color: #fff; + background-color: #155724; + border-color: #155724; } + +.list-group-item-info { + color: #0c5460; + background-color: #bee5eb; } + .list-group-item-info.list-group-item-action:hover, .list-group-item-info.search-filter:hover, .list-group-item-info.list-group-item-action:focus, .list-group-item-info.search-filter:focus { + color: #0c5460; + background-color: #abdde5; } + .list-group-item-info.list-group-item-action.active, .list-group-item-info.list-group-item-action.react-autosuggest__suggestion--highlighted, .list-group-item-info.react-autosuggest__suggestion--highlighted.search-filter, .list-group-item-info.active.search-filter { + color: #fff; + background-color: #0c5460; + border-color: #0c5460; } + +.list-group-item-warning 
{ + color: #856404; + background-color: #ffeeba; } + .list-group-item-warning.list-group-item-action:hover, .list-group-item-warning.search-filter:hover, .list-group-item-warning.list-group-item-action:focus, .list-group-item-warning.search-filter:focus { + color: #856404; + background-color: #ffe8a1; } + .list-group-item-warning.list-group-item-action.active, .list-group-item-warning.list-group-item-action.react-autosuggest__suggestion--highlighted, .list-group-item-warning.react-autosuggest__suggestion--highlighted.search-filter, .list-group-item-warning.active.search-filter { + color: #fff; + background-color: #856404; + border-color: #856404; } + +.list-group-item-danger { + color: #721c24; + background-color: #f5c6cb; } + .list-group-item-danger.list-group-item-action:hover, .list-group-item-danger.search-filter:hover, .list-group-item-danger.list-group-item-action:focus, .list-group-item-danger.search-filter:focus { + color: #721c24; + background-color: #f1b0b7; } + .list-group-item-danger.list-group-item-action.active, .list-group-item-danger.list-group-item-action.react-autosuggest__suggestion--highlighted, .list-group-item-danger.react-autosuggest__suggestion--highlighted.search-filter, .list-group-item-danger.active.search-filter { + color: #fff; + background-color: #721c24; + border-color: #721c24; } + +.list-group-item-light { + color: #818182; + background-color: #fdfdfe; } + .list-group-item-light.list-group-item-action:hover, .list-group-item-light.search-filter:hover, .list-group-item-light.list-group-item-action:focus, .list-group-item-light.search-filter:focus { + color: #818182; + background-color: #ececf6; } + .list-group-item-light.list-group-item-action.active, .list-group-item-light.list-group-item-action.react-autosuggest__suggestion--highlighted, .list-group-item-light.react-autosuggest__suggestion--highlighted.search-filter, .list-group-item-light.active.search-filter { + color: #fff; + background-color: #818182; + border-color: #818182; } + +.list-group-item-dark { + color: #1b1e21; + background-color: #c6c8ca; } + .list-group-item-dark.list-group-item-action:hover, .list-group-item-dark.search-filter:hover, .list-group-item-dark.list-group-item-action:focus, .list-group-item-dark.search-filter:focus { + color: #1b1e21; + background-color: #b9bbbe; } + .list-group-item-dark.list-group-item-action.active, .list-group-item-dark.list-group-item-action.react-autosuggest__suggestion--highlighted, .list-group-item-dark.react-autosuggest__suggestion--highlighted.search-filter, .list-group-item-dark.active.search-filter { + color: #fff; + background-color: #1b1e21; + border-color: #1b1e21; } + +.nav { + display: flex; + flex-wrap: wrap; + padding-left: 0; + margin-bottom: 0; + list-style: none; } + +.nav-link, .main-menu__list__item { + display: block; + padding: 0.5rem 1rem; } + .nav-link:hover, .main-menu__list__item:hover, .nav-link:focus, .main-menu__list__item:focus { + text-decoration: none; } + .nav-link.disabled, .disabled.main-menu__list__item { + color: #6c757d; } + +.nav-tabs { + border-bottom: 1px solid #dee2e6; } + .nav-tabs .nav-item, .nav-tabs .main-menu__list__item { + margin-bottom: -1px; } + .nav-tabs .nav-link, .nav-tabs .main-menu__list__item { + border: 1px solid transparent; + border-top-left-radius: 0.25rem; + border-top-right-radius: 0.25rem; } + .nav-tabs .nav-link:hover, .nav-tabs .main-menu__list__item:hover, .nav-tabs .nav-link:focus, .nav-tabs .main-menu__list__item:focus { + border-color: #e9ecef #e9ecef #dee2e6; } + .nav-tabs 
.nav-link.disabled, .nav-tabs .disabled.main-menu__list__item { + color: #6c757d; + background-color: transparent; + border-color: transparent; } + .nav-tabs .nav-link.active, .nav-tabs .nav-link.react-autosuggest__suggestion--highlighted, .nav-tabs .react-autosuggest__suggestion--highlighted.main-menu__list__item, .nav-tabs .active.main-menu__list__item, + .nav-tabs .nav-item.show .nav-link, + .nav-tabs .show.main-menu__list__item .nav-link, + .nav-tabs .nav-item.show .main-menu__list__item, + .nav-tabs .show.main-menu__list__item .main-menu__list__item { + color: #495057; + background-color: #fff; + border-color: #dee2e6 #dee2e6 #fff; } + .nav-tabs .dropdown-menu, .nav-tabs .react-autosuggest__suggestions-container { + margin-top: -1px; + border-top-left-radius: 0; + border-top-right-radius: 0; } + +.nav-pills .nav-link, .nav-pills .main-menu__list__item { + border-radius: 0.25rem; } + +.nav-pills .nav-link.active, .nav-pills .nav-link.react-autosuggest__suggestion--highlighted, .nav-pills .react-autosuggest__suggestion--highlighted.main-menu__list__item, .nav-pills .active.main-menu__list__item, +.nav-pills .show > .nav-link, +.nav-pills .show > .main-menu__list__item { + color: #fff; + background-color: #007bff; } + +.nav-fill .nav-item, .nav-fill .main-menu__list__item { + flex: 1 1 auto; + text-align: center; } + +.nav-justified .nav-item, .nav-justified .main-menu__list__item { + flex-basis: 0; + flex-grow: 1; + text-align: center; } + +.tab-content > .tab-pane { + display: none; } + +.tab-content > .active, .tab-content > .react-autosuggest__suggestion--highlighted { + display: block; } + +.navbar, .main-menu { + position: relative; + display: flex; + flex-wrap: wrap; + align-items: center; + justify-content: space-between; + padding: 0.5rem 1rem; } + .navbar > .container, .main-menu > .container, + .navbar > .container-fluid, + .main-menu > .container-fluid { + display: flex; + flex-wrap: wrap; + align-items: center; + justify-content: space-between; } + +.navbar-brand { + display: inline-block; + padding-top: 0.3125rem; + padding-bottom: 0.3125rem; + margin-right: 1rem; + font-size: 1.25rem; + line-height: inherit; + white-space: nowrap; } + .navbar-brand:hover, .navbar-brand:focus { + text-decoration: none; } + +.navbar-nav, .main-menu__list { + display: flex; + flex-direction: column; + padding-left: 0; + margin-bottom: 0; + list-style: none; } + .navbar-nav .nav-link, .main-menu__list .nav-link, .navbar-nav .main-menu__list__item, .main-menu__list .main-menu__list__item { + padding-right: 0; + padding-left: 0; } + .navbar-nav .dropdown-menu, .main-menu__list .dropdown-menu, .navbar-nav .react-autosuggest__suggestions-container, .main-menu__list .react-autosuggest__suggestions-container { + position: static; + float: none; } + +.navbar-text { + display: inline-block; + padding-top: 0.5rem; + padding-bottom: 0.5rem; } + +.navbar-collapse { + flex-basis: 100%; + flex-grow: 1; + align-items: center; } + +.navbar-toggler { + padding: 0.25rem 0.75rem; + font-size: 1.25rem; + line-height: 1; + background-color: transparent; + border: 1px solid transparent; + border-radius: 0.25rem; } + .navbar-toggler:hover, .navbar-toggler:focus { + text-decoration: none; } + .navbar-toggler:not(:disabled):not(.disabled) { + cursor: pointer; } + +.navbar-toggler-icon { + display: inline-block; + width: 1.5em; + height: 1.5em; + vertical-align: middle; + content: ""; + background: no-repeat center center; + background-size: 100% 100%; } + +@media (max-width: 575.98px) { + .navbar-expand-sm > 
.container, + .navbar-expand-sm > .container-fluid { + padding-right: 0; + padding-left: 0; } } + +@media (min-width: 576px) { + .navbar-expand-sm { + flex-flow: row nowrap; + justify-content: flex-start; } + .navbar-expand-sm .navbar-nav, .navbar-expand-sm .main-menu__list { + flex-direction: row; } + .navbar-expand-sm .navbar-nav .dropdown-menu, .navbar-expand-sm .main-menu__list .dropdown-menu, .navbar-expand-sm .navbar-nav .react-autosuggest__suggestions-container, .navbar-expand-sm .main-menu__list .react-autosuggest__suggestions-container { + position: absolute; } + .navbar-expand-sm .navbar-nav .nav-link, .navbar-expand-sm .main-menu__list .nav-link, .navbar-expand-sm .navbar-nav .main-menu__list__item, .navbar-expand-sm .main-menu__list .main-menu__list__item { + padding-right: 0.5rem; + padding-left: 0.5rem; } + .navbar-expand-sm > .container, + .navbar-expand-sm > .container-fluid { + flex-wrap: nowrap; } + .navbar-expand-sm .navbar-collapse { + display: flex !important; + flex-basis: auto; } + .navbar-expand-sm .navbar-toggler { + display: none; } } + +@media (max-width: 767.98px) { + .navbar-expand-md > .container, .main-menu > .container, + .navbar-expand-md > .container-fluid, + .main-menu > .container-fluid { + padding-right: 0; + padding-left: 0; } } + +@media (min-width: 768px) { + .navbar-expand-md, .main-menu { + flex-flow: row nowrap; + justify-content: flex-start; } + .navbar-expand-md .navbar-nav, .main-menu .navbar-nav, .navbar-expand-md .main-menu__list, .main-menu .main-menu__list { + flex-direction: row; } + .navbar-expand-md .navbar-nav .dropdown-menu, .main-menu .navbar-nav .dropdown-menu, .navbar-expand-md .main-menu__list .dropdown-menu, .main-menu .main-menu__list .dropdown-menu, .navbar-expand-md .navbar-nav .react-autosuggest__suggestions-container, .main-menu .navbar-nav .react-autosuggest__suggestions-container, .navbar-expand-md .main-menu__list .react-autosuggest__suggestions-container, .main-menu .main-menu__list .react-autosuggest__suggestions-container { + position: absolute; } + .navbar-expand-md .navbar-nav .nav-link, .main-menu .navbar-nav .nav-link, .navbar-expand-md .main-menu__list .nav-link, .main-menu .main-menu__list .nav-link, .navbar-expand-md .navbar-nav .main-menu__list__item, .main-menu .navbar-nav .main-menu__list__item, .navbar-expand-md .main-menu__list .main-menu__list__item, .main-menu .main-menu__list .main-menu__list__item { + padding-right: 0.5rem; + padding-left: 0.5rem; } + .navbar-expand-md > .container, .main-menu > .container, + .navbar-expand-md > .container-fluid, + .main-menu > .container-fluid { + flex-wrap: nowrap; } + .navbar-expand-md .navbar-collapse, .main-menu .navbar-collapse { + display: flex !important; + flex-basis: auto; } + .navbar-expand-md .navbar-toggler, .main-menu .navbar-toggler { + display: none; } } + +@media (max-width: 991.98px) { + .navbar-expand-lg > .container, + .navbar-expand-lg > .container-fluid { + padding-right: 0; + padding-left: 0; } } + +@media (min-width: 992px) { + .navbar-expand-lg { + flex-flow: row nowrap; + justify-content: flex-start; } + .navbar-expand-lg .navbar-nav, .navbar-expand-lg .main-menu__list { + flex-direction: row; } + .navbar-expand-lg .navbar-nav .dropdown-menu, .navbar-expand-lg .main-menu__list .dropdown-menu, .navbar-expand-lg .navbar-nav .react-autosuggest__suggestions-container, .navbar-expand-lg .main-menu__list .react-autosuggest__suggestions-container { + position: absolute; } + .navbar-expand-lg .navbar-nav .nav-link, .navbar-expand-lg .main-menu__list 
.nav-link, .navbar-expand-lg .navbar-nav .main-menu__list__item, .navbar-expand-lg .main-menu__list .main-menu__list__item { + padding-right: 0.5rem; + padding-left: 0.5rem; } + .navbar-expand-lg > .container, + .navbar-expand-lg > .container-fluid { + flex-wrap: nowrap; } + .navbar-expand-lg .navbar-collapse { + display: flex !important; + flex-basis: auto; } + .navbar-expand-lg .navbar-toggler { + display: none; } } + +@media (max-width: 1199.98px) { + .navbar-expand-xl > .container, + .navbar-expand-xl > .container-fluid { + padding-right: 0; + padding-left: 0; } } + +@media (min-width: 1200px) { + .navbar-expand-xl { + flex-flow: row nowrap; + justify-content: flex-start; } + .navbar-expand-xl .navbar-nav, .navbar-expand-xl .main-menu__list { + flex-direction: row; } + .navbar-expand-xl .navbar-nav .dropdown-menu, .navbar-expand-xl .main-menu__list .dropdown-menu, .navbar-expand-xl .navbar-nav .react-autosuggest__suggestions-container, .navbar-expand-xl .main-menu__list .react-autosuggest__suggestions-container { + position: absolute; } + .navbar-expand-xl .navbar-nav .nav-link, .navbar-expand-xl .main-menu__list .nav-link, .navbar-expand-xl .navbar-nav .main-menu__list__item, .navbar-expand-xl .main-menu__list .main-menu__list__item { + padding-right: 0.5rem; + padding-left: 0.5rem; } + .navbar-expand-xl > .container, + .navbar-expand-xl > .container-fluid { + flex-wrap: nowrap; } + .navbar-expand-xl .navbar-collapse { + display: flex !important; + flex-basis: auto; } + .navbar-expand-xl .navbar-toggler { + display: none; } } + +.navbar-expand { + flex-flow: row nowrap; + justify-content: flex-start; } + .navbar-expand > .container, + .navbar-expand > .container-fluid { + padding-right: 0; + padding-left: 0; } + .navbar-expand .navbar-nav, .navbar-expand .main-menu__list { + flex-direction: row; } + .navbar-expand .navbar-nav .dropdown-menu, .navbar-expand .main-menu__list .dropdown-menu, .navbar-expand .navbar-nav .react-autosuggest__suggestions-container, .navbar-expand .main-menu__list .react-autosuggest__suggestions-container { + position: absolute; } + .navbar-expand .navbar-nav .nav-link, .navbar-expand .main-menu__list .nav-link, .navbar-expand .navbar-nav .main-menu__list__item, .navbar-expand .main-menu__list .main-menu__list__item { + padding-right: 0.5rem; + padding-left: 0.5rem; } + .navbar-expand > .container, + .navbar-expand > .container-fluid { + flex-wrap: nowrap; } + .navbar-expand .navbar-collapse { + display: flex !important; + flex-basis: auto; } + .navbar-expand .navbar-toggler { + display: none; } + +.navbar-light .navbar-brand { + color: rgba(0, 0, 0, 0.9); } + .navbar-light .navbar-brand:hover, .navbar-light .navbar-brand:focus { + color: rgba(0, 0, 0, 0.9); } + +.navbar-light .navbar-nav .nav-link, .navbar-light .main-menu__list .nav-link, .navbar-light .navbar-nav .main-menu__list__item, .navbar-light .main-menu__list .main-menu__list__item { + color: rgba(0, 0, 0, 0.5); } + .navbar-light .navbar-nav .nav-link:hover, .navbar-light .main-menu__list .nav-link:hover, .navbar-light .navbar-nav .main-menu__list__item:hover, .navbar-light .main-menu__list .main-menu__list__item:hover, .navbar-light .navbar-nav .nav-link:focus, .navbar-light .main-menu__list .nav-link:focus, .navbar-light .navbar-nav .main-menu__list__item:focus, .navbar-light .main-menu__list .main-menu__list__item:focus { + color: rgba(0, 0, 0, 0.7); } + .navbar-light .navbar-nav .nav-link.disabled, .navbar-light .main-menu__list .nav-link.disabled, .navbar-light .navbar-nav 
.disabled.main-menu__list__item, .navbar-light .main-menu__list .disabled.main-menu__list__item { + color: rgba(0, 0, 0, 0.3); } + +.navbar-light .navbar-nav .show > .nav-link, .navbar-light .main-menu__list .show > .nav-link, .navbar-light .navbar-nav .show > .main-menu__list__item, .navbar-light .main-menu__list .show > .main-menu__list__item, +.navbar-light .navbar-nav .active > .nav-link, +.navbar-light .main-menu__list .active > .nav-link, +.navbar-light .navbar-nav .react-autosuggest__suggestion--highlighted > .nav-link, +.navbar-light .main-menu__list .react-autosuggest__suggestion--highlighted > .nav-link, +.navbar-light .navbar-nav .active > .main-menu__list__item, +.navbar-light .main-menu__list .active > .main-menu__list__item, +.navbar-light .navbar-nav .react-autosuggest__suggestion--highlighted > .main-menu__list__item, +.navbar-light .main-menu__list .react-autosuggest__suggestion--highlighted > .main-menu__list__item, +.navbar-light .navbar-nav .nav-link.show, +.navbar-light .main-menu__list .nav-link.show, +.navbar-light .navbar-nav .show.main-menu__list__item, +.navbar-light .main-menu__list .show.main-menu__list__item, +.navbar-light .navbar-nav .nav-link.active, +.navbar-light .main-menu__list .nav-link.active, +.navbar-light .navbar-nav .nav-link.react-autosuggest__suggestion--highlighted, +.navbar-light .main-menu__list .nav-link.react-autosuggest__suggestion--highlighted, +.navbar-light .navbar-nav .react-autosuggest__suggestion--highlighted.main-menu__list__item, +.navbar-light .main-menu__list .react-autosuggest__suggestion--highlighted.main-menu__list__item, +.navbar-light .navbar-nav .active.main-menu__list__item, +.navbar-light .main-menu__list .active.main-menu__list__item { + color: rgba(0, 0, 0, 0.9); } + +.navbar-light .navbar-toggler { + color: rgba(0, 0, 0, 0.5); + border-color: rgba(0, 0, 0, 0.1); } + +.navbar-light .navbar-toggler-icon { + background-image: url("data:image/svg+xml;charset=utf8,%3Csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath stroke='rgba(0, 0, 0, 0.5)' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3E%3C/svg%3E"); } + +.navbar-light .navbar-text { + color: rgba(0, 0, 0, 0.5); } + .navbar-light .navbar-text a { + color: rgba(0, 0, 0, 0.9); } + .navbar-light .navbar-text a:hover, .navbar-light .navbar-text a:focus { + color: rgba(0, 0, 0, 0.9); } + +.navbar-dark .navbar-brand { + color: #fff; } + .navbar-dark .navbar-brand:hover, .navbar-dark .navbar-brand:focus { + color: #fff; } + +.navbar-dark .navbar-nav .nav-link, .navbar-dark .main-menu__list .nav-link, .navbar-dark .navbar-nav .main-menu__list__item, .navbar-dark .main-menu__list .main-menu__list__item { + color: rgba(255, 255, 255, 0.5); } + .navbar-dark .navbar-nav .nav-link:hover, .navbar-dark .main-menu__list .nav-link:hover, .navbar-dark .navbar-nav .main-menu__list__item:hover, .navbar-dark .main-menu__list .main-menu__list__item:hover, .navbar-dark .navbar-nav .nav-link:focus, .navbar-dark .main-menu__list .nav-link:focus, .navbar-dark .navbar-nav .main-menu__list__item:focus, .navbar-dark .main-menu__list .main-menu__list__item:focus { + color: rgba(255, 255, 255, 0.75); } + .navbar-dark .navbar-nav .nav-link.disabled, .navbar-dark .main-menu__list .nav-link.disabled, .navbar-dark .navbar-nav .disabled.main-menu__list__item, .navbar-dark .main-menu__list .disabled.main-menu__list__item { + color: rgba(255, 255, 255, 0.25); } + +.navbar-dark .navbar-nav .show > .nav-link, .navbar-dark .main-menu__list .show > 
.nav-link, .navbar-dark .navbar-nav .show > .main-menu__list__item, .navbar-dark .main-menu__list .show > .main-menu__list__item, +.navbar-dark .navbar-nav .active > .nav-link, +.navbar-dark .main-menu__list .active > .nav-link, +.navbar-dark .navbar-nav .react-autosuggest__suggestion--highlighted > .nav-link, +.navbar-dark .main-menu__list .react-autosuggest__suggestion--highlighted > .nav-link, +.navbar-dark .navbar-nav .active > .main-menu__list__item, +.navbar-dark .main-menu__list .active > .main-menu__list__item, +.navbar-dark .navbar-nav .react-autosuggest__suggestion--highlighted > .main-menu__list__item, +.navbar-dark .main-menu__list .react-autosuggest__suggestion--highlighted > .main-menu__list__item, +.navbar-dark .navbar-nav .nav-link.show, +.navbar-dark .main-menu__list .nav-link.show, +.navbar-dark .navbar-nav .show.main-menu__list__item, +.navbar-dark .main-menu__list .show.main-menu__list__item, +.navbar-dark .navbar-nav .nav-link.active, +.navbar-dark .main-menu__list .nav-link.active, +.navbar-dark .navbar-nav .nav-link.react-autosuggest__suggestion--highlighted, +.navbar-dark .main-menu__list .nav-link.react-autosuggest__suggestion--highlighted, +.navbar-dark .navbar-nav .react-autosuggest__suggestion--highlighted.main-menu__list__item, +.navbar-dark .main-menu__list .react-autosuggest__suggestion--highlighted.main-menu__list__item, +.navbar-dark .navbar-nav .active.main-menu__list__item, +.navbar-dark .main-menu__list .active.main-menu__list__item { + color: #fff; } + +.navbar-dark .navbar-toggler { + color: rgba(255, 255, 255, 0.5); + border-color: rgba(255, 255, 255, 0.1); } + +.navbar-dark .navbar-toggler-icon { + background-image: url("data:image/svg+xml;charset=utf8,%3Csvg viewBox='0 0 30 30' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath stroke='rgba(255, 255, 255, 0.5)' stroke-width='2' stroke-linecap='round' stroke-miterlimit='10' d='M4 7h22M4 15h22M4 23h22'/%3E%3C/svg%3E"); } + +.navbar-dark .navbar-text { + color: rgba(255, 255, 255, 0.5); } + .navbar-dark .navbar-text a { + color: #fff; } + .navbar-dark .navbar-text a:hover, .navbar-dark .navbar-text a:focus { + color: #fff; } + +h1, h2, h3, h4, h5, h6, +.h1, .h2, .h3, .h4, .h5, .h6 { + margin-bottom: 0.5rem; + font-family: inherit; + font-weight: 500; + line-height: 1.2; + color: inherit; } + +h1, .h1 { + font-size: 2.5rem; } + +h2, .h2 { + font-size: 2rem; } + +h3, .h3 { + font-size: 1.75rem; } + +h4, .h4 { + font-size: 1.5rem; } + +h5, .h5 { + font-size: 1.25rem; } + +h6, .h6 { + font-size: 1rem; } + +.lead { + font-size: 1.25rem; + font-weight: 300; } + +.display-1 { + font-size: 6rem; + font-weight: 300; + line-height: 1.2; } + +.display-2 { + font-size: 5.5rem; + font-weight: 300; + line-height: 1.2; } + +.display-3 { + font-size: 4.5rem; + font-weight: 300; + line-height: 1.2; } + +.display-4 { + font-size: 3.5rem; + font-weight: 300; + line-height: 1.2; } + +hr { + margin-top: 1rem; + margin-bottom: 1rem; + border: 0; + border-top: 1px solid rgba(0, 0, 0, 0.1); } + +small, +.small { + font-size: 80%; + font-weight: 400; } + +mark, +.mark { + padding: 0.2em; + background-color: #fcf8e3; } + +.list-unstyled { + padding-left: 0; + list-style: none; } + +.list-inline { + padding-left: 0; + list-style: none; } + +.list-inline-item { + display: inline-block; } + .list-inline-item:not(:last-child) { + margin-right: 0.5rem; } + +.initialism { + font-size: 90%; + text-transform: uppercase; } + +.blockquote { + margin-bottom: 1rem; + font-size: 1.25rem; } + +.blockquote-footer { + display: block; + font-size: 
80%; + color: #6c757d; } + .blockquote-footer::before { + content: "\2014 \00A0"; } + +.d-none { + display: none !important; } + +.d-inline { + display: inline !important; } + +.d-inline-block { + display: inline-block !important; } + +.d-block { + display: block !important; } + +.d-table { + display: table !important; } + +.d-table-row { + display: table-row !important; } + +.d-table-cell { + display: table-cell !important; } + +.d-flex, .search-filter { + display: flex !important; } + +.d-inline-flex { + display: inline-flex !important; } + +@media (min-width: 576px) { + .d-sm-none { + display: none !important; } + .d-sm-inline { + display: inline !important; } + .d-sm-inline-block { + display: inline-block !important; } + .d-sm-block { + display: block !important; } + .d-sm-table { + display: table !important; } + .d-sm-table-row { + display: table-row !important; } + .d-sm-table-cell { + display: table-cell !important; } + .d-sm-flex { + display: flex !important; } + .d-sm-inline-flex { + display: inline-flex !important; } } + +@media (min-width: 768px) { + .d-md-none { + display: none !important; } + .d-md-inline { + display: inline !important; } + .d-md-inline-block { + display: inline-block !important; } + .d-md-block { + display: block !important; } + .d-md-table { + display: table !important; } + .d-md-table-row { + display: table-row !important; } + .d-md-table-cell { + display: table-cell !important; } + .d-md-flex { + display: flex !important; } + .d-md-inline-flex { + display: inline-flex !important; } } + +@media (min-width: 992px) { + .d-lg-none { + display: none !important; } + .d-lg-inline { + display: inline !important; } + .d-lg-inline-block { + display: inline-block !important; } + .d-lg-block { + display: block !important; } + .d-lg-table { + display: table !important; } + .d-lg-table-row { + display: table-row !important; } + .d-lg-table-cell { + display: table-cell !important; } + .d-lg-flex { + display: flex !important; } + .d-lg-inline-flex { + display: inline-flex !important; } } + +@media (min-width: 1200px) { + .d-xl-none { + display: none !important; } + .d-xl-inline { + display: inline !important; } + .d-xl-inline-block { + display: inline-block !important; } + .d-xl-block { + display: block !important; } + .d-xl-table { + display: table !important; } + .d-xl-table-row { + display: table-row !important; } + .d-xl-table-cell { + display: table-cell !important; } + .d-xl-flex { + display: flex !important; } + .d-xl-inline-flex { + display: inline-flex !important; } } + +@media print { + .d-print-none { + display: none !important; } + .d-print-inline { + display: inline !important; } + .d-print-inline-block { + display: inline-block !important; } + .d-print-block { + display: block !important; } + .d-print-table { + display: table !important; } + .d-print-table-row { + display: table-row !important; } + .d-print-table-cell { + display: table-cell !important; } + .d-print-flex { + display: flex !important; } + .d-print-inline-flex { + display: inline-flex !important; } } + +.flex-row { + flex-direction: row !important; } + +.flex-column { + flex-direction: column !important; } + +.flex-row-reverse { + flex-direction: row-reverse !important; } + +.flex-column-reverse { + flex-direction: column-reverse !important; } + +.flex-wrap { + flex-wrap: wrap !important; } + +.flex-nowrap { + flex-wrap: nowrap !important; } + +.flex-wrap-reverse { + flex-wrap: wrap-reverse !important; } + +.flex-fill { + flex: 1 1 auto !important; } + +.flex-grow-0 { + flex-grow: 0 
!important; } + +.flex-grow-1 { + flex-grow: 1 !important; } + +.flex-shrink-0 { + flex-shrink: 0 !important; } + +.flex-shrink-1 { + flex-shrink: 1 !important; } + +.justify-content-start { + justify-content: flex-start !important; } + +.justify-content-end { + justify-content: flex-end !important; } + +.justify-content-center { + justify-content: center !important; } + +.justify-content-between, .search-filter { + justify-content: space-between !important; } + +.justify-content-around { + justify-content: space-around !important; } + +.align-items-start { + align-items: flex-start !important; } + +.align-items-end { + align-items: flex-end !important; } + +.align-items-center, .search-filter { + align-items: center !important; } + +.align-items-baseline { + align-items: baseline !important; } + +.align-items-stretch { + align-items: stretch !important; } + +.align-content-start { + align-content: flex-start !important; } + +.align-content-end { + align-content: flex-end !important; } + +.align-content-center { + align-content: center !important; } + +.align-content-between { + align-content: space-between !important; } + +.align-content-around { + align-content: space-around !important; } + +.align-content-stretch { + align-content: stretch !important; } + +.align-self-auto { + align-self: auto !important; } + +.align-self-start { + align-self: flex-start !important; } + +.align-self-end { + align-self: flex-end !important; } + +.align-self-center { + align-self: center !important; } + +.align-self-baseline { + align-self: baseline !important; } + +.align-self-stretch { + align-self: stretch !important; } + +@media (min-width: 576px) { + .flex-sm-row { + flex-direction: row !important; } + .flex-sm-column { + flex-direction: column !important; } + .flex-sm-row-reverse { + flex-direction: row-reverse !important; } + .flex-sm-column-reverse { + flex-direction: column-reverse !important; } + .flex-sm-wrap { + flex-wrap: wrap !important; } + .flex-sm-nowrap { + flex-wrap: nowrap !important; } + .flex-sm-wrap-reverse { + flex-wrap: wrap-reverse !important; } + .flex-sm-fill { + flex: 1 1 auto !important; } + .flex-sm-grow-0 { + flex-grow: 0 !important; } + .flex-sm-grow-1 { + flex-grow: 1 !important; } + .flex-sm-shrink-0 { + flex-shrink: 0 !important; } + .flex-sm-shrink-1 { + flex-shrink: 1 !important; } + .justify-content-sm-start { + justify-content: flex-start !important; } + .justify-content-sm-end { + justify-content: flex-end !important; } + .justify-content-sm-center { + justify-content: center !important; } + .justify-content-sm-between { + justify-content: space-between !important; } + .justify-content-sm-around { + justify-content: space-around !important; } + .align-items-sm-start { + align-items: flex-start !important; } + .align-items-sm-end { + align-items: flex-end !important; } + .align-items-sm-center { + align-items: center !important; } + .align-items-sm-baseline { + align-items: baseline !important; } + .align-items-sm-stretch { + align-items: stretch !important; } + .align-content-sm-start { + align-content: flex-start !important; } + .align-content-sm-end { + align-content: flex-end !important; } + .align-content-sm-center { + align-content: center !important; } + .align-content-sm-between { + align-content: space-between !important; } + .align-content-sm-around { + align-content: space-around !important; } + .align-content-sm-stretch { + align-content: stretch !important; } + .align-self-sm-auto { + align-self: auto !important; } + .align-self-sm-start { + align-self: 
flex-start !important; } + .align-self-sm-end { + align-self: flex-end !important; } + .align-self-sm-center { + align-self: center !important; } + .align-self-sm-baseline { + align-self: baseline !important; } + .align-self-sm-stretch { + align-self: stretch !important; } } + +@media (min-width: 768px) { + .flex-md-row { + flex-direction: row !important; } + .flex-md-column { + flex-direction: column !important; } + .flex-md-row-reverse { + flex-direction: row-reverse !important; } + .flex-md-column-reverse { + flex-direction: column-reverse !important; } + .flex-md-wrap { + flex-wrap: wrap !important; } + .flex-md-nowrap { + flex-wrap: nowrap !important; } + .flex-md-wrap-reverse { + flex-wrap: wrap-reverse !important; } + .flex-md-fill { + flex: 1 1 auto !important; } + .flex-md-grow-0 { + flex-grow: 0 !important; } + .flex-md-grow-1 { + flex-grow: 1 !important; } + .flex-md-shrink-0 { + flex-shrink: 0 !important; } + .flex-md-shrink-1 { + flex-shrink: 1 !important; } + .justify-content-md-start { + justify-content: flex-start !important; } + .justify-content-md-end { + justify-content: flex-end !important; } + .justify-content-md-center { + justify-content: center !important; } + .justify-content-md-between { + justify-content: space-between !important; } + .justify-content-md-around { + justify-content: space-around !important; } + .align-items-md-start { + align-items: flex-start !important; } + .align-items-md-end { + align-items: flex-end !important; } + .align-items-md-center { + align-items: center !important; } + .align-items-md-baseline { + align-items: baseline !important; } + .align-items-md-stretch { + align-items: stretch !important; } + .align-content-md-start { + align-content: flex-start !important; } + .align-content-md-end { + align-content: flex-end !important; } + .align-content-md-center { + align-content: center !important; } + .align-content-md-between { + align-content: space-between !important; } + .align-content-md-around { + align-content: space-around !important; } + .align-content-md-stretch { + align-content: stretch !important; } + .align-self-md-auto { + align-self: auto !important; } + .align-self-md-start { + align-self: flex-start !important; } + .align-self-md-end { + align-self: flex-end !important; } + .align-self-md-center { + align-self: center !important; } + .align-self-md-baseline { + align-self: baseline !important; } + .align-self-md-stretch { + align-self: stretch !important; } } + +@media (min-width: 992px) { + .flex-lg-row { + flex-direction: row !important; } + .flex-lg-column { + flex-direction: column !important; } + .flex-lg-row-reverse { + flex-direction: row-reverse !important; } + .flex-lg-column-reverse { + flex-direction: column-reverse !important; } + .flex-lg-wrap { + flex-wrap: wrap !important; } + .flex-lg-nowrap { + flex-wrap: nowrap !important; } + .flex-lg-wrap-reverse { + flex-wrap: wrap-reverse !important; } + .flex-lg-fill { + flex: 1 1 auto !important; } + .flex-lg-grow-0 { + flex-grow: 0 !important; } + .flex-lg-grow-1 { + flex-grow: 1 !important; } + .flex-lg-shrink-0 { + flex-shrink: 0 !important; } + .flex-lg-shrink-1 { + flex-shrink: 1 !important; } + .justify-content-lg-start { + justify-content: flex-start !important; } + .justify-content-lg-end { + justify-content: flex-end !important; } + .justify-content-lg-center { + justify-content: center !important; } + .justify-content-lg-between { + justify-content: space-between !important; } + .justify-content-lg-around { + justify-content: space-around !important; } + 
.align-items-lg-start { + align-items: flex-start !important; } + .align-items-lg-end { + align-items: flex-end !important; } + .align-items-lg-center { + align-items: center !important; } + .align-items-lg-baseline { + align-items: baseline !important; } + .align-items-lg-stretch { + align-items: stretch !important; } + .align-content-lg-start { + align-content: flex-start !important; } + .align-content-lg-end { + align-content: flex-end !important; } + .align-content-lg-center { + align-content: center !important; } + .align-content-lg-between { + align-content: space-between !important; } + .align-content-lg-around { + align-content: space-around !important; } + .align-content-lg-stretch { + align-content: stretch !important; } + .align-self-lg-auto { + align-self: auto !important; } + .align-self-lg-start { + align-self: flex-start !important; } + .align-self-lg-end { + align-self: flex-end !important; } + .align-self-lg-center { + align-self: center !important; } + .align-self-lg-baseline { + align-self: baseline !important; } + .align-self-lg-stretch { + align-self: stretch !important; } } + +@media (min-width: 1200px) { + .flex-xl-row { + flex-direction: row !important; } + .flex-xl-column { + flex-direction: column !important; } + .flex-xl-row-reverse { + flex-direction: row-reverse !important; } + .flex-xl-column-reverse { + flex-direction: column-reverse !important; } + .flex-xl-wrap { + flex-wrap: wrap !important; } + .flex-xl-nowrap { + flex-wrap: nowrap !important; } + .flex-xl-wrap-reverse { + flex-wrap: wrap-reverse !important; } + .flex-xl-fill { + flex: 1 1 auto !important; } + .flex-xl-grow-0 { + flex-grow: 0 !important; } + .flex-xl-grow-1 { + flex-grow: 1 !important; } + .flex-xl-shrink-0 { + flex-shrink: 0 !important; } + .flex-xl-shrink-1 { + flex-shrink: 1 !important; } + .justify-content-xl-start { + justify-content: flex-start !important; } + .justify-content-xl-end { + justify-content: flex-end !important; } + .justify-content-xl-center { + justify-content: center !important; } + .justify-content-xl-between { + justify-content: space-between !important; } + .justify-content-xl-around { + justify-content: space-around !important; } + .align-items-xl-start { + align-items: flex-start !important; } + .align-items-xl-end { + align-items: flex-end !important; } + .align-items-xl-center { + align-items: center !important; } + .align-items-xl-baseline { + align-items: baseline !important; } + .align-items-xl-stretch { + align-items: stretch !important; } + .align-content-xl-start { + align-content: flex-start !important; } + .align-content-xl-end { + align-content: flex-end !important; } + .align-content-xl-center { + align-content: center !important; } + .align-content-xl-between { + align-content: space-between !important; } + .align-content-xl-around { + align-content: space-around !important; } + .align-content-xl-stretch { + align-content: stretch !important; } + .align-self-xl-auto { + align-self: auto !important; } + .align-self-xl-start { + align-self: flex-start !important; } + .align-self-xl-end { + align-self: flex-end !important; } + .align-self-xl-center { + align-self: center !important; } + .align-self-xl-baseline { + align-self: baseline !important; } + .align-self-xl-stretch { + align-self: stretch !important; } } + +.text-monospace { + font-family: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; } + +.text-justify { + text-align: justify !important; } + +.text-nowrap { + white-space: nowrap !important; } + 
+.text-truncate { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; } + +.text-left { + text-align: left !important; } + +.text-right { + text-align: right !important; } + +.text-center, .course-glimpse__body, .course-glimpse__date, .search-template__title { + text-align: center !important; } + +@media (min-width: 576px) { + .text-sm-left { + text-align: left !important; } + .text-sm-right { + text-align: right !important; } + .text-sm-center { + text-align: center !important; } } + +@media (min-width: 768px) { + .text-md-left { + text-align: left !important; } + .text-md-right { + text-align: right !important; } + .text-md-center { + text-align: center !important; } } + +@media (min-width: 992px) { + .text-lg-left { + text-align: left !important; } + .text-lg-right { + text-align: right !important; } + .text-lg-center { + text-align: center !important; } } + +@media (min-width: 1200px) { + .text-xl-left { + text-align: left !important; } + .text-xl-right { + text-align: right !important; } + .text-xl-center { + text-align: center !important; } } + +.text-lowercase { + text-transform: lowercase !important; } + +.text-uppercase { + text-transform: uppercase !important; } + +.text-capitalize { + text-transform: capitalize !important; } + +.font-weight-light { + font-weight: 300 !important; } + +.font-weight-normal { + font-weight: 400 !important; } + +.font-weight-bold { + font-weight: 700 !important; } + +.font-italic { + font-style: italic !important; } + +.text-white { + color: #fff !important; } + +.text-primary { + color: #007bff !important; } + +a.text-primary:hover, a.text-primary:focus { + color: #0062cc !important; } + +.text-secondary { + color: #6c757d !important; } + +a.text-secondary:hover, a.text-secondary:focus { + color: #545b62 !important; } + +.text-success { + color: #28a745 !important; } + +a.text-success:hover, a.text-success:focus { + color: #1e7e34 !important; } + +.text-info { + color: #17a2b8 !important; } + +a.text-info:hover, a.text-info:focus { + color: #117a8b !important; } + +.text-warning { + color: #ffc107 !important; } + +a.text-warning:hover, a.text-warning:focus { + color: #d39e00 !important; } + +.text-danger { + color: #dc3545 !important; } + +a.text-danger:hover, a.text-danger:focus { + color: #bd2130 !important; } + +.text-light { + color: #f8f9fa !important; } + +a.text-light:hover, a.text-light:focus { + color: #dae0e5 !important; } + +.text-dark { + color: #343a40 !important; } + +a.text-dark:hover, a.text-dark:focus { + color: #1d2124 !important; } + +.text-body { + color: #212529 !important; } + +.text-muted { + color: #6c757d !important; } + +.text-black-50 { + color: rgba(0, 0, 0, 0.5) !important; } + +.text-white-50 { + color: rgba(255, 255, 255, 0.5) !important; } + +.text-hide { + font: 0/0 a; + color: transparent; + text-shadow: none; + background-color: transparent; + border: 0; } + +.react-autosuggest__container { + position: relative; + margin-bottom: 20px; } + +.react-autosuggest__suggestions-container { + width: 100%; } + .react-autosuggest__suggestions-container--open { + display: block; } + +.react-autosuggest__suggestions-list { + display: block; } + +.react-autosuggest__suggestion:hover { + cursor: pointer; } + +.course-glimpse-container { + position: relative; + width: 100%; + min-height: 1px; + padding-right: 10px; + padding-left: 10px; } + @media (min-width: 576px) { + .course-glimpse-container { + flex: 0 0 50%; + max-width: 50%; } } + @media (min-width: 768px) { + .course-glimpse-container { + flex: 0 0 
33.33333%; + max-width: 33.33333%; } } + @media (min-width: 992px) { + .course-glimpse-container { + flex: 0 0 25%; + max-width: 25%; } } + +.course-glimpse { + margin-bottom: 20px; } + .course-glimpse:hover { + border-color: #007bff; + cursor: pointer; } + .course-glimpse:hover .course-glimpse__body__title { + color: #007bff; } + .course-glimpse__body__title { + height: 4.5rem; + overflow: hidden; + font-size: .9em; } + .course-glimpse__body__org { + color: #6c757d; + height: 3rem; + overflow: hidden; } + +.course-glimpse-list { + display: flex; + flex-wrap: wrap; + margin-right: -10px; + margin-left: -10px; } + +.search { + display: flex; + flex-wrap: wrap; + margin-right: -10px; + margin-left: -10px; } + .search__filters { + position: relative; + width: 100%; + min-height: 1px; + padding-right: 10px; + padding-left: 10px; + flex: 0 0 25%; + max-width: 25%; } + .search__results { + position: relative; + width: 100%; + min-height: 1px; + padding-right: 10px; + padding-left: 10px; + flex: 0 0 75%; + max-width: 75%; } + +.search-filter { + padding: 0.375rem 1.25rem; + background: inherit; + border-color: rgba(255, 255, 255, 0.15); + color: rgba(255, 255, 255, 0.85); + cursor: pointer; } + .search-filter:first-child { + border-top-color: rgba(255, 255, 255, 0.3); } + .search-filter:hover, .search-filter:focus { + background: inherit; + color: white; } + .search-filter.active, .search-filter.react-autosuggest__suggestion--highlighted { + background: inherit; + color: white; + font-weight: 700; + border-color: white; } + +.search-filter-group { + margin: 2rem; } + .search-filter-group__title { + padding: 0 1.25rem; } + +.search-filters-pane { + width: 100%; + height: 100%; + background: #343a40; + color: white; + overflow: hidden; } + .search-filters-pane__title { + padding: 2rem 0; + text-align: center; } + +.organization-detail { + width: 100%; + padding-right: 10px; + padding-left: 10px; + margin-right: auto; + margin-left: auto; + background: white; } + @media (min-width: 576px) { + .organization-detail { + max-width: 540px; } } + @media (min-width: 768px) { + .organization-detail { + max-width: 720px; } } + @media (min-width: 992px) { + .organization-detail { + max-width: 960px; } } + @media (min-width: 1200px) { + .organization-detail { + max-width: 1140px; } } + .organization-detail__banner { + display: flex; + flex-wrap: wrap; + margin-right: -10px; + margin-left: -10px; + position: relative; + width: calc(100% + 20px); + height: 15rem; + overflow: hidden; } + @media (min-width: 768px) { + .organization-detail__banner { + height: 20rem; } } + @media (min-width: 1200px) { + .organization-detail__banner { + height: 25rem; } } + .organization-detail__banner img { + position: absolute; + top: -1000%; + right: -1000%; + bottom: -1000%; + left: -1000%; + min-width: 100%; + min-height: 100%; + margin: auto; } + .organization-detail__logo { + position: relative; + overflow: hidden; + border: 1px solid #bdc6d0; + width: 11.25rem; + height: 11.25rem; + margin: -5.625rem auto 20px; } + @media (min-width: 768px) { + .organization-detail__logo { + float: right; + width: 15rem; + height: 15rem; + margin: -10rem 3rem 20px; } } + @media (min-width: 1200px) { + .organization-detail__logo { + width: 18.75rem; + height: 18.75rem; + margin: -12.5rem 8rem 20px; } } + .organization-detail__logo img { + position: absolute; + top: -1000%; + right: -1000%; + bottom: -1000%; + left: -1000%; + width: 100%; + min-height: 100%; + margin: auto; } + @media (min-width: 768px) { + .organization-detail__title { + 
margin: 1rem 20px; } } + @media (min-width: 1200px) { + .organization-detail__title { + margin: 1.625rem 20px; } } + .organization-detail__content { + clear: both; + display: flex; + flex-wrap: wrap; + margin-right: -10px; + margin-left: -10px; + padding-left: 20px; + padding-right: 20px; } + .organization-detail__content__description, .organization-detail__content__courses { + position: relative; + width: 100%; + min-height: 1px; + padding-right: 10px; + padding-left: 10px; + flex: 0 0 100%; + max-width: 100%; } + +.search-template__content { + width: 100%; + padding-right: 10px; + padding-left: 10px; + margin-right: auto; + margin-left: auto; } + +.large-banner { + position: relative; + height: 18rem; + max-width: 100%; + overflow: hidden; + display: flex; + justify-content: center; + align-items: center; + background-color: #343a40; + color: white; + text-shadow: 0 0 3px #343a40; } + @media (min-width: 768px) { + .large-banner { + height: 20rem; } } + @media (min-width: 992px) { + .large-banner { + height: 22.5vw; } } + .large-banner__background { + position: absolute; + min-width: 100%; + min-height: 100%; } + .large-banner__title, .large-banner__logo { + position: relative; } + .large-banner__title { + margin: 0 0.5rem 0 0; + font-size: 1.5rem; + text-align: center; } + @media (min-width: 576px) { + .large-banner__title { + font-size: 2rem; } } + @media (min-width: 768px) { + .large-banner__title { + font-size: 2.5rem; } } + .large-banner__logo { + margin: 0 0 0 0.5rem; + max-width: 10rem; + max-height: 5rem; } + @media (min-width: 576px) { + .large-banner__logo { + max-width: 11.25rem; + max-height: 5.625rem; } } + @media (min-width: 768px) { + .large-banner__logo { + max-width: 12.5rem; + max-height: 6.25rem; } } diff --git a/src/richie/apps/persons/models.py b/src/richie/apps/persons/models.py index 34c24435e8..41d0e3fba3 100644 --- a/src/richie/apps/persons/models.py +++ b/src/richie/apps/persons/models.py @@ -86,7 +86,7 @@ class PersonPluginModel(CMSPlugin): to their Person instance """ - page = models.ForeignKey(Page) + page = models.ForeignKey(Page, limit_choices_to={"person__isnull": False}) class Meta: verbose_name = _("person plugin model") diff --git a/strings/src/richie-front/js/components/CourseGlimpse/CourseGlimpse.json b/strings/src/richie-front/js/components/CourseGlimpse/CourseGlimpse.json new file mode 100644 index 0000000000..a9f1d818db --- /dev/null +++ b/strings/src/richie-front/js/components/CourseGlimpse/CourseGlimpse.json @@ -0,0 +1,12 @@ +[ + { + "id": "components.CourseGlimpse.logoAltText", + "description": "Alternate text for the course logo in a course glimpse.", + "defaultMessage": "Logo for {courseTitle} course." 
+ }, + { + "id": "components.CourseGlimpse.startsOn", + "description": "Shows the start date for a course in a course glimpse in a short format such as Sep 4, '1986'", + "defaultMessage": "Starts on {date}" + } +] \ No newline at end of file diff --git a/strings/src/richie-front/js/components/SearchSuggestField/SearchSuggestField.json b/strings/src/richie-front/js/components/SearchSuggestField/SearchSuggestField.json new file mode 100644 index 0000000000..f9249981e5 --- /dev/null +++ b/strings/src/richie-front/js/components/SearchSuggestField/SearchSuggestField.json @@ -0,0 +1,7 @@ +[ + { + "id": "components.SearchSuggestField.searchFieldPlaceholder", + "description": "Placeholder text displayed in the search field when it is empty.", + "defaultMessage": "Search for courses, organizations, subjects" + } +] \ No newline at end of file diff --git a/strings/src/richie-front/js/settings.json b/strings/src/richie-front/js/settings.json new file mode 100644 index 0000000000..f874150618 --- /dev/null +++ b/strings/src/richie-front/js/settings.json @@ -0,0 +1,52 @@ +[ + { + "id": "settings.filters.availability.title", + "description": "Title for the \"Availability\" section of course filters (eg. Coming soon / Current session etc.)", + "defaultMessage": "Availability" + }, + { + "id": "settings.filters.availability.values.coming_soon", + "description": "Possible value for the \"Availability\" filter for courses", + "defaultMessage": "Coming soon" + }, + { + "id": "settings.filters.availability.values.current", + "description": "Possible value for the \"Availability\" filter for courses", + "defaultMessage": "Current session" + }, + { + "id": "settings.filters.language.title", + "description": "Title for the \"Language\" section of course filters (eg. FR / EN etc.)", + "defaultMessage": "Language" + }, + { + "id": "settings.filters.language.en", + "description": "Language", + "defaultMessage": "English" + }, + { + "id": "settings.filters.language.fr", + "description": "Language", + "defaultMessage": "French" + }, + { + "id": "settings.filters.new.title", + "description": "Title for the \"New\" section of course filters", + "defaultMessage": "New courses" + }, + { + "id": "settings.filters.new.new", + "description": "Possible balue for the \"New\" filter for courses", + "defaultMessage": "First session" + }, + { + "id": "settings.filters.organizations.title", + "description": "Title for the \"Organizations\" section of course filters", + "defaultMessage": "Organizations" + }, + { + "id": "settings.filters.subjects.title", + "description": "Title for the \"Subjects\" section of course filters", + "defaultMessage": "Subjects" + } +] \ No newline at end of file diff --git a/strings/src/richie-front/js/utils/commonMessages.json b/strings/src/richie-front/js/utils/commonMessages.json new file mode 100644 index 0000000000..550cc7ff03 --- /dev/null +++ b/strings/src/richie-front/js/utils/commonMessages.json @@ -0,0 +1,17 @@ +[ + { + "id": "common.coursesHumanName", + "description": "Title/name to use when we display a list of courses", + "defaultMessage": "Courses" + }, + { + "id": "common.organizationsHumanName", + "description": "Title/name to use when we display a list of organizations", + "defaultMessage": "Organizations" + }, + { + "id": "common.subjectsHumanName", + "description": "Title/name to use when we display a list of subjects", + "defaultMessage": "Subjects" + } +] \ No newline at end of file diff --git a/src/richie/apps/persons/tests/test_person_plugin.py 
b/tests/apps/persons/test_person_plugin.py similarity index 80% rename from src/richie/apps/persons/tests/test_person_plugin.py rename to tests/apps/persons/test_person_plugin.py index 18acd59cbf..077243ba9b 100644 --- a/src/richie/apps/persons/tests/test_person_plugin.py +++ b/tests/apps/persons/test_person_plugin.py @@ -2,17 +2,19 @@ """ Unit tests for the Person plugin and its model """ +from django import forms +from django.conf import settings from django.test import TestCase -from cms.api import add_plugin +from cms.api import add_plugin, create_page from cmsplugin_plain_text.cms_plugins import PlaintextPlugin from djangocms_picture.cms_plugins import PicturePlugin from richie.apps.core.factories import FilerImageFactory, UserFactory from richie.apps.core.helpers import create_i18n_page - -from ..cms_plugins import PersonPlugin -from ..factories import PersonFactory +from richie.apps.persons.cms_plugins import PersonPlugin +from richie.apps.persons.factories import PersonFactory +from richie.apps.persons.models import PersonPluginModel class PersonPluginTestCase(TestCase): @@ -20,6 +22,24 @@ class PersonPluginTestCase(TestCase): Test that PersonPlugin correctly displays a Person's page placeholders content """ + def test_person_plugin_form_page_choices(self): + """ + The form to create a person plugin should only list person pages in the select box. + """ + + class PersonPluginModelForm(forms.ModelForm): + class Meta: + model = PersonPluginModel + exclude = () + + person = PersonFactory() + other_page_title = "other page" + create_page(other_page_title, "richie/fullwidth.html", settings.LANGUAGE_CODE) + plugin_form = PersonPluginModelForm() + print(plugin_form) + self.assertIn(person.get_full_name(), plugin_form.as_table()) + self.assertNotIn(other_page_title, plugin_form.as_table()) + def test_person_plugin_render(self): """ Test that a PersonPlugin correctly renders person's page specific information
wagtail__wagtail-8473
"Sort menu order" button even with missing permissions ### Issue Summary Currently, the "Sort menu order"-button in the "more buttons"-dropdown is shown to users, which aren't allowed to change the order. Normally that's not a big issue, because clicking the link, which appends `?ordering=ord`, doesn't allow the user to change the order (drag&drop). But it still triggers the adapted view where all pages are rendered in one listing, so that could be avoided. **Possible fix:** I think this line https://github.com/wagtail/wagtail/blob/d308d6930a728208281cbfa426fe066951ca6736/wagtail/admin/wagtail_hooks.py#L353 should be changed to `if is_parent and page_perms.can_reorder_children():` ### Steps to Reproduce 1. Start a new project with `wagtail start myproject` 2. Create a page structure with some subpages which could be sorted. 3. Create an user and assign him to the existing "Editors" group. This group is not allowed to re-order the pages, because they cannot publish pages. 4. Login with that user and find the "Sort menu order"-button ### Technical details * Wagtail version: 2.16.2
[ { "content": "from django.conf import settings\nfrom django.contrib.auth.models import Permission\nfrom django.urls import reverse\nfrom django.utils.http import urlencode\nfrom django.utils.translation import gettext\nfrom django.utils.translation import gettext_lazy as _\nfrom draftjs_exporter.dom import DOM\n\nimport wagtail.admin.rich_text.editors.draftail.features as draftail_features\nfrom wagtail import __version__, hooks\nfrom wagtail.admin.admin_url_finder import (\n ModelAdminURLFinder,\n register_admin_url_finder,\n)\nfrom wagtail.admin.auth import user_has_any_page_permission\nfrom wagtail.admin.forms.collections import GroupCollectionManagementPermissionFormSet\nfrom wagtail.admin.menu import MenuItem, SubmenuMenuItem, reports_menu, settings_menu\nfrom wagtail.admin.navigation import get_explorable_root_page\nfrom wagtail.admin.rich_text.converters.contentstate import link_entity\nfrom wagtail.admin.rich_text.converters.editor_html import (\n LinkTypeRule,\n PageLinkHandler,\n WhitelistRule,\n)\nfrom wagtail.admin.rich_text.converters.html_to_contentstate import (\n BlockElementHandler,\n ExternalLinkElementHandler,\n HorizontalRuleHandler,\n InlineStyleElementHandler,\n ListElementHandler,\n ListItemElementHandler,\n PageLinkElementHandler,\n)\nfrom wagtail.admin.search import SearchArea\nfrom wagtail.admin.site_summary import PagesSummaryItem\nfrom wagtail.admin.ui.sidebar import (\n PageExplorerMenuItem as PageExplorerMenuItemComponent,\n)\nfrom wagtail.admin.ui.sidebar import SubMenuItem as SubMenuItemComponent\nfrom wagtail.admin.views.pages.bulk_actions import (\n DeleteBulkAction,\n MoveBulkAction,\n PublishBulkAction,\n UnpublishBulkAction,\n)\nfrom wagtail.admin.viewsets import viewsets\nfrom wagtail.admin.widgets import Button, ButtonWithDropdownFromHook, PageListingButton\nfrom wagtail.models import Collection, Page, Task, UserPagePermissionsProxy, Workflow\nfrom wagtail.permissions import (\n collection_permission_policy,\n task_permission_policy,\n workflow_permission_policy,\n)\nfrom wagtail.whitelist import allow_without_attributes, attribute_rule, check_url\n\n\nclass ExplorerMenuItem(MenuItem):\n def is_shown(self, request):\n return user_has_any_page_permission(request.user)\n\n def get_context(self, request):\n context = super().get_context(request)\n start_page = get_explorable_root_page(request.user)\n\n if start_page:\n context[\"start_page_id\"] = start_page.id\n\n return context\n\n def render_component(self, request):\n start_page = get_explorable_root_page(request.user)\n\n if start_page:\n return PageExplorerMenuItemComponent(\n self.name,\n self.label,\n self.url,\n start_page.id,\n icon_name=self.icon_name,\n classnames=self.classnames,\n )\n else:\n return super().render_component(request)\n\n\[email protected](\"register_admin_menu_item\")\ndef register_explorer_menu_item():\n return ExplorerMenuItem(\n _(\"Pages\"),\n reverse(\"wagtailadmin_explore_root\"),\n name=\"explorer\",\n icon_name=\"folder-open-inverse\",\n order=100,\n )\n\n\nclass SettingsMenuItem(SubmenuMenuItem):\n def render_component(self, request):\n return SubMenuItemComponent(\n self.name,\n self.label,\n self.menu.render_component(request),\n icon_name=self.icon_name,\n classnames=self.classnames,\n footer_text=\"Wagtail v.\" + __version__,\n )\n\n\[email protected](\"register_admin_menu_item\")\ndef register_settings_menu():\n return SettingsMenuItem(_(\"Settings\"), settings_menu, icon_name=\"cogs\", order=10000)\n\n\[email protected](\"register_permissions\")\ndef 
register_permissions():\n return Permission.objects.filter(\n content_type__app_label=\"wagtailadmin\", codename=\"access_admin\"\n )\n\n\nclass PageSearchArea(SearchArea):\n def __init__(self):\n super().__init__(\n _(\"Pages\"),\n reverse(\"wagtailadmin_pages:search\"),\n name=\"pages\",\n icon_name=\"folder-open-inverse\",\n order=100,\n )\n\n def is_shown(self, request):\n return user_has_any_page_permission(request.user)\n\n\[email protected](\"register_admin_search_area\")\ndef register_pages_search_area():\n return PageSearchArea()\n\n\[email protected](\"register_group_permission_panel\")\ndef register_collection_permissions_panel():\n return GroupCollectionManagementPermissionFormSet\n\n\nclass CollectionsMenuItem(MenuItem):\n def is_shown(self, request):\n return collection_permission_policy.user_has_any_permission(\n request.user, [\"add\", \"change\", \"delete\"]\n )\n\n\[email protected](\"register_settings_menu_item\")\ndef register_collections_menu_item():\n return CollectionsMenuItem(\n _(\"Collections\"),\n reverse(\"wagtailadmin_collections:index\"),\n icon_name=\"folder-open-1\",\n order=700,\n )\n\n\nclass WorkflowsMenuItem(MenuItem):\n def is_shown(self, request):\n if not getattr(settings, \"WAGTAIL_WORKFLOW_ENABLED\", True):\n return False\n\n return workflow_permission_policy.user_has_any_permission(\n request.user, [\"add\", \"change\", \"delete\"]\n )\n\n\nclass WorkflowTasksMenuItem(MenuItem):\n def is_shown(self, request):\n if not getattr(settings, \"WAGTAIL_WORKFLOW_ENABLED\", True):\n return False\n\n return task_permission_policy.user_has_any_permission(\n request.user, [\"add\", \"change\", \"delete\"]\n )\n\n\[email protected](\"register_settings_menu_item\")\ndef register_workflows_menu_item():\n return WorkflowsMenuItem(\n _(\"Workflows\"),\n reverse(\"wagtailadmin_workflows:index\"),\n icon_name=\"tasks\",\n order=100,\n )\n\n\[email protected](\"register_settings_menu_item\")\ndef register_workflow_tasks_menu_item():\n return WorkflowTasksMenuItem(\n _(\"Workflow tasks\"),\n reverse(\"wagtailadmin_workflows:task_index\"),\n icon_name=\"thumbtack\",\n order=150,\n )\n\n\[email protected](\"register_page_listing_buttons\")\ndef page_listing_buttons(page, page_perms, is_parent=False, next_url=None):\n if page_perms.can_edit():\n yield PageListingButton(\n _(\"Edit\"),\n reverse(\"wagtailadmin_pages:edit\", args=[page.id]),\n attrs={\n \"aria-label\": _(\"Edit '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=10,\n )\n if page.has_unpublished_changes and page.is_previewable():\n yield PageListingButton(\n _(\"View draft\"),\n reverse(\"wagtailadmin_pages:view_draft\", args=[page.id]),\n attrs={\n \"aria-label\": _(\"Preview draft version of '%(title)s'\")\n % {\"title\": page.get_admin_display_title()},\n \"rel\": \"noreferrer\",\n },\n priority=20,\n )\n if page.live and page.url:\n yield PageListingButton(\n _(\"View live\"),\n page.url,\n attrs={\n \"rel\": \"noreferrer\",\n \"aria-label\": _(\"View live version of '%(title)s'\")\n % {\"title\": page.get_admin_display_title()},\n },\n priority=30,\n )\n if page_perms.can_add_subpage():\n if is_parent:\n yield Button(\n _(\"Add child page\"),\n reverse(\"wagtailadmin_pages:add_subpage\", args=[page.id]),\n attrs={\n \"aria-label\": _(\"Add a child page to '%(title)s' \")\n % {\"title\": page.get_admin_display_title()},\n },\n classes={\n \"button\",\n \"button-small\",\n \"bicolor\",\n \"icon\",\n \"white\",\n \"icon-plus\",\n },\n priority=40,\n )\n else:\n yield 
PageListingButton(\n _(\"Add child page\"),\n reverse(\"wagtailadmin_pages:add_subpage\", args=[page.id]),\n attrs={\n \"aria-label\": _(\"Add a child page to '%(title)s' \")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=40,\n )\n\n yield ButtonWithDropdownFromHook(\n _(\"More\"),\n hook_name=\"register_page_listing_more_buttons\",\n page=page,\n page_perms=page_perms,\n is_parent=is_parent,\n next_url=next_url,\n attrs={\n \"target\": \"_blank\",\n \"rel\": \"noreferrer\",\n \"title\": _(\"View more options for '%(title)s'\")\n % {\"title\": page.get_admin_display_title()},\n },\n priority=50,\n )\n\n\[email protected](\"register_page_listing_more_buttons\")\ndef page_listing_more_buttons(page, page_perms, is_parent=False, next_url=None):\n if page_perms.can_move():\n yield Button(\n _(\"Move\"),\n reverse(\"wagtailadmin_pages:move\", args=[page.id]),\n attrs={\n \"title\": _(\"Move page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=10,\n )\n if page_perms.can_copy():\n url = reverse(\"wagtailadmin_pages:copy\", args=[page.id])\n if next_url:\n url += \"?\" + urlencode({\"next\": next_url})\n\n yield Button(\n _(\"Copy\"),\n url,\n attrs={\n \"title\": _(\"Copy page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=20,\n )\n if page_perms.can_delete():\n url = reverse(\"wagtailadmin_pages:delete\", args=[page.id])\n\n # After deleting the page, it is impossible to redirect to it.\n if next_url == reverse(\"wagtailadmin_explore\", args=[page.id]):\n next_url = None\n\n if next_url:\n url += \"?\" + urlencode({\"next\": next_url})\n\n yield Button(\n _(\"Delete\"),\n url,\n attrs={\n \"title\": _(\"Delete page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=30,\n )\n if page_perms.can_unpublish():\n url = reverse(\"wagtailadmin_pages:unpublish\", args=[page.id])\n if next_url:\n url += \"?\" + urlencode({\"next\": next_url})\n\n yield Button(\n _(\"Unpublish\"),\n url,\n attrs={\n \"title\": _(\"Unpublish page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=40,\n )\n if page_perms.can_view_revisions():\n yield Button(\n _(\"History\"),\n reverse(\"wagtailadmin_pages:history\", args=[page.id]),\n attrs={\n \"title\": _(\"View page history for '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=50,\n )\n\n if is_parent:\n yield Button(\n _(\"Sort menu order\"),\n \"?ordering=ord\",\n attrs={\n \"title\": _(\"Change ordering of child pages of '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=60,\n )\n\n\[email protected](\"register_page_header_buttons\")\ndef page_header_buttons(page, page_perms, next_url=None):\n if page_perms.can_move():\n yield Button(\n _(\"Move\"),\n reverse(\"wagtailadmin_pages:move\", args=[page.id]),\n icon_name=\"arrow-right-full\",\n attrs={\n \"title\": _(\"Move page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=10,\n )\n if page_perms.can_copy():\n url = reverse(\"wagtailadmin_pages:copy\", args=[page.id])\n if next_url:\n url += \"?\" + urlencode({\"next\": next_url})\n\n yield Button(\n _(\"Copy\"),\n url,\n icon_name=\"copy\",\n attrs={\n \"title\": _(\"Copy page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=20,\n )\n if page_perms.can_add_subpage():\n yield Button(\n _(\"Add child page\"),\n reverse(\"wagtailadmin_pages:add_subpage\", args=[page.id]),\n icon_name=\"circle-plus\",\n attrs={\n 
\"aria-label\": _(\"Add a child page to '%(title)s' \")\n % {\"title\": page.get_admin_display_title()},\n },\n priority=30,\n )\n\n\[email protected](\"register_admin_urls\")\ndef register_viewsets_urls():\n viewsets.populate()\n return viewsets.get_urlpatterns()\n\n\[email protected](\"register_rich_text_features\")\ndef register_core_features(features):\n features.register_converter_rule(\n \"editorhtml\",\n \"link\",\n [\n WhitelistRule(\"a\", attribute_rule({\"href\": check_url})),\n LinkTypeRule(\"page\", PageLinkHandler),\n ],\n )\n\n features.register_converter_rule(\n \"editorhtml\",\n \"bold\",\n [\n WhitelistRule(\"b\", allow_without_attributes),\n WhitelistRule(\"strong\", allow_without_attributes),\n ],\n )\n\n features.register_converter_rule(\n \"editorhtml\",\n \"italic\",\n [\n WhitelistRule(\"i\", allow_without_attributes),\n WhitelistRule(\"em\", allow_without_attributes),\n ],\n )\n\n headings_elements = [\"h1\", \"h2\", \"h3\", \"h4\", \"h5\", \"h6\"]\n for order, element in enumerate(headings_elements):\n features.register_converter_rule(\n \"editorhtml\", element, [WhitelistRule(element, allow_without_attributes)]\n )\n\n features.register_converter_rule(\n \"editorhtml\",\n \"ol\",\n [\n WhitelistRule(\"ol\", allow_without_attributes),\n WhitelistRule(\"li\", allow_without_attributes),\n ],\n )\n\n features.register_converter_rule(\n \"editorhtml\",\n \"ul\",\n [\n WhitelistRule(\"ul\", allow_without_attributes),\n WhitelistRule(\"li\", allow_without_attributes),\n ],\n )\n\n # Draftail\n features.register_editor_plugin(\n \"draftail\", \"hr\", draftail_features.BooleanFeature(\"enableHorizontalRule\")\n )\n features.register_converter_rule(\n \"contentstate\",\n \"hr\",\n {\n \"from_database_format\": {\n \"hr\": HorizontalRuleHandler(),\n },\n \"to_database_format\": {\n \"entity_decorators\": {\n \"HORIZONTAL_RULE\": lambda props: DOM.create_element(\"hr\")\n }\n },\n },\n )\n\n features.register_editor_plugin(\n \"draftail\",\n \"h1\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H1\",\n \"type\": \"header-one\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 1},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h1\",\n {\n \"from_database_format\": {\n \"h1\": BlockElementHandler(\"header-one\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-one\": \"h1\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h2\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H2\",\n \"type\": \"header-two\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 2},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h2\",\n {\n \"from_database_format\": {\n \"h2\": BlockElementHandler(\"header-two\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-two\": \"h2\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h3\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H3\",\n \"type\": \"header-three\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 3},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h3\",\n {\n \"from_database_format\": {\n \"h3\": BlockElementHandler(\"header-three\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-three\": \"h3\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h4\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H4\",\n \"type\": \"header-four\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 4},\n }\n ),\n )\n 
features.register_converter_rule(\n \"contentstate\",\n \"h4\",\n {\n \"from_database_format\": {\n \"h4\": BlockElementHandler(\"header-four\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-four\": \"h4\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h5\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H5\",\n \"type\": \"header-five\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 5},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h5\",\n {\n \"from_database_format\": {\n \"h5\": BlockElementHandler(\"header-five\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-five\": \"h5\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h6\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H6\",\n \"type\": \"header-six\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 6},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h6\",\n {\n \"from_database_format\": {\n \"h6\": BlockElementHandler(\"header-six\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-six\": \"h6\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"ul\",\n draftail_features.BlockFeature(\n {\n \"type\": \"unordered-list-item\",\n \"icon\": \"list-ul\",\n \"description\": gettext(\"Bulleted list\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"ul\",\n {\n \"from_database_format\": {\n \"ul\": ListElementHandler(\"unordered-list-item\"),\n \"li\": ListItemElementHandler(),\n },\n \"to_database_format\": {\n \"block_map\": {\"unordered-list-item\": {\"element\": \"li\", \"wrapper\": \"ul\"}}\n },\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"ol\",\n draftail_features.BlockFeature(\n {\n \"type\": \"ordered-list-item\",\n \"icon\": \"list-ol\",\n \"description\": gettext(\"Numbered list\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"ol\",\n {\n \"from_database_format\": {\n \"ol\": ListElementHandler(\"ordered-list-item\"),\n \"li\": ListItemElementHandler(),\n },\n \"to_database_format\": {\n \"block_map\": {\"ordered-list-item\": {\"element\": \"li\", \"wrapper\": \"ol\"}}\n },\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"blockquote\",\n draftail_features.BlockFeature(\n {\n \"type\": \"blockquote\",\n \"icon\": \"openquote\",\n \"description\": gettext(\"Blockquote\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"blockquote\",\n {\n \"from_database_format\": {\n \"blockquote\": BlockElementHandler(\"blockquote\"),\n },\n \"to_database_format\": {\"block_map\": {\"blockquote\": \"blockquote\"}},\n },\n )\n\n features.register_editor_plugin(\n \"draftail\",\n \"bold\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"BOLD\",\n \"icon\": \"bold\",\n \"description\": gettext(\"Bold\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"bold\",\n {\n \"from_database_format\": {\n \"b\": InlineStyleElementHandler(\"BOLD\"),\n \"strong\": InlineStyleElementHandler(\"BOLD\"),\n },\n \"to_database_format\": {\"style_map\": {\"BOLD\": \"b\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"italic\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"ITALIC\",\n \"icon\": \"italic\",\n \"description\": gettext(\"Italic\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"italic\",\n {\n \"from_database_format\": {\n \"i\": InlineStyleElementHandler(\"ITALIC\"),\n 
\"em\": InlineStyleElementHandler(\"ITALIC\"),\n },\n \"to_database_format\": {\"style_map\": {\"ITALIC\": \"i\"}},\n },\n )\n\n features.register_editor_plugin(\n \"draftail\",\n \"link\",\n draftail_features.EntityFeature(\n {\n \"type\": \"LINK\",\n \"icon\": \"link\",\n \"description\": gettext(\"Link\"),\n # We want to enforce constraints on which links can be pasted into rich text.\n # Keep only the attributes Wagtail needs.\n \"attributes\": [\"url\", \"id\", \"parentId\"],\n \"whitelist\": {\n # Keep pasted links with http/https protocol, and not-pasted links (href = undefined).\n \"href\": \"^(http:|https:|undefined$)\",\n },\n },\n js=[\n \"wagtailadmin/js/page-chooser-modal.js\",\n ],\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"link\",\n {\n \"from_database_format\": {\n \"a[href]\": ExternalLinkElementHandler(\"LINK\"),\n 'a[linktype=\"page\"]': PageLinkElementHandler(\"LINK\"),\n },\n \"to_database_format\": {\"entity_decorators\": {\"LINK\": link_entity}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"superscript\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"SUPERSCRIPT\",\n \"icon\": \"superscript\",\n \"description\": gettext(\"Superscript\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"superscript\",\n {\n \"from_database_format\": {\n \"sup\": InlineStyleElementHandler(\"SUPERSCRIPT\"),\n },\n \"to_database_format\": {\"style_map\": {\"SUPERSCRIPT\": \"sup\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"subscript\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"SUBSCRIPT\",\n \"icon\": \"subscript\",\n \"description\": gettext(\"Subscript\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"subscript\",\n {\n \"from_database_format\": {\n \"sub\": InlineStyleElementHandler(\"SUBSCRIPT\"),\n },\n \"to_database_format\": {\"style_map\": {\"SUBSCRIPT\": \"sub\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"strikethrough\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"STRIKETHROUGH\",\n \"icon\": \"strikethrough\",\n \"description\": gettext(\"Strikethrough\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"strikethrough\",\n {\n \"from_database_format\": {\n \"s\": InlineStyleElementHandler(\"STRIKETHROUGH\"),\n },\n \"to_database_format\": {\"style_map\": {\"STRIKETHROUGH\": \"s\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"code\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"CODE\",\n \"icon\": \"code\",\n \"description\": gettext(\"Code\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"code\",\n {\n \"from_database_format\": {\n \"code\": InlineStyleElementHandler(\"CODE\"),\n },\n \"to_database_format\": {\"style_map\": {\"CODE\": \"code\"}},\n },\n )\n\n\nclass LockedPagesMenuItem(MenuItem):\n def is_shown(self, request):\n return UserPagePermissionsProxy(request.user).can_remove_locks()\n\n\nclass WorkflowReportMenuItem(MenuItem):\n def is_shown(self, request):\n return getattr(settings, \"WAGTAIL_WORKFLOW_ENABLED\", True)\n\n\nclass SiteHistoryReportMenuItem(MenuItem):\n def is_shown(self, request):\n return UserPagePermissionsProxy(request.user).explorable_pages().exists()\n\n\nclass AgingPagesReportMenuItem(MenuItem):\n def is_shown(self, request):\n return getattr(settings, \"WAGTAIL_AGING_PAGES_ENABLED\", True)\n\n\[email protected](\"register_reports_menu_item\")\ndef 
register_locked_pages_menu_item():\n return LockedPagesMenuItem(\n _(\"Locked Pages\"),\n reverse(\"wagtailadmin_reports:locked_pages\"),\n icon_name=\"lock\",\n order=700,\n )\n\n\[email protected](\"register_reports_menu_item\")\ndef register_workflow_report_menu_item():\n return WorkflowReportMenuItem(\n _(\"Workflows\"),\n reverse(\"wagtailadmin_reports:workflow\"),\n icon_name=\"tasks\",\n order=800,\n )\n\n\[email protected](\"register_reports_menu_item\")\ndef register_workflow_tasks_report_menu_item():\n return WorkflowReportMenuItem(\n _(\"Workflow tasks\"),\n reverse(\"wagtailadmin_reports:workflow_tasks\"),\n icon_name=\"thumbtack\",\n order=900,\n )\n\n\[email protected](\"register_reports_menu_item\")\ndef register_site_history_report_menu_item():\n return SiteHistoryReportMenuItem(\n _(\"Site history\"),\n reverse(\"wagtailadmin_reports:site_history\"),\n icon_name=\"history\",\n order=1000,\n )\n\n\[email protected](\"register_reports_menu_item\")\ndef register_aging_pages_report_menu_item():\n return AgingPagesReportMenuItem(\n _(\"Aging pages\"),\n reverse(\"wagtailadmin_reports:aging_pages\"),\n icon_name=\"time\",\n order=1100,\n )\n\n\[email protected](\"register_admin_menu_item\")\ndef register_reports_menu():\n return SubmenuMenuItem(_(\"Reports\"), reports_menu, icon_name=\"site\", order=9000)\n\n\[email protected](\"register_icons\")\ndef register_icons(icons):\n for icon in [\n \"angle-double-left.svg\",\n \"angle-double-right.svg\",\n \"arrow-down-big.svg\",\n \"arrow-down.svg\",\n \"arrow-right-full.svg\",\n \"arrow-left.svg\",\n \"arrow-right.svg\",\n \"arrow-up-big.svg\",\n \"arrow-up.svg\",\n \"arrows-up-down.svg\",\n \"bars.svg\",\n \"bin.svg\",\n \"bold.svg\",\n \"breadcrumb-expand.svg\",\n \"chain-broken.svg\",\n \"check.svg\",\n \"chevron-down.svg\",\n \"circle-check.svg\",\n \"circle-plus.svg\",\n \"clipboard-list.svg\",\n \"code.svg\",\n \"cog.svg\",\n \"cogs.svg\",\n \"copy.svg\",\n \"collapse-down.svg\",\n \"collapse-up.svg\",\n \"comment.svg\",\n \"comment-add.svg\",\n \"comment-add-reversed.svg\",\n \"comment-large.svg\",\n \"comment-large-outline.svg\",\n \"comment-large-reversed.svg\",\n \"cross.svg\",\n \"cut.svg\",\n \"date.svg\",\n \"doc-empty-inverse.svg\",\n \"doc-empty.svg\",\n \"doc-full-inverse.svg\",\n \"doc-full.svg\", # aka file-text-alt\n \"dots-vertical.svg\",\n \"dots-horizontal.svg\",\n \"download-alt.svg\",\n \"download.svg\",\n \"draft.svg\",\n \"duplicate.svg\",\n \"edit.svg\",\n \"ellipsis-v.svg\",\n \"expand-right.svg\",\n \"error.svg\",\n \"folder-inverse.svg\",\n \"folder-open-1.svg\",\n \"folder-open-inverse.svg\",\n \"folder.svg\",\n \"form.svg\",\n \"globe.svg\",\n \"grip.svg\",\n \"group.svg\",\n \"help.svg\",\n \"history.svg\",\n \"home.svg\",\n \"horizontalrule.svg\",\n \"image.svg\", # aka picture\n \"info-circle.svg\",\n \"italic.svg\",\n \"link.svg\",\n \"link-external.svg\",\n \"list-ol.svg\",\n \"list-ul.svg\",\n \"lock-open.svg\",\n \"lock.svg\",\n \"login.svg\",\n \"logout.svg\",\n \"mail.svg\",\n \"media.svg\",\n \"mobile-alt.svg\",\n \"no-view.svg\",\n \"openquote.svg\",\n \"order-down.svg\",\n \"order-up.svg\",\n \"order.svg\",\n \"password.svg\",\n \"pick.svg\",\n \"pilcrow.svg\",\n \"placeholder.svg\", # aka marquee\n \"plus-inverse.svg\",\n \"plus.svg\",\n \"radio-empty.svg\",\n \"radio-full.svg\",\n \"redirect.svg\",\n \"repeat.svg\",\n \"reset.svg\",\n \"resubmit.svg\",\n \"search.svg\",\n \"site.svg\",\n \"snippet.svg\",\n \"spinner.svg\",\n \"strikethrough.svg\",\n \"success.svg\",\n \"subscript.svg\",\n 
\"superscript.svg\",\n \"table.svg\",\n \"tag.svg\",\n \"tasks.svg\",\n \"thumbtack.svg\",\n \"tick-inverse.svg\",\n \"tick.svg\",\n \"time.svg\",\n \"title.svg\",\n \"undo.svg\",\n \"uni52.svg\", # Is this a redundant icon?\n \"upload.svg\",\n \"user.svg\",\n \"view.svg\",\n \"wagtail-inverse.svg\",\n \"wagtail.svg\",\n \"warning.svg\",\n ]:\n icons.append(\"wagtailadmin/icons/{}\".format(icon))\n return icons\n\n\[email protected](\"construct_homepage_summary_items\")\ndef add_pages_summary_item(request, items):\n items.insert(0, PagesSummaryItem(request))\n\n\nclass PageAdminURLFinder:\n def __init__(self, user):\n self.page_perms = user and UserPagePermissionsProxy(user)\n\n def get_edit_url(self, instance):\n if self.page_perms and not self.page_perms.for_page(instance).can_edit():\n return None\n else:\n return reverse(\"wagtailadmin_pages:edit\", args=(instance.pk,))\n\n\nregister_admin_url_finder(Page, PageAdminURLFinder)\n\n\nclass CollectionAdminURLFinder(ModelAdminURLFinder):\n permission_policy = collection_permission_policy\n edit_url_name = \"wagtailadmin_collections:edit\"\n\n\nregister_admin_url_finder(Collection, CollectionAdminURLFinder)\n\n\nclass WorkflowAdminURLFinder(ModelAdminURLFinder):\n permission_policy = workflow_permission_policy\n edit_url_name = \"wagtailadmin_workflows:edit\"\n\n\nregister_admin_url_finder(Workflow, WorkflowAdminURLFinder)\n\n\nclass WorkflowTaskAdminURLFinder(ModelAdminURLFinder):\n permission_policy = task_permission_policy\n edit_url_name = \"wagtailadmin_workflows:edit_task\"\n\n\nregister_admin_url_finder(Task, WorkflowTaskAdminURLFinder)\n\n\nfor action_class in [\n DeleteBulkAction,\n MoveBulkAction,\n PublishBulkAction,\n UnpublishBulkAction,\n]:\n hooks.register(\"register_bulk_action\", action_class)\n", "path": "wagtail/admin/wagtail_hooks.py" } ]
[ { "content": "from django.conf import settings\nfrom django.contrib.auth.models import Permission\nfrom django.urls import reverse\nfrom django.utils.http import urlencode\nfrom django.utils.translation import gettext\nfrom django.utils.translation import gettext_lazy as _\nfrom draftjs_exporter.dom import DOM\n\nimport wagtail.admin.rich_text.editors.draftail.features as draftail_features\nfrom wagtail import __version__, hooks\nfrom wagtail.admin.admin_url_finder import (\n ModelAdminURLFinder,\n register_admin_url_finder,\n)\nfrom wagtail.admin.auth import user_has_any_page_permission\nfrom wagtail.admin.forms.collections import GroupCollectionManagementPermissionFormSet\nfrom wagtail.admin.menu import MenuItem, SubmenuMenuItem, reports_menu, settings_menu\nfrom wagtail.admin.navigation import get_explorable_root_page\nfrom wagtail.admin.rich_text.converters.contentstate import link_entity\nfrom wagtail.admin.rich_text.converters.editor_html import (\n LinkTypeRule,\n PageLinkHandler,\n WhitelistRule,\n)\nfrom wagtail.admin.rich_text.converters.html_to_contentstate import (\n BlockElementHandler,\n ExternalLinkElementHandler,\n HorizontalRuleHandler,\n InlineStyleElementHandler,\n ListElementHandler,\n ListItemElementHandler,\n PageLinkElementHandler,\n)\nfrom wagtail.admin.search import SearchArea\nfrom wagtail.admin.site_summary import PagesSummaryItem\nfrom wagtail.admin.ui.sidebar import (\n PageExplorerMenuItem as PageExplorerMenuItemComponent,\n)\nfrom wagtail.admin.ui.sidebar import SubMenuItem as SubMenuItemComponent\nfrom wagtail.admin.views.pages.bulk_actions import (\n DeleteBulkAction,\n MoveBulkAction,\n PublishBulkAction,\n UnpublishBulkAction,\n)\nfrom wagtail.admin.viewsets import viewsets\nfrom wagtail.admin.widgets import Button, ButtonWithDropdownFromHook, PageListingButton\nfrom wagtail.models import Collection, Page, Task, UserPagePermissionsProxy, Workflow\nfrom wagtail.permissions import (\n collection_permission_policy,\n task_permission_policy,\n workflow_permission_policy,\n)\nfrom wagtail.whitelist import allow_without_attributes, attribute_rule, check_url\n\n\nclass ExplorerMenuItem(MenuItem):\n def is_shown(self, request):\n return user_has_any_page_permission(request.user)\n\n def get_context(self, request):\n context = super().get_context(request)\n start_page = get_explorable_root_page(request.user)\n\n if start_page:\n context[\"start_page_id\"] = start_page.id\n\n return context\n\n def render_component(self, request):\n start_page = get_explorable_root_page(request.user)\n\n if start_page:\n return PageExplorerMenuItemComponent(\n self.name,\n self.label,\n self.url,\n start_page.id,\n icon_name=self.icon_name,\n classnames=self.classnames,\n )\n else:\n return super().render_component(request)\n\n\[email protected](\"register_admin_menu_item\")\ndef register_explorer_menu_item():\n return ExplorerMenuItem(\n _(\"Pages\"),\n reverse(\"wagtailadmin_explore_root\"),\n name=\"explorer\",\n icon_name=\"folder-open-inverse\",\n order=100,\n )\n\n\nclass SettingsMenuItem(SubmenuMenuItem):\n def render_component(self, request):\n return SubMenuItemComponent(\n self.name,\n self.label,\n self.menu.render_component(request),\n icon_name=self.icon_name,\n classnames=self.classnames,\n footer_text=\"Wagtail v.\" + __version__,\n )\n\n\[email protected](\"register_admin_menu_item\")\ndef register_settings_menu():\n return SettingsMenuItem(_(\"Settings\"), settings_menu, icon_name=\"cogs\", order=10000)\n\n\[email protected](\"register_permissions\")\ndef 
register_permissions():\n return Permission.objects.filter(\n content_type__app_label=\"wagtailadmin\", codename=\"access_admin\"\n )\n\n\nclass PageSearchArea(SearchArea):\n def __init__(self):\n super().__init__(\n _(\"Pages\"),\n reverse(\"wagtailadmin_pages:search\"),\n name=\"pages\",\n icon_name=\"folder-open-inverse\",\n order=100,\n )\n\n def is_shown(self, request):\n return user_has_any_page_permission(request.user)\n\n\[email protected](\"register_admin_search_area\")\ndef register_pages_search_area():\n return PageSearchArea()\n\n\[email protected](\"register_group_permission_panel\")\ndef register_collection_permissions_panel():\n return GroupCollectionManagementPermissionFormSet\n\n\nclass CollectionsMenuItem(MenuItem):\n def is_shown(self, request):\n return collection_permission_policy.user_has_any_permission(\n request.user, [\"add\", \"change\", \"delete\"]\n )\n\n\[email protected](\"register_settings_menu_item\")\ndef register_collections_menu_item():\n return CollectionsMenuItem(\n _(\"Collections\"),\n reverse(\"wagtailadmin_collections:index\"),\n icon_name=\"folder-open-1\",\n order=700,\n )\n\n\nclass WorkflowsMenuItem(MenuItem):\n def is_shown(self, request):\n if not getattr(settings, \"WAGTAIL_WORKFLOW_ENABLED\", True):\n return False\n\n return workflow_permission_policy.user_has_any_permission(\n request.user, [\"add\", \"change\", \"delete\"]\n )\n\n\nclass WorkflowTasksMenuItem(MenuItem):\n def is_shown(self, request):\n if not getattr(settings, \"WAGTAIL_WORKFLOW_ENABLED\", True):\n return False\n\n return task_permission_policy.user_has_any_permission(\n request.user, [\"add\", \"change\", \"delete\"]\n )\n\n\[email protected](\"register_settings_menu_item\")\ndef register_workflows_menu_item():\n return WorkflowsMenuItem(\n _(\"Workflows\"),\n reverse(\"wagtailadmin_workflows:index\"),\n icon_name=\"tasks\",\n order=100,\n )\n\n\[email protected](\"register_settings_menu_item\")\ndef register_workflow_tasks_menu_item():\n return WorkflowTasksMenuItem(\n _(\"Workflow tasks\"),\n reverse(\"wagtailadmin_workflows:task_index\"),\n icon_name=\"thumbtack\",\n order=150,\n )\n\n\[email protected](\"register_page_listing_buttons\")\ndef page_listing_buttons(page, page_perms, is_parent=False, next_url=None):\n if page_perms.can_edit():\n yield PageListingButton(\n _(\"Edit\"),\n reverse(\"wagtailadmin_pages:edit\", args=[page.id]),\n attrs={\n \"aria-label\": _(\"Edit '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=10,\n )\n if page.has_unpublished_changes and page.is_previewable():\n yield PageListingButton(\n _(\"View draft\"),\n reverse(\"wagtailadmin_pages:view_draft\", args=[page.id]),\n attrs={\n \"aria-label\": _(\"Preview draft version of '%(title)s'\")\n % {\"title\": page.get_admin_display_title()},\n \"rel\": \"noreferrer\",\n },\n priority=20,\n )\n if page.live and page.url:\n yield PageListingButton(\n _(\"View live\"),\n page.url,\n attrs={\n \"rel\": \"noreferrer\",\n \"aria-label\": _(\"View live version of '%(title)s'\")\n % {\"title\": page.get_admin_display_title()},\n },\n priority=30,\n )\n if page_perms.can_add_subpage():\n if is_parent:\n yield Button(\n _(\"Add child page\"),\n reverse(\"wagtailadmin_pages:add_subpage\", args=[page.id]),\n attrs={\n \"aria-label\": _(\"Add a child page to '%(title)s' \")\n % {\"title\": page.get_admin_display_title()},\n },\n classes={\n \"button\",\n \"button-small\",\n \"bicolor\",\n \"icon\",\n \"white\",\n \"icon-plus\",\n },\n priority=40,\n )\n else:\n yield 
PageListingButton(\n _(\"Add child page\"),\n reverse(\"wagtailadmin_pages:add_subpage\", args=[page.id]),\n attrs={\n \"aria-label\": _(\"Add a child page to '%(title)s' \")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=40,\n )\n\n yield ButtonWithDropdownFromHook(\n _(\"More\"),\n hook_name=\"register_page_listing_more_buttons\",\n page=page,\n page_perms=page_perms,\n is_parent=is_parent,\n next_url=next_url,\n attrs={\n \"target\": \"_blank\",\n \"rel\": \"noreferrer\",\n \"title\": _(\"View more options for '%(title)s'\")\n % {\"title\": page.get_admin_display_title()},\n },\n priority=50,\n )\n\n\[email protected](\"register_page_listing_more_buttons\")\ndef page_listing_more_buttons(page, page_perms, is_parent=False, next_url=None):\n if page_perms.can_move():\n yield Button(\n _(\"Move\"),\n reverse(\"wagtailadmin_pages:move\", args=[page.id]),\n attrs={\n \"title\": _(\"Move page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=10,\n )\n if page_perms.can_copy():\n url = reverse(\"wagtailadmin_pages:copy\", args=[page.id])\n if next_url:\n url += \"?\" + urlencode({\"next\": next_url})\n\n yield Button(\n _(\"Copy\"),\n url,\n attrs={\n \"title\": _(\"Copy page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=20,\n )\n if page_perms.can_delete():\n url = reverse(\"wagtailadmin_pages:delete\", args=[page.id])\n\n # After deleting the page, it is impossible to redirect to it.\n if next_url == reverse(\"wagtailadmin_explore\", args=[page.id]):\n next_url = None\n\n if next_url:\n url += \"?\" + urlencode({\"next\": next_url})\n\n yield Button(\n _(\"Delete\"),\n url,\n attrs={\n \"title\": _(\"Delete page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=30,\n )\n if page_perms.can_unpublish():\n url = reverse(\"wagtailadmin_pages:unpublish\", args=[page.id])\n if next_url:\n url += \"?\" + urlencode({\"next\": next_url})\n\n yield Button(\n _(\"Unpublish\"),\n url,\n attrs={\n \"title\": _(\"Unpublish page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=40,\n )\n if page_perms.can_view_revisions():\n yield Button(\n _(\"History\"),\n reverse(\"wagtailadmin_pages:history\", args=[page.id]),\n attrs={\n \"title\": _(\"View page history for '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=50,\n )\n\n if is_parent and page_perms.can_reorder_children():\n yield Button(\n _(\"Sort menu order\"),\n \"?ordering=ord\",\n attrs={\n \"title\": _(\"Change ordering of child pages of '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=60,\n )\n\n\[email protected](\"register_page_header_buttons\")\ndef page_header_buttons(page, page_perms, next_url=None):\n if page_perms.can_move():\n yield Button(\n _(\"Move\"),\n reverse(\"wagtailadmin_pages:move\", args=[page.id]),\n icon_name=\"arrow-right-full\",\n attrs={\n \"title\": _(\"Move page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=10,\n )\n if page_perms.can_copy():\n url = reverse(\"wagtailadmin_pages:copy\", args=[page.id])\n if next_url:\n url += \"?\" + urlencode({\"next\": next_url})\n\n yield Button(\n _(\"Copy\"),\n url,\n icon_name=\"copy\",\n attrs={\n \"title\": _(\"Copy page '%(title)s'\")\n % {\"title\": page.get_admin_display_title()}\n },\n priority=20,\n )\n if page_perms.can_add_subpage():\n yield Button(\n _(\"Add child page\"),\n reverse(\"wagtailadmin_pages:add_subpage\", args=[page.id]),\n 
icon_name=\"circle-plus\",\n attrs={\n \"aria-label\": _(\"Add a child page to '%(title)s' \")\n % {\"title\": page.get_admin_display_title()},\n },\n priority=30,\n )\n\n\[email protected](\"register_admin_urls\")\ndef register_viewsets_urls():\n viewsets.populate()\n return viewsets.get_urlpatterns()\n\n\[email protected](\"register_rich_text_features\")\ndef register_core_features(features):\n features.register_converter_rule(\n \"editorhtml\",\n \"link\",\n [\n WhitelistRule(\"a\", attribute_rule({\"href\": check_url})),\n LinkTypeRule(\"page\", PageLinkHandler),\n ],\n )\n\n features.register_converter_rule(\n \"editorhtml\",\n \"bold\",\n [\n WhitelistRule(\"b\", allow_without_attributes),\n WhitelistRule(\"strong\", allow_without_attributes),\n ],\n )\n\n features.register_converter_rule(\n \"editorhtml\",\n \"italic\",\n [\n WhitelistRule(\"i\", allow_without_attributes),\n WhitelistRule(\"em\", allow_without_attributes),\n ],\n )\n\n headings_elements = [\"h1\", \"h2\", \"h3\", \"h4\", \"h5\", \"h6\"]\n for order, element in enumerate(headings_elements):\n features.register_converter_rule(\n \"editorhtml\", element, [WhitelistRule(element, allow_without_attributes)]\n )\n\n features.register_converter_rule(\n \"editorhtml\",\n \"ol\",\n [\n WhitelistRule(\"ol\", allow_without_attributes),\n WhitelistRule(\"li\", allow_without_attributes),\n ],\n )\n\n features.register_converter_rule(\n \"editorhtml\",\n \"ul\",\n [\n WhitelistRule(\"ul\", allow_without_attributes),\n WhitelistRule(\"li\", allow_without_attributes),\n ],\n )\n\n # Draftail\n features.register_editor_plugin(\n \"draftail\", \"hr\", draftail_features.BooleanFeature(\"enableHorizontalRule\")\n )\n features.register_converter_rule(\n \"contentstate\",\n \"hr\",\n {\n \"from_database_format\": {\n \"hr\": HorizontalRuleHandler(),\n },\n \"to_database_format\": {\n \"entity_decorators\": {\n \"HORIZONTAL_RULE\": lambda props: DOM.create_element(\"hr\")\n }\n },\n },\n )\n\n features.register_editor_plugin(\n \"draftail\",\n \"h1\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H1\",\n \"type\": \"header-one\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 1},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h1\",\n {\n \"from_database_format\": {\n \"h1\": BlockElementHandler(\"header-one\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-one\": \"h1\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h2\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H2\",\n \"type\": \"header-two\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 2},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h2\",\n {\n \"from_database_format\": {\n \"h2\": BlockElementHandler(\"header-two\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-two\": \"h2\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h3\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H3\",\n \"type\": \"header-three\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 3},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h3\",\n {\n \"from_database_format\": {\n \"h3\": BlockElementHandler(\"header-three\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-three\": \"h3\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h4\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H4\",\n \"type\": \"header-four\",\n \"description\": gettext(\"Heading 
%(level)d\") % {\"level\": 4},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h4\",\n {\n \"from_database_format\": {\n \"h4\": BlockElementHandler(\"header-four\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-four\": \"h4\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h5\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H5\",\n \"type\": \"header-five\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 5},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h5\",\n {\n \"from_database_format\": {\n \"h5\": BlockElementHandler(\"header-five\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-five\": \"h5\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"h6\",\n draftail_features.BlockFeature(\n {\n \"label\": \"H6\",\n \"type\": \"header-six\",\n \"description\": gettext(\"Heading %(level)d\") % {\"level\": 6},\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"h6\",\n {\n \"from_database_format\": {\n \"h6\": BlockElementHandler(\"header-six\"),\n },\n \"to_database_format\": {\"block_map\": {\"header-six\": \"h6\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"ul\",\n draftail_features.BlockFeature(\n {\n \"type\": \"unordered-list-item\",\n \"icon\": \"list-ul\",\n \"description\": gettext(\"Bulleted list\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"ul\",\n {\n \"from_database_format\": {\n \"ul\": ListElementHandler(\"unordered-list-item\"),\n \"li\": ListItemElementHandler(),\n },\n \"to_database_format\": {\n \"block_map\": {\"unordered-list-item\": {\"element\": \"li\", \"wrapper\": \"ul\"}}\n },\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"ol\",\n draftail_features.BlockFeature(\n {\n \"type\": \"ordered-list-item\",\n \"icon\": \"list-ol\",\n \"description\": gettext(\"Numbered list\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"ol\",\n {\n \"from_database_format\": {\n \"ol\": ListElementHandler(\"ordered-list-item\"),\n \"li\": ListItemElementHandler(),\n },\n \"to_database_format\": {\n \"block_map\": {\"ordered-list-item\": {\"element\": \"li\", \"wrapper\": \"ol\"}}\n },\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"blockquote\",\n draftail_features.BlockFeature(\n {\n \"type\": \"blockquote\",\n \"icon\": \"openquote\",\n \"description\": gettext(\"Blockquote\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"blockquote\",\n {\n \"from_database_format\": {\n \"blockquote\": BlockElementHandler(\"blockquote\"),\n },\n \"to_database_format\": {\"block_map\": {\"blockquote\": \"blockquote\"}},\n },\n )\n\n features.register_editor_plugin(\n \"draftail\",\n \"bold\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"BOLD\",\n \"icon\": \"bold\",\n \"description\": gettext(\"Bold\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"bold\",\n {\n \"from_database_format\": {\n \"b\": InlineStyleElementHandler(\"BOLD\"),\n \"strong\": InlineStyleElementHandler(\"BOLD\"),\n },\n \"to_database_format\": {\"style_map\": {\"BOLD\": \"b\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"italic\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"ITALIC\",\n \"icon\": \"italic\",\n \"description\": gettext(\"Italic\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"italic\",\n {\n \"from_database_format\": {\n 
\"i\": InlineStyleElementHandler(\"ITALIC\"),\n \"em\": InlineStyleElementHandler(\"ITALIC\"),\n },\n \"to_database_format\": {\"style_map\": {\"ITALIC\": \"i\"}},\n },\n )\n\n features.register_editor_plugin(\n \"draftail\",\n \"link\",\n draftail_features.EntityFeature(\n {\n \"type\": \"LINK\",\n \"icon\": \"link\",\n \"description\": gettext(\"Link\"),\n # We want to enforce constraints on which links can be pasted into rich text.\n # Keep only the attributes Wagtail needs.\n \"attributes\": [\"url\", \"id\", \"parentId\"],\n \"whitelist\": {\n # Keep pasted links with http/https protocol, and not-pasted links (href = undefined).\n \"href\": \"^(http:|https:|undefined$)\",\n },\n },\n js=[\n \"wagtailadmin/js/page-chooser-modal.js\",\n ],\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"link\",\n {\n \"from_database_format\": {\n \"a[href]\": ExternalLinkElementHandler(\"LINK\"),\n 'a[linktype=\"page\"]': PageLinkElementHandler(\"LINK\"),\n },\n \"to_database_format\": {\"entity_decorators\": {\"LINK\": link_entity}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"superscript\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"SUPERSCRIPT\",\n \"icon\": \"superscript\",\n \"description\": gettext(\"Superscript\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"superscript\",\n {\n \"from_database_format\": {\n \"sup\": InlineStyleElementHandler(\"SUPERSCRIPT\"),\n },\n \"to_database_format\": {\"style_map\": {\"SUPERSCRIPT\": \"sup\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"subscript\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"SUBSCRIPT\",\n \"icon\": \"subscript\",\n \"description\": gettext(\"Subscript\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"subscript\",\n {\n \"from_database_format\": {\n \"sub\": InlineStyleElementHandler(\"SUBSCRIPT\"),\n },\n \"to_database_format\": {\"style_map\": {\"SUBSCRIPT\": \"sub\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"strikethrough\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"STRIKETHROUGH\",\n \"icon\": \"strikethrough\",\n \"description\": gettext(\"Strikethrough\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"strikethrough\",\n {\n \"from_database_format\": {\n \"s\": InlineStyleElementHandler(\"STRIKETHROUGH\"),\n },\n \"to_database_format\": {\"style_map\": {\"STRIKETHROUGH\": \"s\"}},\n },\n )\n features.register_editor_plugin(\n \"draftail\",\n \"code\",\n draftail_features.InlineStyleFeature(\n {\n \"type\": \"CODE\",\n \"icon\": \"code\",\n \"description\": gettext(\"Code\"),\n }\n ),\n )\n features.register_converter_rule(\n \"contentstate\",\n \"code\",\n {\n \"from_database_format\": {\n \"code\": InlineStyleElementHandler(\"CODE\"),\n },\n \"to_database_format\": {\"style_map\": {\"CODE\": \"code\"}},\n },\n )\n\n\nclass LockedPagesMenuItem(MenuItem):\n def is_shown(self, request):\n return UserPagePermissionsProxy(request.user).can_remove_locks()\n\n\nclass WorkflowReportMenuItem(MenuItem):\n def is_shown(self, request):\n return getattr(settings, \"WAGTAIL_WORKFLOW_ENABLED\", True)\n\n\nclass SiteHistoryReportMenuItem(MenuItem):\n def is_shown(self, request):\n return UserPagePermissionsProxy(request.user).explorable_pages().exists()\n\n\nclass AgingPagesReportMenuItem(MenuItem):\n def is_shown(self, request):\n return getattr(settings, \"WAGTAIL_AGING_PAGES_ENABLED\", True)\n\n\[email 
protected](\"register_reports_menu_item\")\ndef register_locked_pages_menu_item():\n return LockedPagesMenuItem(\n _(\"Locked Pages\"),\n reverse(\"wagtailadmin_reports:locked_pages\"),\n icon_name=\"lock\",\n order=700,\n )\n\n\[email protected](\"register_reports_menu_item\")\ndef register_workflow_report_menu_item():\n return WorkflowReportMenuItem(\n _(\"Workflows\"),\n reverse(\"wagtailadmin_reports:workflow\"),\n icon_name=\"tasks\",\n order=800,\n )\n\n\[email protected](\"register_reports_menu_item\")\ndef register_workflow_tasks_report_menu_item():\n return WorkflowReportMenuItem(\n _(\"Workflow tasks\"),\n reverse(\"wagtailadmin_reports:workflow_tasks\"),\n icon_name=\"thumbtack\",\n order=900,\n )\n\n\[email protected](\"register_reports_menu_item\")\ndef register_site_history_report_menu_item():\n return SiteHistoryReportMenuItem(\n _(\"Site history\"),\n reverse(\"wagtailadmin_reports:site_history\"),\n icon_name=\"history\",\n order=1000,\n )\n\n\[email protected](\"register_reports_menu_item\")\ndef register_aging_pages_report_menu_item():\n return AgingPagesReportMenuItem(\n _(\"Aging pages\"),\n reverse(\"wagtailadmin_reports:aging_pages\"),\n icon_name=\"time\",\n order=1100,\n )\n\n\[email protected](\"register_admin_menu_item\")\ndef register_reports_menu():\n return SubmenuMenuItem(_(\"Reports\"), reports_menu, icon_name=\"site\", order=9000)\n\n\[email protected](\"register_icons\")\ndef register_icons(icons):\n for icon in [\n \"angle-double-left.svg\",\n \"angle-double-right.svg\",\n \"arrow-down-big.svg\",\n \"arrow-down.svg\",\n \"arrow-right-full.svg\",\n \"arrow-left.svg\",\n \"arrow-right.svg\",\n \"arrow-up-big.svg\",\n \"arrow-up.svg\",\n \"arrows-up-down.svg\",\n \"bars.svg\",\n \"bin.svg\",\n \"bold.svg\",\n \"breadcrumb-expand.svg\",\n \"chain-broken.svg\",\n \"check.svg\",\n \"chevron-down.svg\",\n \"circle-check.svg\",\n \"circle-plus.svg\",\n \"clipboard-list.svg\",\n \"code.svg\",\n \"cog.svg\",\n \"cogs.svg\",\n \"copy.svg\",\n \"collapse-down.svg\",\n \"collapse-up.svg\",\n \"comment.svg\",\n \"comment-add.svg\",\n \"comment-add-reversed.svg\",\n \"comment-large.svg\",\n \"comment-large-outline.svg\",\n \"comment-large-reversed.svg\",\n \"cross.svg\",\n \"cut.svg\",\n \"date.svg\",\n \"doc-empty-inverse.svg\",\n \"doc-empty.svg\",\n \"doc-full-inverse.svg\",\n \"doc-full.svg\", # aka file-text-alt\n \"dots-vertical.svg\",\n \"dots-horizontal.svg\",\n \"download-alt.svg\",\n \"download.svg\",\n \"draft.svg\",\n \"duplicate.svg\",\n \"edit.svg\",\n \"ellipsis-v.svg\",\n \"expand-right.svg\",\n \"error.svg\",\n \"folder-inverse.svg\",\n \"folder-open-1.svg\",\n \"folder-open-inverse.svg\",\n \"folder.svg\",\n \"form.svg\",\n \"globe.svg\",\n \"grip.svg\",\n \"group.svg\",\n \"help.svg\",\n \"history.svg\",\n \"home.svg\",\n \"horizontalrule.svg\",\n \"image.svg\", # aka picture\n \"info-circle.svg\",\n \"italic.svg\",\n \"link.svg\",\n \"link-external.svg\",\n \"list-ol.svg\",\n \"list-ul.svg\",\n \"lock-open.svg\",\n \"lock.svg\",\n \"login.svg\",\n \"logout.svg\",\n \"mail.svg\",\n \"media.svg\",\n \"mobile-alt.svg\",\n \"no-view.svg\",\n \"openquote.svg\",\n \"order-down.svg\",\n \"order-up.svg\",\n \"order.svg\",\n \"password.svg\",\n \"pick.svg\",\n \"pilcrow.svg\",\n \"placeholder.svg\", # aka marquee\n \"plus-inverse.svg\",\n \"plus.svg\",\n \"radio-empty.svg\",\n \"radio-full.svg\",\n \"redirect.svg\",\n \"repeat.svg\",\n \"reset.svg\",\n \"resubmit.svg\",\n \"search.svg\",\n \"site.svg\",\n \"snippet.svg\",\n \"spinner.svg\",\n 
\"strikethrough.svg\",\n \"success.svg\",\n \"subscript.svg\",\n \"superscript.svg\",\n \"table.svg\",\n \"tag.svg\",\n \"tasks.svg\",\n \"thumbtack.svg\",\n \"tick-inverse.svg\",\n \"tick.svg\",\n \"time.svg\",\n \"title.svg\",\n \"undo.svg\",\n \"uni52.svg\", # Is this a redundant icon?\n \"upload.svg\",\n \"user.svg\",\n \"view.svg\",\n \"wagtail-inverse.svg\",\n \"wagtail.svg\",\n \"warning.svg\",\n ]:\n icons.append(\"wagtailadmin/icons/{}\".format(icon))\n return icons\n\n\[email protected](\"construct_homepage_summary_items\")\ndef add_pages_summary_item(request, items):\n items.insert(0, PagesSummaryItem(request))\n\n\nclass PageAdminURLFinder:\n def __init__(self, user):\n self.page_perms = user and UserPagePermissionsProxy(user)\n\n def get_edit_url(self, instance):\n if self.page_perms and not self.page_perms.for_page(instance).can_edit():\n return None\n else:\n return reverse(\"wagtailadmin_pages:edit\", args=(instance.pk,))\n\n\nregister_admin_url_finder(Page, PageAdminURLFinder)\n\n\nclass CollectionAdminURLFinder(ModelAdminURLFinder):\n permission_policy = collection_permission_policy\n edit_url_name = \"wagtailadmin_collections:edit\"\n\n\nregister_admin_url_finder(Collection, CollectionAdminURLFinder)\n\n\nclass WorkflowAdminURLFinder(ModelAdminURLFinder):\n permission_policy = workflow_permission_policy\n edit_url_name = \"wagtailadmin_workflows:edit\"\n\n\nregister_admin_url_finder(Workflow, WorkflowAdminURLFinder)\n\n\nclass WorkflowTaskAdminURLFinder(ModelAdminURLFinder):\n permission_policy = task_permission_policy\n edit_url_name = \"wagtailadmin_workflows:edit_task\"\n\n\nregister_admin_url_finder(Task, WorkflowTaskAdminURLFinder)\n\n\nfor action_class in [\n DeleteBulkAction,\n MoveBulkAction,\n PublishBulkAction,\n UnpublishBulkAction,\n]:\n hooks.register(\"register_bulk_action\", action_class)\n", "path": "wagtail/admin/wagtail_hooks.py" } ]
diff --git a/CHANGELOG.txt b/CHANGELOG.txt
index ec819eb63d47..e65aee9ea34b 100644
--- a/CHANGELOG.txt
+++ b/CHANGELOG.txt
@@ -35,6 +35,7 @@ Changelog
  * Fix: Ensure that custom document or image models support custom tag models (Matt Westcott)
  * Fix: Ensure comments use translated values for their placeholder text (Stefan Hammer)
  * Fix: Ensure the upgrade notification, shown to admins on the dashboard if Wagtail is out of date, content is translatable (LB (Ben) Johnston)
+ * Fix: Show the re-ordering option to users that have permission to publish pages within the page listing (Stefan Hammer)


 3.0 (16.05.2022)
diff --git a/docs/releases/4.0.md b/docs/releases/4.0.md
index d08feae02756..26189b8560dd 100644
--- a/docs/releases/4.0.md
+++ b/docs/releases/4.0.md
@@ -49,6 +49,7 @@ When using a queryset to render a list of images, you can now use the ``prefetch
  * Ensure that custom document or image models support custom tag models (Matt Westcott)
  * Ensure comments use translated values for their placeholder text (Stefan Hammer)
  * Ensure the upgrade notification, shown to admins on the dashboard if Wagtail is out of date, content is translatable (LB (Ben) Johnston)
+ * Only show the re-ordering option to users that have permission to publish pages within the page listing (Stefan Hammer)

 ## Upgrade considerations

diff --git a/wagtail/admin/tests/test_buttons_hooks.py b/wagtail/admin/tests/test_buttons_hooks.py
index 3fc48704ef91..1565866ae155 100644
--- a/wagtail/admin/tests/test_buttons_hooks.py
+++ b/wagtail/admin/tests/test_buttons_hooks.py
@@ -9,7 +9,7 @@
 from wagtail.test.utils import WagtailTestUtils


-class PagePerms:
+class BasePagePerms:
     def can_move(self):
         return False

@@ -17,7 +17,7 @@ def can_copy(self):
         return False

     def can_delete(self):
-        return True
+        return False

     def can_unpublish(self):
         return False
@@ -25,6 +25,19 @@ def can_unpublish(self):
     def can_view_revisions(self):
         return False

+    def can_reorder_children(self):
+        return False
+
+
+class DeleteOnlyPagePerms(BasePagePerms):
+    def can_delete(self):
+        return True
+
+
+class ReorderOnlyPagePerms(BasePagePerms):
+    def can_reorder_children(self):
+        return True
+

 class TestButtonsHooks(TestCase, WagtailTestUtils):
     def setUp(self):
@@ -144,7 +157,7 @@ def page_header_buttons(page, page_perms, next_url=None):
         self.assertContains(response, "Another useless header button")

     def test_delete_button_next_url(self):
-        page_perms = PagePerms()
+        page_perms = DeleteOnlyPagePerms()
         page = self.root_page

         base_url = reverse("wagtailadmin_pages:delete", args=[page.id])
@@ -168,3 +181,19 @@ def test_delete_button_next_url(self):
         )

         self.assertEqual(delete_button.url, base_url)
+
+    def test_reorder_button_visibility(self):
+        page = self.root_page
+        page_perms = BasePagePerms()
+
+        # no button returned
+        buttons = page_listing_more_buttons(page, page_perms, is_parent=True)
+        self.assertEqual(len(list(buttons)), 0)
+
+        page_perms = ReorderOnlyPagePerms()
+        # page_listing_more_button generator yields only `Sort menu order button`
+        reorder_button = next(
+            page_listing_more_buttons(page, page_perms, is_parent=True)
+        )
+
+        self.assertEqual(reorder_button.url, "?ordering=ord")
diff --git a/wagtail/admin/wagtail_hooks.py b/wagtail/admin/wagtail_hooks.py
index 7066e09146e3..bd74ac3fbc56 100644
--- a/wagtail/admin/wagtail_hooks.py
+++ b/wagtail/admin/wagtail_hooks.py
@@ -350,7 +350,7 @@ def page_listing_more_buttons(page, page_perms, is_parent=False, next_url=None):
             priority=50,
         )

-    if is_parent:
+    if is_parent and page_perms.can_reorder_children():
         yield Button(
             _("Sort menu order"),
             "?ordering=ord",
gammapy__gammapy-5237
`plot_regions` fails when using linewidth with a `PointSpatialModel` and extended spatial model

**Gammapy version**
gammapy v1.2

**Bug description**
When using `plot_regions` to plot different models, if a `PointSpatialModel` is included it somehow picks up some of the `**kwargs` instead of only using `kwargs_point`. Passing `linewidth` in particular results in an error. The problem does not occur when only non-point spatial models are plotted.

**To Reproduce**
To reproduce, see the small code example [here](https://gist.github.com/Astro-Kirsty/cfa975c9938043a37b6043a3ad968ee3).

```
models.plot_regions(ax=ax, kwargs_point=dict(marker="o", fillstyle='full'),
                    edgecolor="deepskyblue", facecolor="deepskyblue", linewidth=2)
```
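The mismatch behind the error can be illustrated with plain matplotlib. This is only a sketch for context, not Gammapy code, and it assumes (as the property mapping in the files below suggests) that point models are drawn as `Line2D` markers: `markeredgewidth` is a valid `Line2D` marker property, while `markerwidth` is not, so translating a patch-style `linewidth` to the wrong marker name raises an error.

```python
import matplotlib.pyplot as plt

fig, ax = plt.subplots()

# Valid: markeredgewidth is a real Line2D marker property
ax.plot([0], [0], marker="o", markeredgecolor="deepskyblue",
        markerfacecolor="deepskyblue", markeredgewidth=2)

# Invalid: Line2D has no "markerwidth" property, so uncommenting this line
# raises an error similar to the one reported above
# ax.plot([0], [0], marker="o", markerwidth=2)
```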
[ { "content": "# Licensed under a 3-clause BSD style license - see LICENSE.rst\nimport logging as log\nimport numpy as np\nfrom scipy.interpolate import CubicSpline\nfrom scipy.optimize import curve_fit\nfrom scipy.stats import norm\nfrom astropy.visualization import make_lupton_rgb\nimport matplotlib.axes as maxes\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.axes_grid1 import make_axes_locatable\n\n__all__ = [\n \"add_colorbar\",\n \"plot_contour_line\",\n \"plot_map_rgb\",\n \"plot_theta_squared_table\",\n \"plot_distribution\",\n]\n\n\nARTIST_TO_LINE_PROPERTIES = {\n \"color\": \"markeredgecolor\",\n \"edgecolor\": \"markeredgecolor\",\n \"ec\": \"markeredgecolor\",\n \"facecolor\": \"markerfacecolor\",\n \"fc\": \"markerfacecolor\",\n \"linewidth\": \"markerwidth\",\n \"lw\": \"markerwidth\",\n}\n\n\ndef add_colorbar(img, ax, axes_loc=None, **kwargs):\n \"\"\"\n Add colorbar to a given axis.\n\n Parameters\n ----------\n img : `~matplotlib.image.AxesImage`\n The image to plot the colorbar for.\n ax : `~matplotlib.axes.Axes`\n Matplotlib axes.\n axes_loc : dict, optional\n Keyword arguments passed to `~mpl_toolkits.axes_grid1.axes_divider.AxesDivider.append_axes`.\n kwargs : dict, optional\n Keyword arguments passed to `~matplotlib.pyplot.colorbar`.\n\n Returns\n -------\n cbar : `~matplotlib.pyplot.colorbar`\n The colorbar.\n\n Examples\n --------\n .. testcode::\n\n from gammapy.maps import Map\n from gammapy.visualization import add_colorbar\n import matplotlib.pyplot as plt\n map_ = Map.read(\"$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz\")\n axes_loc = {\"position\": \"right\", \"size\": \"2%\", \"pad\": \"10%\"}\n kwargs_colorbar = {'label':'Colorbar label'}\n\n # Example outside gammapy\n fig = plt.figure(figsize=(6, 3))\n ax = fig.add_subplot(111)\n img = ax.imshow(map_.sum_over_axes().data[0,:,:])\n add_colorbar(img, ax=ax, axes_loc=axes_loc, **kwargs_colorbar)\n\n # `add_colorbar` is available for the `plot` function here:\n fig = plt.figure(figsize=(6, 3))\n ax = fig.add_subplot(111)\n map_.sum_over_axes().plot(ax=ax, add_cbar=True, axes_loc=axes_loc,\n kwargs_colorbar=kwargs_colorbar) # doctest: +SKIP\n\n \"\"\"\n kwargs.setdefault(\"use_gridspec\", True)\n kwargs.setdefault(\"orientation\", \"vertical\")\n\n axes_loc = axes_loc or {}\n axes_loc.setdefault(\"position\", \"right\")\n axes_loc.setdefault(\"size\", \"5%\")\n axes_loc.setdefault(\"pad\", \"2%\")\n axes_loc.setdefault(\"axes_class\", maxes.Axes)\n\n divider = make_axes_locatable(ax)\n cax = divider.append_axes(**axes_loc)\n cbar = plt.colorbar(img, cax=cax, **kwargs)\n return cbar\n\n\ndef plot_map_rgb(map_, ax=None, **kwargs):\n \"\"\"\n Plot RGB image on matplotlib WCS axes.\n\n This function is based on the `~astropy.visualization.make_lupton_rgb` function. The input map must\n contain 1 non-spatial axis with exactly 3 bins. If this is not the case, the map has to be resampled\n before using the `plot_map_rgb` function (e.g. as shown in the code example below).\n\n Parameters\n ----------\n map_ : `~gammapy.maps.WcsNDMap`\n WCS map. 
The map must contain 1 non-spatial axis with exactly 3 bins.\n ax : `~astropy.visualization.wcsaxes.WCSAxes`, optional\n WCS axis object to plot on.\n **kwargs : dict\n Keyword arguments passed to `~astropy.visualization.make_lupton_rgb`.\n\n Returns\n -------\n ax : `~astropy.visualization.wcsaxes.WCSAxes`\n WCS axis object.\n\n Examples\n --------\n >>> from gammapy.visualization import plot_map_rgb\n >>> from gammapy.maps import Map, MapAxis\n >>> import astropy.units as u\n >>> map_ = Map.read(\"$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz\")\n >>> axis_rgb = MapAxis.from_energy_edges(\n ... [0.1, 0.2, 0.5, 10], unit=u.TeV, name=\"energy\", interp=\"log\"\n ... )\n >>> map_ = map_.resample_axis(axis_rgb)\n >>> kwargs = {\"stretch\": 0.5, \"Q\": 1, \"minimum\": 0.15}\n >>> plot_map_rgb(map_.smooth(0.08*u.deg), **kwargs) #doctest: +SKIP\n \"\"\"\n geom = map_.geom\n if len(geom.axes) != 1 or geom.axes[0].nbin != 3:\n raise ValueError(\n \"One non-spatial axis with exactly 3 bins is needed to plot an RGB image\"\n )\n\n data = [data_slice / np.nanmax(data_slice.flatten()) for data_slice in map_.data]\n data = make_lupton_rgb(*data, **kwargs)\n\n ax = map_._plot_default_axes(ax=ax)\n ax.imshow(data)\n\n if geom.is_allsky:\n ax = map_._plot_format_allsky(ax)\n else:\n ax = map_._plot_format(ax)\n\n # without this the axis limits are changed when calling scatter\n ax.autoscale(enable=False)\n\n return ax\n\n\ndef plot_contour_line(ax, x, y, **kwargs):\n \"\"\"Plot smooth curve from contour points.\"\"\"\n xf = x\n yf = y\n\n # close contour\n if not (x[0] == x[-1] and y[0] == y[-1]):\n xf = np.append(x, x[0])\n yf = np.append(y, y[0])\n\n # curve parametrization must be strictly increasing\n # so we use the cumulative distance of each point from the first one\n dist = np.sqrt(np.diff(xf) ** 2.0 + np.diff(yf) ** 2.0)\n dist = [0] + list(dist)\n t = np.cumsum(dist)\n ts = np.linspace(0, t[-1], 50)\n\n # 1D cubic spline interpolation\n cs = CubicSpline(t, np.c_[xf, yf], bc_type=\"periodic\")\n out = cs(ts)\n\n # plot\n if \"marker\" in kwargs.keys():\n marker = kwargs.pop(\"marker\")\n else:\n marker = \"+\"\n if \"color\" in kwargs.keys():\n color = kwargs.pop(\"color\")\n else:\n color = \"b\"\n\n ax.plot(out[:, 0], out[:, 1], \"-\", color=color, **kwargs)\n ax.plot(xf, yf, linestyle=\"\", marker=marker, color=color)\n\n\ndef plot_theta_squared_table(table):\n \"\"\"Plot the theta2 distribution of counts, excess and significance.\n\n Take the table containing the ON counts, the OFF counts, the acceptance,\n the off acceptance and the alpha (normalisation between ON and OFF)\n for each theta2 bin.\n\n Parameters\n ----------\n table : `~astropy.table.Table`\n Required columns: theta2_min, theta2_max, counts, counts_off and alpha\n \"\"\"\n from gammapy.maps import MapAxis\n from gammapy.maps.axes import UNIT_STRING_FORMAT\n from gammapy.maps.utils import edges_from_lo_hi\n\n theta2_edges = edges_from_lo_hi(\n table[\"theta2_min\"].quantity, table[\"theta2_max\"].quantity\n )\n theta2_axis = MapAxis.from_edges(theta2_edges, interp=\"lin\", name=\"theta_squared\")\n\n ax0 = plt.subplot(2, 1, 1)\n\n x = theta2_axis.center.value\n x_edges = theta2_axis.edges.value\n xerr = (x - x_edges[:-1], x_edges[1:] - x)\n\n ax0.errorbar(\n x,\n table[\"counts\"],\n xerr=xerr,\n yerr=np.sqrt(table[\"counts\"]),\n linestyle=\"None\",\n label=\"Counts\",\n )\n\n ax0.errorbar(\n x,\n table[\"counts_off\"],\n xerr=xerr,\n yerr=np.sqrt(table[\"counts_off\"]),\n linestyle=\"None\",\n label=\"Counts Off\",\n )\n\n 
ax0.errorbar(\n x,\n table[\"excess\"],\n xerr=xerr,\n yerr=(table[\"excess_errn\"], table[\"excess_errp\"]),\n fmt=\"+\",\n linestyle=\"None\",\n label=\"Excess\",\n )\n\n ax0.set_ylabel(\"Counts\")\n ax0.set_xticks([])\n ax0.set_xlabel(\"\")\n ax0.legend()\n\n ax1 = plt.subplot(2, 1, 2)\n ax1.errorbar(x, table[\"sqrt_ts\"], xerr=xerr, linestyle=\"None\")\n ax1.set_xlabel(f\"Theta [{theta2_axis.unit.to_string(UNIT_STRING_FORMAT)}]\")\n ax1.set_ylabel(\"Significance\")\n\n\ndef plot_distribution(\n wcs_map,\n ax=None,\n ncols=3,\n func=None,\n kwargs_hist=None,\n kwargs_axes=None,\n kwargs_fit=None,\n):\n \"\"\"\n Plot the 1D distribution of data inside a map as an histogram. If the dimension of the map is smaller than 2,\n a unique plot will be displayed. Otherwise, if the dimension is 3 or greater, a grid of plot will be displayed.\n\n Parameters\n ----------\n wcs_map : `~gammapy.maps.WcsNDMap`\n A map that contains data to be plotted.\n ax : `~matplotlib.axes.Axes` or list of `~matplotlib.axes.Axes`\n Axis object to plot on. If a list of Axis is provided it has to be the same length as the length of _map.data.\n ncols : int\n Number of columns to plot if a \"plot grid\" was to be done.\n func : function object or str\n The function used to fit a map data histogram or \"norm\". Default is None.\n If None, no fit will be performed. If \"norm\" is given, `scipy.stats.norm.pdf`\n will be passed to `scipy.optimize.curve_fit`.\n kwargs_hist : dict\n Keyword arguments to pass to `matplotlib.pyplot.hist`.\n kwargs_axes : dict\n Keyword arguments to pass to `matplotlib.axes.Axes`.\n kwargs_fit : dict\n Keyword arguments to pass to `scipy.optimize.curve_fit`\n\n Returns\n -------\n axes : `~numpy.ndarray` of `~matplotlib.pyplot.Axes`\n Array of Axes.\n result_list : list of dict\n List of dictionnary that contains the results of `scipy.optimize.curve_fit`. 
The number of elements in the list\n correspond to the dimension of the non-spatial axis of the map.\n The dictionnary contains:\n\n * `axis_edges` : the edges of the non-spatial axis bin used\n * `param` : the best-fit parameters of the input function `func`\n * `covar` : the covariance matrix for the fitted parameters `param`\n * `info_dict` : the `infodict` return of `scipy.optimize.curve_fit`\n\n Examples\n --------\n >>> from gammapy.datasets import MapDataset\n >>> from gammapy.estimators import TSMapEstimator\n >>> from scipy.stats import norm\n >>> from gammapy.visualization import plot_distribution\n >>> dataset = MapDataset.read(\"$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz\")\n >>> tsmap_est = TSMapEstimator().run(dataset)\n >>> axs, res = plot_distribution(tsmap_est.sqrt_ts, func=\"norm\", kwargs_hist={'bins': 75, 'range': (-10, 10), 'density': True})\n >>> # Equivalently, one can do the following:\n >>> func = lambda x, mu, sig : norm.pdf(x, loc=mu, scale=sig)\n >>> axs, res = plot_distribution(tsmap_est.sqrt_ts, func=func, kwargs_hist={'bins': 75, 'range': (-10, 10), 'density': True})\n \"\"\"\n\n from gammapy.maps import WcsNDMap # import here to avoid circular import\n\n if not isinstance(wcs_map, WcsNDMap):\n raise TypeError(\n f\"map_ must be an instance of gammapy.maps.WcsNDMap, given {type(wcs_map)}\"\n )\n\n kwargs_hist = kwargs_hist or {}\n kwargs_axes = kwargs_axes or {}\n kwargs_fit = kwargs_fit or {}\n\n kwargs_hist.setdefault(\"density\", True)\n kwargs_fit.setdefault(\"full_output\", True)\n\n cutout, mask = wcs_map.cutout_and_mask_region()\n idx_x, idx_y = np.where(mask)\n\n data = cutout.data[..., idx_x, idx_y]\n\n if ax is None:\n n_plot = len(data)\n cols = min(ncols, n_plot)\n rows = 1 + (n_plot - 1) // cols\n\n width = 12\n figsize = (width, width * rows / cols)\n\n fig, axes = plt.subplots(\n nrows=rows,\n ncols=cols,\n figsize=figsize,\n )\n cells_in_grid = rows * cols\n else:\n axes = ax\n cells_in_grid = len(ax.flatten())\n\n if not isinstance(axes, np.ndarray):\n axes = np.array([axes])\n\n result_list = []\n\n for idx in range(cells_in_grid):\n\n axe = axes.flat[idx]\n if idx > len(data) - 1:\n axe.set_visible(False)\n continue\n d = data[idx][np.isfinite(data[idx])]\n n, bins, _ = axe.hist(d, **kwargs_hist)\n\n if func is not None:\n kwargs_plot_fit = {\"label\": \"Fit\"}\n centers = 0.5 * (bins[1:] + bins[:-1])\n\n if func == \"norm\":\n\n def func_to_fit(x, mu, sigma):\n return norm.pdf(x, mu, sigma)\n\n pars, cov, infodict, message, _ = curve_fit(\n func_to_fit, centers, n, **kwargs_fit\n )\n\n mu, sig = pars[0], pars[1]\n err_mu, err_sig = np.sqrt(cov[0][0]), np.sqrt(cov[1][1])\n\n label_norm = (\n r\"$\\mu$ = {:.2f} ± {:.2E}\\n$\\sigma$ = {:.2f} ± {:.2E}\".format(\n mu, err_mu, sig, err_sig\n )\n ).replace(r\"\\n\", \"\\n\")\n kwargs_plot_fit[\"label\"] = label_norm\n\n else:\n func_to_fit = func\n\n pars, cov, infodict, message, _ = curve_fit(\n func_to_fit, centers, n, **kwargs_fit\n )\n\n axis_edges = (\n wcs_map.geom.axes[-1].edges[idx],\n wcs_map.geom.axes[-1].edges[idx + 1],\n )\n result_dict = {\n \"axis_edges\": axis_edges,\n \"param\": pars,\n \"covar\": cov,\n \"info_dict\": infodict,\n }\n result_list.append(result_dict)\n log.info(message)\n\n xmin, xmax = kwargs_hist.get(\"range\", (np.min(d), np.max(d)))\n x = np.linspace(xmin, xmax, 1000)\n\n axe.plot(x, func_to_fit(x, *pars), lw=2, color=\"black\", **kwargs_plot_fit)\n\n axe.set(**kwargs_axes)\n axe.legend()\n\n return axes, result_list\n", "path": 
"gammapy/visualization/utils.py" } ]
[ { "content": "# Licensed under a 3-clause BSD style license - see LICENSE.rst\nimport logging as log\nimport numpy as np\nfrom scipy.interpolate import CubicSpline\nfrom scipy.optimize import curve_fit\nfrom scipy.stats import norm\nfrom astropy.visualization import make_lupton_rgb\nimport matplotlib.axes as maxes\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.axes_grid1 import make_axes_locatable\n\n__all__ = [\n \"add_colorbar\",\n \"plot_contour_line\",\n \"plot_map_rgb\",\n \"plot_theta_squared_table\",\n \"plot_distribution\",\n]\n\n\nARTIST_TO_LINE_PROPERTIES = {\n \"color\": \"markeredgecolor\",\n \"edgecolor\": \"markeredgecolor\",\n \"ec\": \"markeredgecolor\",\n \"facecolor\": \"markerfacecolor\",\n \"fc\": \"markerfacecolor\",\n \"linewidth\": \"markeredgewidth\",\n \"lw\": \"markeredgewidth\",\n}\n\n\ndef add_colorbar(img, ax, axes_loc=None, **kwargs):\n \"\"\"\n Add colorbar to a given axis.\n\n Parameters\n ----------\n img : `~matplotlib.image.AxesImage`\n The image to plot the colorbar for.\n ax : `~matplotlib.axes.Axes`\n Matplotlib axes.\n axes_loc : dict, optional\n Keyword arguments passed to `~mpl_toolkits.axes_grid1.axes_divider.AxesDivider.append_axes`.\n kwargs : dict, optional\n Keyword arguments passed to `~matplotlib.pyplot.colorbar`.\n\n Returns\n -------\n cbar : `~matplotlib.pyplot.colorbar`\n The colorbar.\n\n Examples\n --------\n .. testcode::\n\n from gammapy.maps import Map\n from gammapy.visualization import add_colorbar\n import matplotlib.pyplot as plt\n map_ = Map.read(\"$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz\")\n axes_loc = {\"position\": \"right\", \"size\": \"2%\", \"pad\": \"10%\"}\n kwargs_colorbar = {'label':'Colorbar label'}\n\n # Example outside gammapy\n fig = plt.figure(figsize=(6, 3))\n ax = fig.add_subplot(111)\n img = ax.imshow(map_.sum_over_axes().data[0,:,:])\n add_colorbar(img, ax=ax, axes_loc=axes_loc, **kwargs_colorbar)\n\n # `add_colorbar` is available for the `plot` function here:\n fig = plt.figure(figsize=(6, 3))\n ax = fig.add_subplot(111)\n map_.sum_over_axes().plot(ax=ax, add_cbar=True, axes_loc=axes_loc,\n kwargs_colorbar=kwargs_colorbar) # doctest: +SKIP\n\n \"\"\"\n kwargs.setdefault(\"use_gridspec\", True)\n kwargs.setdefault(\"orientation\", \"vertical\")\n\n axes_loc = axes_loc or {}\n axes_loc.setdefault(\"position\", \"right\")\n axes_loc.setdefault(\"size\", \"5%\")\n axes_loc.setdefault(\"pad\", \"2%\")\n axes_loc.setdefault(\"axes_class\", maxes.Axes)\n\n divider = make_axes_locatable(ax)\n cax = divider.append_axes(**axes_loc)\n cbar = plt.colorbar(img, cax=cax, **kwargs)\n return cbar\n\n\ndef plot_map_rgb(map_, ax=None, **kwargs):\n \"\"\"\n Plot RGB image on matplotlib WCS axes.\n\n This function is based on the `~astropy.visualization.make_lupton_rgb` function. The input map must\n contain 1 non-spatial axis with exactly 3 bins. If this is not the case, the map has to be resampled\n before using the `plot_map_rgb` function (e.g. as shown in the code example below).\n\n Parameters\n ----------\n map_ : `~gammapy.maps.WcsNDMap`\n WCS map. 
The map must contain 1 non-spatial axis with exactly 3 bins.\n ax : `~astropy.visualization.wcsaxes.WCSAxes`, optional\n WCS axis object to plot on.\n **kwargs : dict\n Keyword arguments passed to `~astropy.visualization.make_lupton_rgb`.\n\n Returns\n -------\n ax : `~astropy.visualization.wcsaxes.WCSAxes`\n WCS axis object.\n\n Examples\n --------\n >>> from gammapy.visualization import plot_map_rgb\n >>> from gammapy.maps import Map, MapAxis\n >>> import astropy.units as u\n >>> map_ = Map.read(\"$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz\")\n >>> axis_rgb = MapAxis.from_energy_edges(\n ... [0.1, 0.2, 0.5, 10], unit=u.TeV, name=\"energy\", interp=\"log\"\n ... )\n >>> map_ = map_.resample_axis(axis_rgb)\n >>> kwargs = {\"stretch\": 0.5, \"Q\": 1, \"minimum\": 0.15}\n >>> plot_map_rgb(map_.smooth(0.08*u.deg), **kwargs) #doctest: +SKIP\n \"\"\"\n geom = map_.geom\n if len(geom.axes) != 1 or geom.axes[0].nbin != 3:\n raise ValueError(\n \"One non-spatial axis with exactly 3 bins is needed to plot an RGB image\"\n )\n\n data = [data_slice / np.nanmax(data_slice.flatten()) for data_slice in map_.data]\n data = make_lupton_rgb(*data, **kwargs)\n\n ax = map_._plot_default_axes(ax=ax)\n ax.imshow(data)\n\n if geom.is_allsky:\n ax = map_._plot_format_allsky(ax)\n else:\n ax = map_._plot_format(ax)\n\n # without this the axis limits are changed when calling scatter\n ax.autoscale(enable=False)\n\n return ax\n\n\ndef plot_contour_line(ax, x, y, **kwargs):\n \"\"\"Plot smooth curve from contour points.\"\"\"\n xf = x\n yf = y\n\n # close contour\n if not (x[0] == x[-1] and y[0] == y[-1]):\n xf = np.append(x, x[0])\n yf = np.append(y, y[0])\n\n # curve parametrization must be strictly increasing\n # so we use the cumulative distance of each point from the first one\n dist = np.sqrt(np.diff(xf) ** 2.0 + np.diff(yf) ** 2.0)\n dist = [0] + list(dist)\n t = np.cumsum(dist)\n ts = np.linspace(0, t[-1], 50)\n\n # 1D cubic spline interpolation\n cs = CubicSpline(t, np.c_[xf, yf], bc_type=\"periodic\")\n out = cs(ts)\n\n # plot\n if \"marker\" in kwargs.keys():\n marker = kwargs.pop(\"marker\")\n else:\n marker = \"+\"\n if \"color\" in kwargs.keys():\n color = kwargs.pop(\"color\")\n else:\n color = \"b\"\n\n ax.plot(out[:, 0], out[:, 1], \"-\", color=color, **kwargs)\n ax.plot(xf, yf, linestyle=\"\", marker=marker, color=color)\n\n\ndef plot_theta_squared_table(table):\n \"\"\"Plot the theta2 distribution of counts, excess and significance.\n\n Take the table containing the ON counts, the OFF counts, the acceptance,\n the off acceptance and the alpha (normalisation between ON and OFF)\n for each theta2 bin.\n\n Parameters\n ----------\n table : `~astropy.table.Table`\n Required columns: theta2_min, theta2_max, counts, counts_off and alpha\n \"\"\"\n from gammapy.maps import MapAxis\n from gammapy.maps.axes import UNIT_STRING_FORMAT\n from gammapy.maps.utils import edges_from_lo_hi\n\n theta2_edges = edges_from_lo_hi(\n table[\"theta2_min\"].quantity, table[\"theta2_max\"].quantity\n )\n theta2_axis = MapAxis.from_edges(theta2_edges, interp=\"lin\", name=\"theta_squared\")\n\n ax0 = plt.subplot(2, 1, 1)\n\n x = theta2_axis.center.value\n x_edges = theta2_axis.edges.value\n xerr = (x - x_edges[:-1], x_edges[1:] - x)\n\n ax0.errorbar(\n x,\n table[\"counts\"],\n xerr=xerr,\n yerr=np.sqrt(table[\"counts\"]),\n linestyle=\"None\",\n label=\"Counts\",\n )\n\n ax0.errorbar(\n x,\n table[\"counts_off\"],\n xerr=xerr,\n yerr=np.sqrt(table[\"counts_off\"]),\n linestyle=\"None\",\n label=\"Counts Off\",\n )\n\n 
ax0.errorbar(\n x,\n table[\"excess\"],\n xerr=xerr,\n yerr=(table[\"excess_errn\"], table[\"excess_errp\"]),\n fmt=\"+\",\n linestyle=\"None\",\n label=\"Excess\",\n )\n\n ax0.set_ylabel(\"Counts\")\n ax0.set_xticks([])\n ax0.set_xlabel(\"\")\n ax0.legend()\n\n ax1 = plt.subplot(2, 1, 2)\n ax1.errorbar(x, table[\"sqrt_ts\"], xerr=xerr, linestyle=\"None\")\n ax1.set_xlabel(f\"Theta [{theta2_axis.unit.to_string(UNIT_STRING_FORMAT)}]\")\n ax1.set_ylabel(\"Significance\")\n\n\ndef plot_distribution(\n wcs_map,\n ax=None,\n ncols=3,\n func=None,\n kwargs_hist=None,\n kwargs_axes=None,\n kwargs_fit=None,\n):\n \"\"\"\n Plot the 1D distribution of data inside a map as an histogram. If the dimension of the map is smaller than 2,\n a unique plot will be displayed. Otherwise, if the dimension is 3 or greater, a grid of plot will be displayed.\n\n Parameters\n ----------\n wcs_map : `~gammapy.maps.WcsNDMap`\n A map that contains data to be plotted.\n ax : `~matplotlib.axes.Axes` or list of `~matplotlib.axes.Axes`\n Axis object to plot on. If a list of Axis is provided it has to be the same length as the length of _map.data.\n ncols : int\n Number of columns to plot if a \"plot grid\" was to be done.\n func : function object or str\n The function used to fit a map data histogram or \"norm\". Default is None.\n If None, no fit will be performed. If \"norm\" is given, `scipy.stats.norm.pdf`\n will be passed to `scipy.optimize.curve_fit`.\n kwargs_hist : dict\n Keyword arguments to pass to `matplotlib.pyplot.hist`.\n kwargs_axes : dict\n Keyword arguments to pass to `matplotlib.axes.Axes`.\n kwargs_fit : dict\n Keyword arguments to pass to `scipy.optimize.curve_fit`\n\n Returns\n -------\n axes : `~numpy.ndarray` of `~matplotlib.pyplot.Axes`\n Array of Axes.\n result_list : list of dict\n List of dictionnary that contains the results of `scipy.optimize.curve_fit`. 
The number of elements in the list\n correspond to the dimension of the non-spatial axis of the map.\n The dictionnary contains:\n\n * `axis_edges` : the edges of the non-spatial axis bin used\n * `param` : the best-fit parameters of the input function `func`\n * `covar` : the covariance matrix for the fitted parameters `param`\n * `info_dict` : the `infodict` return of `scipy.optimize.curve_fit`\n\n Examples\n --------\n >>> from gammapy.datasets import MapDataset\n >>> from gammapy.estimators import TSMapEstimator\n >>> from scipy.stats import norm\n >>> from gammapy.visualization import plot_distribution\n >>> dataset = MapDataset.read(\"$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz\")\n >>> tsmap_est = TSMapEstimator().run(dataset)\n >>> axs, res = plot_distribution(tsmap_est.sqrt_ts, func=\"norm\", kwargs_hist={'bins': 75, 'range': (-10, 10), 'density': True})\n >>> # Equivalently, one can do the following:\n >>> func = lambda x, mu, sig : norm.pdf(x, loc=mu, scale=sig)\n >>> axs, res = plot_distribution(tsmap_est.sqrt_ts, func=func, kwargs_hist={'bins': 75, 'range': (-10, 10), 'density': True})\n \"\"\"\n\n from gammapy.maps import WcsNDMap # import here to avoid circular import\n\n if not isinstance(wcs_map, WcsNDMap):\n raise TypeError(\n f\"map_ must be an instance of gammapy.maps.WcsNDMap, given {type(wcs_map)}\"\n )\n\n kwargs_hist = kwargs_hist or {}\n kwargs_axes = kwargs_axes or {}\n kwargs_fit = kwargs_fit or {}\n\n kwargs_hist.setdefault(\"density\", True)\n kwargs_fit.setdefault(\"full_output\", True)\n\n cutout, mask = wcs_map.cutout_and_mask_region()\n idx_x, idx_y = np.where(mask)\n\n data = cutout.data[..., idx_x, idx_y]\n\n if ax is None:\n n_plot = len(data)\n cols = min(ncols, n_plot)\n rows = 1 + (n_plot - 1) // cols\n\n width = 12\n figsize = (width, width * rows / cols)\n\n fig, axes = plt.subplots(\n nrows=rows,\n ncols=cols,\n figsize=figsize,\n )\n cells_in_grid = rows * cols\n else:\n axes = ax\n cells_in_grid = len(ax.flatten())\n\n if not isinstance(axes, np.ndarray):\n axes = np.array([axes])\n\n result_list = []\n\n for idx in range(cells_in_grid):\n\n axe = axes.flat[idx]\n if idx > len(data) - 1:\n axe.set_visible(False)\n continue\n d = data[idx][np.isfinite(data[idx])]\n n, bins, _ = axe.hist(d, **kwargs_hist)\n\n if func is not None:\n kwargs_plot_fit = {\"label\": \"Fit\"}\n centers = 0.5 * (bins[1:] + bins[:-1])\n\n if func == \"norm\":\n\n def func_to_fit(x, mu, sigma):\n return norm.pdf(x, mu, sigma)\n\n pars, cov, infodict, message, _ = curve_fit(\n func_to_fit, centers, n, **kwargs_fit\n )\n\n mu, sig = pars[0], pars[1]\n err_mu, err_sig = np.sqrt(cov[0][0]), np.sqrt(cov[1][1])\n\n label_norm = (\n r\"$\\mu$ = {:.2f} ± {:.2E}\\n$\\sigma$ = {:.2f} ± {:.2E}\".format(\n mu, err_mu, sig, err_sig\n )\n ).replace(r\"\\n\", \"\\n\")\n kwargs_plot_fit[\"label\"] = label_norm\n\n else:\n func_to_fit = func\n\n pars, cov, infodict, message, _ = curve_fit(\n func_to_fit, centers, n, **kwargs_fit\n )\n\n axis_edges = (\n wcs_map.geom.axes[-1].edges[idx],\n wcs_map.geom.axes[-1].edges[idx + 1],\n )\n result_dict = {\n \"axis_edges\": axis_edges,\n \"param\": pars,\n \"covar\": cov,\n \"info_dict\": infodict,\n }\n result_list.append(result_dict)\n log.info(message)\n\n xmin, xmax = kwargs_hist.get(\"range\", (np.min(d), np.max(d)))\n x = np.linspace(xmin, xmax, 1000)\n\n axe.plot(x, func_to_fit(x, *pars), lw=2, color=\"black\", **kwargs_plot_fit)\n\n axe.set(**kwargs_axes)\n axe.legend()\n\n return axes, result_list\n", "path": 
"gammapy/visualization/utils.py" } ]
diff --git a/gammapy/modeling/models/tests/test_core.py b/gammapy/modeling/models/tests/test_core.py
index 94924686e8..682250c642 100644
--- a/gammapy/modeling/models/tests/test_core.py
+++ b/gammapy/modeling/models/tests/test_core.py
@@ -215,7 +215,7 @@ def test_plot_models(caplog):
     models = Models.read("$GAMMAPY_DATA/tests/models/gc_example_models.yaml")

     with mpl_plot_check():
-        models.plot_positions()
+        models.plot_regions(linewidth=2)
         models.plot_regions()

     assert models.wcs_geom.data_shape == models.wcs_geom.wcs.array_shape
diff --git a/gammapy/visualization/utils.py b/gammapy/visualization/utils.py
index bb95def734..122dca4a8f 100644
--- a/gammapy/visualization/utils.py
+++ b/gammapy/visualization/utils.py
@@ -24,8 +24,8 @@
     "ec": "markeredgecolor",
     "facecolor": "markerfacecolor",
     "fc": "markerfacecolor",
-    "linewidth": "markerwidth",
-    "lw": "markerwidth",
+    "linewidth": "markeredgewidth",
+    "lw": "markeredgewidth",
 }
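The second hunk above swaps the invalid `markerwidth` target for `markeredgewidth`. As a rough sketch of how such a mapping is applied (the actual translation step lives elsewhere in Gammapy and is not shown in these files, so treat this as an assumption), patch-style keyword arguments are renamed to their `Line2D` marker equivalents before being passed to the point-marker plotting call:

```python
# Mapping as patched above (copied from the updated utils.py)
ARTIST_TO_LINE_PROPERTIES = {
    "color": "markeredgecolor",
    "edgecolor": "markeredgecolor",
    "ec": "markeredgecolor",
    "facecolor": "markerfacecolor",
    "fc": "markerfacecolor",
    "linewidth": "markeredgewidth",
    "lw": "markeredgewidth",
}

# Patch-style kwargs as passed to plot_regions in the bug report
kwargs = {"edgecolor": "deepskyblue", "facecolor": "deepskyblue", "linewidth": 2}

# Rename to Line2D marker properties; keys without a mapping are kept as-is
translated = {ARTIST_TO_LINE_PROPERTIES.get(key, key): value for key, value in kwargs.items()}
print(translated)
# {'markeredgecolor': 'deepskyblue', 'markerfacecolor': 'deepskyblue', 'markeredgewidth': 2}
```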
microsoft__DeepSpeed-4160
[REQUEST] Handle SIGTERM

The `deepspeed` command catches SIGINT and stops the subprocess ([code](https://github.com/microsoft/DeepSpeed/blob/master/deepspeed/launcher/runner.py#L580)). In Kubernetes, kubelet sends the process a SIGTERM, which deepspeed does not handle, before shutting down a container. It would be helpful if deepspeed could handle SIGTERM and forward it to the training processes.
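A minimal sketch of the requested behaviour (hypothetical code, not DeepSpeed's actual launcher; `train.py` is a stand-in for the user script): register a handler for SIGTERM alongside SIGINT and forward the signal to the child process, so the workers get a chance to shut down before kubelet kills the container.

```python
import signal
import subprocess
import sys

# Hypothetical child process standing in for the launched training job
proc = subprocess.Popen([sys.executable, "train.py"])

def forward_signal(signum, frame):
    # Pass the same signal down to the training process and wait for it to exit
    proc.send_signal(signum)
    proc.wait()
    sys.exit(128 + signum)  # conventional "terminated by signal" exit code

for sig in (signal.SIGINT, signal.SIGTERM):
    signal.signal(sig, forward_signal)

proc.wait()
```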
[ { "content": "# Copyright (c) Microsoft Corporation.\n# SPDX-License-Identifier: Apache-2.0\n\n# DeepSpeed Team\n\"\"\"\nDeepSpeed runner is the main front-end to launching multi-worker\ntraining jobs with DeepSpeed. By default this uses pdsh to parallel\nssh into multiple worker nodes and launch all the necessary processes\nper rank for training.\n\"\"\"\n\nimport os\nimport re\nimport sys\nimport shlex\nimport json\nimport base64\nimport argparse\nimport subprocess\nimport collections\nfrom copy import deepcopy\nimport signal\nimport time\n\nfrom .multinode_runner import PDSHRunner, OpenMPIRunner, MVAPICHRunner, SlurmRunner, MPICHRunner, IMPIRunner\nfrom .constants import PDSH_LAUNCHER, OPENMPI_LAUNCHER, MVAPICH_LAUNCHER, SLURM_LAUNCHER, MPICH_LAUNCHER, IMPI_LAUNCHER\nfrom ..constants import TORCH_DISTRIBUTED_DEFAULT_PORT\nfrom ..nebula.constants import NEBULA_EXPORT_ENVS\nfrom ..utils import logger\n\nfrom ..autotuning import Autotuner\nfrom deepspeed.accelerator import get_accelerator\n\nDLTS_HOSTFILE = \"/job/hostfile\"\nEXPORT_ENVS = ['MLFLOW', 'NCCL', 'PYTHON', 'MV2', 'UCX']\nEXPORT_ENVS += NEBULA_EXPORT_ENVS\nDEEPSPEED_ENVIRONMENT_NAME = os.getenv(\"DS_ENV_FILE\", \".deepspeed_env\")\nDEEPSPEED_ENVIRONMENT_PATHS = [os.path.expanduser(\"~\"), '.']\nPDSH_MAX_FAN_OUT = 1024\n\n# On AISC compute, each node sets environment variables independently, want to prevent\n# exporting rank-0 env variables in case of heterogeneous compute.\nEXCLUDE_ENVS = {'AISC_JOB_NAME': ['NCCL_IB_HCA', 'UCX_NET_DEVICES']}\n\n\ndef parse_args(args=None):\n parser = argparse.ArgumentParser(description=\"DeepSpeed runner to help launch distributed \"\n \"multi-node/multi-gpu training jobs.\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n\n parser.add_argument(\"-H\",\n \"--hostfile\",\n type=str,\n default=DLTS_HOSTFILE,\n help=\"Hostfile path (in MPI style) that defines the \"\n \"resource pool available to the job (e.g., \"\n \"worker-0 slots=4)\")\n\n parser.add_argument(\"-i\",\n \"--include\",\n type=str,\n default=\"\",\n help='''Specify hardware resources to use during execution.\n String format is\n NODE_SPEC[@NODE_SPEC ...],\n where\n NODE_SPEC=NAME[:SLOT[,SLOT ...]].\n If :SLOT is omitted, include all slots on that host.\n Example: -i \"worker-0@worker-1:0,2\" will use all slots\n on worker-0 and slots [0, 2] on worker-1.\n ''')\n\n parser.add_argument(\"-e\",\n \"--exclude\",\n type=str,\n default=\"\",\n help='''Specify hardware resources to NOT use during execution.\n Mutually exclusive with --include. Resource formatting\n is the same as --include.\n Example: -e \"worker-1:0\" will use all available\n resources except slot 0 on worker-1.\n ''')\n\n parser.add_argument(\"--num_nodes\",\n type=int,\n default=-1,\n help=\"Total number of worker nodes to run on, this will use \"\n \"the top N hosts from the given hostfile.\")\n\n parser.add_argument(\"--min_elastic_nodes\",\n type=int,\n default=-1,\n help=\"Minimum number of nodes to run elastic training on. \"\n \"Default is 1 when elastic training is enabled\")\n\n parser.add_argument(\"--max_elastic_nodes\",\n type=int,\n default=-1,\n help=\"Maximum number of nodes to run elastic training on. 
\"\n \"Default is num_nodes when elastic training is enabled\")\n\n parser.add_argument(\"--num_gpus\",\n \"--num_accelerators\",\n type=int,\n default=-1,\n help=\"Max number of GPUs to use on each node, will use \"\n \"[0:N) GPU ids on each node.\")\n\n parser.add_argument(\"--master_port\",\n default=TORCH_DISTRIBUTED_DEFAULT_PORT,\n type=int,\n help=\"(optional) Port used by PyTorch distributed for \"\n \"communication during training.\")\n\n parser.add_argument(\"--master_addr\",\n default=\"\",\n type=str,\n help=\"(optional) IP address of node 0, will be \"\n \"inferred via 'hostname -I' if not specified.\")\n\n parser.add_argument(\"--launcher\",\n default=PDSH_LAUNCHER,\n type=str,\n help=\"(optional) choose launcher backend for multi-node \"\n \"training. Options currently include PDSH, OpenMPI, MVAPICH, SLURM, MPICH, IMPI.\")\n\n parser.add_argument(\"--launcher_args\",\n default=\"\",\n type=str,\n help=\"(optional) pass launcher specific arguments as a \"\n \"single quoted argument.\")\n\n parser.add_argument(\"--module\",\n action=\"store_true\",\n help=\"Change each process to interpret the launch \"\n \"script as a Python module, executing with the same \"\n \"behavior as 'python -m'.\")\n\n parser.add_argument(\"--no_python\",\n action=\"store_true\",\n help=\"Skip prepending the training script with \"\n \"'python' - just execute it directly.\")\n\n parser.add_argument(\"--no_local_rank\",\n action=\"store_true\",\n help=\"Do not pass local_rank as an argument when calling \"\n \"the user's training script.\")\n\n parser.add_argument(\"--no_ssh_check\",\n action=\"store_true\",\n help=\"Do not perform ssh check in multi-node launcher model\")\n\n parser.add_argument(\"--force_multi\",\n action=\"store_true\",\n help=\"Force multi-node launcher mode, helps in cases where user \"\n \"wants to launch on single remote node.\")\n\n parser.add_argument(\"--save_pid\",\n action=\"store_true\",\n help=\"Save file containing launcher process id (pid) at /tmp/<main-pid>.ds, \"\n \"where <main-pid> is the pid of the first process that invoked `deepspeed`. \"\n \"Useful when launching deepspeed processes programmatically.\")\n\n parser.add_argument(\"--enable_each_rank_log\",\n default=\"None\",\n type=str,\n help=\"redirect the stdout and stderr from each rank into different log files\")\n\n parser.add_argument(\"--autotuning\",\n default=\"\",\n choices=[\"tune\", \"run\"],\n type=str,\n help=\"Run DeepSpeed autotuner to discover optimal configuration parameters \"\n \"before running job.\")\n\n parser.add_argument(\"--elastic_training\",\n action=\"store_true\",\n help=\"Enable elastic training support in DeepSpeed.\")\n\n parser.add_argument(\"user_script\", type=str, help=\"User script to launch, followed by any required \"\n \"arguments.\")\n\n parser.add_argument('user_args', nargs=argparse.REMAINDER)\n\n parser.add_argument(\"--bind_cores_to_rank\",\n action=\"store_true\",\n help=\"Bind each rank to different cores of the host\")\n\n parser.add_argument(\"--bind_core_list\",\n type=str,\n default=None,\n help=\"List of cores to bind to with comma separated list of \"\n \"numbers and range. i.e. 1,3-5,7 => [1,3,4,5,7]. 
When not \"\n \"specified, all cores on system would be used rank binding\")\n\n return parser.parse_args(args=args)\n\n\ndef fetch_hostfile(hostfile_path):\n if not os.path.isfile(hostfile_path):\n logger.warning(\"Unable to find hostfile, will proceed with training \"\n \"with local resources only.\")\n return None\n\n # e.g., worker-0 slots=16\n with open(hostfile_path, 'r') as fd:\n hostfile_text = fd.readlines()\n\n return _parse_hostfile(hostfile_text)\n\n\ndef _parse_hostfile(hostfile_lines):\n # Regex matches one or more non-whitespace characters (\\S+) at the start of\n # the line, followed by one or more whitespace characters (\\s+), followed\n # by the string \"slots=\", followed by one or more digits (\\d+).\n pattern = r'^(\\S+)\\s+slots=(\\d+)'\n\n resource_pool = collections.OrderedDict()\n\n for line in hostfile_lines:\n line = line.strip()\n match = re.search(pattern, line)\n if line.startswith(\"#\") or line == \"\":\n # hostfile comment or empty line, ignore\n continue\n elif match:\n host = match.group(1)\n num_slots = int(match.group(2))\n if host in resource_pool:\n logger.error(f\"Bad hostfile text: {hostfile_lines}\")\n raise ValueError(f\"Hostfile contains multiple entries for {host}, unable to proceed with launching\")\n resource_pool[host] = num_slots\n else:\n logger.error(f\"Bad hostfile text: {hostfile_lines}\")\n raise ValueError(f\"Hostfile contains a bad entry: {line}, unable to proceed with launching\")\n\n if len(resource_pool) == 0:\n logger.error(f\"Bad hostfile text: {hostfile_lines}\")\n raise ValueError(\"Hostfile is empty or not formatted correctly, unable to proceed with launching.\")\n\n return resource_pool\n\n\ndef _stable_remove_duplicates(data):\n # Create a new list in the same order as original but with duplicates\n # removed, should never be more than ~16 elements so simple is best\n new_list = []\n for x in data:\n if x not in new_list:\n new_list.append(x)\n return new_list\n\n\ndef parse_resource_filter(host_info, include_str=\"\", exclude_str=\"\"):\n '''Parse an inclusion or exclusion string and filter a hostfile dictionary.\n\n String format is NODE_SPEC[@NODE_SPEC ...], where\n NODE_SPEC = NAME[:SLOT[,SLOT ...]].\n If :SLOT is omitted, include/exclude all slots on that host.\n\n Examples:\n include_str=\"worker-0@worker-1:0,2\" will use all slots on worker-0 and\n slots [0, 2] on worker-1.\n exclude_str=\"worker-1:0\" will use all available resources except\n slot 0 on worker-1.\n '''\n\n # Constants that define our syntax\n NODE_SEP = '@'\n SLOT_LIST_START = ':'\n SLOT_SEP = ','\n\n # Ensure include/exclude are mutually exclusive\n if (include_str != \"\") and (exclude_str != \"\"):\n raise ValueError('include_str and exclude_str are mutually exclusive.')\n\n # no-op\n if (include_str == \"\") and (exclude_str == \"\"):\n return host_info\n\n # Either build from scratch or remove items\n filtered_hosts = dict()\n if include_str:\n parse_str = include_str\n if exclude_str != \"\":\n filtered_hosts = deepcopy(host_info)\n parse_str = exclude_str\n\n # foreach node in the list\n for node_config in parse_str.split(NODE_SEP):\n # Node can either be alone or node:slot,slot,slot\n if SLOT_LIST_START in node_config:\n hostname, slots = node_config.split(SLOT_LIST_START)\n slots = [int(x) for x in slots.split(SLOT_SEP)]\n\n # sanity checks\n if hostname not in host_info:\n raise ValueError(f\"Hostname '{hostname}' not found in hostfile\")\n for slot in slots:\n if slot not in host_info[hostname]:\n raise ValueError(f\"No slot '{slot}' 
specified on host '{hostname}'\")\n\n # If include string, build the list from here\n if include_str:\n filtered_hosts[hostname] = slots\n elif exclude_str:\n for slot in slots:\n logger.info(f'removing {slot} from {hostname}')\n filtered_hosts[hostname].remove(slot)\n\n # User just specified the whole node\n else:\n hostname = node_config\n # sanity check hostname\n if hostname not in host_info:\n raise ValueError(f\"Hostname '{hostname}' not found in hostfile\")\n\n if include_str:\n filtered_hosts[hostname] = host_info[hostname]\n elif exclude_str:\n filtered_hosts[hostname] = []\n\n # Post-processing to remove duplicates and empty nodes\n del_keys = []\n for hostname in filtered_hosts:\n # Remove duplicates\n filtered_hosts[hostname] = _stable_remove_duplicates(filtered_hosts[hostname])\n # Remove empty hosts\n if len(filtered_hosts[hostname]) == 0:\n del_keys.append(hostname)\n for name in del_keys:\n del filtered_hosts[name]\n\n # Lastly, go over filtered_hosts and convert to a OrderedDict() to ensure\n # we map ranks to nodes correctly by maintaining host_info ordering.\n ordered_hosts = collections.OrderedDict()\n for host in host_info:\n if host in filtered_hosts:\n ordered_hosts[host] = filtered_hosts[host]\n\n return ordered_hosts\n\n\ndef parse_inclusion_exclusion(resource_pool, inclusion, exclusion):\n active_resources = collections.OrderedDict()\n for hostname, slots in resource_pool.items():\n active_resources[hostname] = list(range(slots))\n\n return parse_resource_filter(active_resources, include_str=inclusion, exclude_str=exclusion)\n\n\ndef encode_world_info(world_info):\n world_info_json = json.dumps(world_info).encode('utf-8')\n world_info_base64 = base64.urlsafe_b64encode(world_info_json).decode('utf-8')\n return world_info_base64\n\n\ndef run_autotuning(args, active_resources):\n tuner = Autotuner(args, active_resources)\n logger.info(\"[Start] Running autotuning\")\n\n tuner.tune()\n tuner.print_tuning_results()\n\n logger.info(\"[End] Running autotuning\")\n tuner.write_optimal_config()\n\n if args.autotuning == \"run\":\n tuner.run_after_tuning()\n\n\ndef parse_num_nodes(str_num_nodes: str, elastic_training: bool):\n node_list = str_num_nodes.split(\":\")\n\n if len(node_list) == 1:\n min_nodes, max_nodes = int(node_list[0]), -1\n elif len(node_list) == 2 and elastic_training:\n min_nodes, max_nodes = int(node_list[0]), int(node_list[1])\n elif len(node_list) == 2 and not elastic_training:\n raise RuntimeError(\"MIN:MAX format is only supported in elastic training\")\n else:\n raise RuntimeError(\"num_nodes {} is not in MIN:MAX format\".format(str_num_nodes))\n\n return min_nodes, max_nodes\n\n\ndef main(args=None):\n args = parse_args(args)\n\n # For when argparse interprets remaining args as a single string\n args.user_args = shlex.split(\" \".join(args.user_args))\n\n if args.elastic_training:\n assert args.master_addr != \"\", \"Master Addr is required when elastic training is enabled\"\n\n resource_pool = fetch_hostfile(args.hostfile)\n\n # respect CUDA_VISIBLE_DEVICES for a single node and no explicit resource filters\n cuda_visible_devices = os.environ.get(\"CUDA_VISIBLE_DEVICES\", \"\")\n if not resource_pool and len(cuda_visible_devices):\n detected_str = f\"Detected CUDA_VISIBLE_DEVICES={cuda_visible_devices}\"\n if len(args.include) or len(args.exclude) or args.num_nodes > 1 or args.num_gpus > 0:\n print(\n f\"{detected_str} but ignoring it because one or several of --include/--exclude/--num_gpus/--num_nodes cl args were used. 
If you want to use CUDA_VISIBLE_DEVICES don't pass any of these arguments to deepspeed.\"\n )\n else:\n args.include = f\"localhost:{cuda_visible_devices}\"\n print(f\"{detected_str}: setting --include={args.include}\")\n del os.environ[\"CUDA_VISIBLE_DEVICES\"]\n\n if args.num_nodes >= 0 or args.num_gpus >= 0:\n if args.include != \"\" or args.exclude != \"\":\n raise ValueError(\"Cannot specify num_nodes/gpus with include/exclude\")\n\n multi_node_exec = True\n if not resource_pool:\n resource_pool = {}\n device_count = get_accelerator().device_count()\n if device_count == 0:\n raise RuntimeError(\"Unable to proceed, no GPU resources available\")\n resource_pool['localhost'] = device_count\n args.master_addr = \"127.0.0.1\"\n multi_node_exec = False\n\n if not multi_node_exec and args.num_nodes > 1:\n raise ValueError(\"Num nodes is >1 but no extra nodes available via hostfile\")\n\n active_resources = parse_inclusion_exclusion(resource_pool, args.include, args.exclude)\n env = os.environ.copy()\n\n # validate that passwordless-ssh is workly properly with this hostfile\n if multi_node_exec and not args.no_ssh_check:\n first_host = list(active_resources.keys())[0]\n try:\n subprocess.check_call(f'ssh -o PasswordAuthentication=no {first_host} hostname',\n stderr=subprocess.DEVNULL,\n stdout=subprocess.DEVNULL,\n shell=True)\n except subprocess.CalledProcessError:\n raise RuntimeError(\n f\"Using hostfile at {args.hostfile} but host={first_host} was not reachable via ssh. If you are running with a single node please remove {args.hostfile} or setup passwordless ssh.\"\n )\n\n if not args.master_addr:\n assert multi_node_exec\n first_host = list(active_resources.keys())[0]\n hostname_cmd = [f\"ssh {first_host} hostname -I\"]\n try:\n result = subprocess.check_output(hostname_cmd, shell=True)\n except subprocess.CalledProcessError as err:\n logger.error(\n \"Unable to detect suitable master address via `hostname -I`, please manually specify one via --master_addr\"\n )\n raise err\n args.master_addr = result.decode('utf-8').split()[0]\n if not args.master_addr:\n raise RuntimeError(\n f\"Unable to detect suitable master address via `hostname -I`, please manually specify one via --master_addr\"\n )\n logger.info(f\"Using IP address of {args.master_addr} for node {first_host}\")\n\n if args.autotuning != \"\":\n run_autotuning(args, active_resources)\n return\n\n if args.num_nodes > 0:\n updated_active_resources = collections.OrderedDict()\n for count, hostname in enumerate(active_resources.keys()):\n if args.num_nodes == count:\n break\n updated_active_resources[hostname] = active_resources[hostname]\n active_resources = updated_active_resources\n\n if args.num_gpus > 0:\n updated_active_resources = collections.OrderedDict()\n for hostname in active_resources.keys():\n updated_active_resources[hostname] = list(range(args.num_gpus))\n active_resources = updated_active_resources\n\n if args.elastic_training:\n assert not args.no_local_rank, \"--no_local_rank argument is not supported in Elastic training\"\n\n # encode world info as base64 to make it easier to pass via command line\n world_info_base64 = encode_world_info(active_resources)\n\n multi_node_exec = args.force_multi or len(active_resources) > 1\n\n if not multi_node_exec:\n deepspeed_launch = [\n sys.executable, \"-u\", \"-m\", \"deepspeed.launcher.launch\", f\"--world_info={world_info_base64}\",\n f\"--master_addr={args.master_addr}\", f\"--master_port={args.master_port}\"\n ]\n if args.no_python:\n 
deepspeed_launch.append(\"--no_python\")\n if args.module:\n deepspeed_launch.append(\"--module\")\n if args.no_local_rank:\n deepspeed_launch.append(\"--no_local_rank\")\n if args.save_pid:\n deepspeed_launch += [\"--save_pid\", f\"{os.getpid()}\"]\n if args.enable_each_rank_log:\n deepspeed_launch.append(f\"--enable_each_rank_log={args.enable_each_rank_log}\")\n if args.elastic_training:\n deepspeed_launch.append(\"--enable_elastic_training\")\n deepspeed_launch.append(f\"--max_elastic_nodes={args.max_elastic_nodes}\")\n deepspeed_launch.append(f\"--min_elastic_nodes={args.min_elastic_nodes}\")\n if args.bind_cores_to_rank:\n deepspeed_launch.append(\"--bind_cores_to_rank\")\n if args.bind_core_list is not None:\n deepspeed_launch.append(f\"--bind_core_list={args.bind_core_list}\")\n cmd = deepspeed_launch + [args.user_script] + args.user_args\n else:\n args.launcher = args.launcher.lower()\n if args.launcher == PDSH_LAUNCHER:\n runner = PDSHRunner(args, world_info_base64)\n elif args.launcher == OPENMPI_LAUNCHER:\n runner = OpenMPIRunner(args, world_info_base64, resource_pool)\n elif args.launcher == MPICH_LAUNCHER:\n runner = MPICHRunner(args, world_info_base64, resource_pool)\n elif args.launcher == IMPI_LAUNCHER:\n runner = IMPIRunner(args, world_info_base64, resource_pool)\n elif args.launcher == MVAPICH_LAUNCHER:\n runner = MVAPICHRunner(args, world_info_base64, resource_pool)\n elif args.launcher == SLURM_LAUNCHER:\n runner = SlurmRunner(args, world_info_base64, resource_pool)\n else:\n raise NotImplementedError(f\"Unknown launcher {args.launcher}\")\n\n if not runner.backend_exists():\n raise RuntimeError(f\"launcher '{args.launcher}' not installed.\")\n\n curr_path = os.path.abspath('.')\n if 'PYTHONPATH' in env:\n env['PYTHONPATH'] = curr_path + \":\" + env['PYTHONPATH']\n else:\n env['PYTHONPATH'] = curr_path\n\n excluded_vars = []\n for exclude_key, var_list in EXCLUDE_ENVS.items():\n if exclude_key in env.keys():\n # key exists in launcher env -> var list should be used\n excluded_vars += var_list\n\n exports = \"\"\n for var in env.keys():\n if any([var.startswith(name) for name in EXPORT_ENVS]):\n if not any([var == name for name in excluded_vars]):\n runner.add_export(var, env[var])\n\n for environ_path in DEEPSPEED_ENVIRONMENT_PATHS:\n environ_file = DEEPSPEED_ENVIRONMENT_NAME\n # handle if users to enter path for `DS_ENV_FILE`\n if not os.path.isfile(environ_file):\n environ_file = os.path.join(environ_path, DEEPSPEED_ENVIRONMENT_NAME)\n if os.path.isfile(environ_file):\n logger.info(f\"deepspeed_env file = {environ_file}\")\n with open(environ_file, 'r') as fd:\n for var in fd.readlines():\n key, val = var.split('=', maxsplit=1)\n runner.add_export(key, val)\n\n if args.launcher == PDSH_LAUNCHER:\n cmd, kill_cmd = runner.get_cmd(env, active_resources)\n else:\n cmd = runner.get_cmd(env, active_resources)\n\n logger.info(f\"cmd = {' '.join(cmd)}\")\n result = subprocess.Popen(cmd, env=env)\n\n def sigkill_handler(signum, frame):\n result.send_signal(signal.SIGINT)\n time.sleep(0.1)\n result.send_signal(signal.SIGTERM)\n result_kill = subprocess.Popen(kill_cmd, env=env)\n result_kill.wait()\n time.sleep(1)\n sys.exit(1)\n\n if args.launcher == PDSH_LAUNCHER and multi_node_exec:\n signal.signal(signal.SIGINT, sigkill_handler)\n\n result.wait()\n\n # In case of failure must propagate the error-condition back to the caller (usually shell). 
The\n # actual error and traceback should have been printed in the subprocess, so in order to avoid\n # unnecessary noise we just quietly exit here with the same code as the subprocess\n if result.returncode > 0:\n sys.exit(result.returncode)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "deepspeed/launcher/runner.py" } ]
[ { "content": "# Copyright (c) Microsoft Corporation.\n# SPDX-License-Identifier: Apache-2.0\n\n# DeepSpeed Team\n\"\"\"\nDeepSpeed runner is the main front-end to launching multi-worker\ntraining jobs with DeepSpeed. By default this uses pdsh to parallel\nssh into multiple worker nodes and launch all the necessary processes\nper rank for training.\n\"\"\"\n\nimport os\nimport re\nimport sys\nimport shlex\nimport json\nimport base64\nimport argparse\nimport subprocess\nimport collections\nfrom copy import deepcopy\nimport signal\nimport time\n\nfrom .multinode_runner import PDSHRunner, OpenMPIRunner, MVAPICHRunner, SlurmRunner, MPICHRunner, IMPIRunner\nfrom .constants import PDSH_LAUNCHER, OPENMPI_LAUNCHER, MVAPICH_LAUNCHER, SLURM_LAUNCHER, MPICH_LAUNCHER, IMPI_LAUNCHER\nfrom ..constants import TORCH_DISTRIBUTED_DEFAULT_PORT\nfrom ..nebula.constants import NEBULA_EXPORT_ENVS\nfrom ..utils import logger\n\nfrom ..autotuning import Autotuner\nfrom deepspeed.accelerator import get_accelerator\n\nDLTS_HOSTFILE = \"/job/hostfile\"\nEXPORT_ENVS = ['MLFLOW', 'NCCL', 'PYTHON', 'MV2', 'UCX']\nEXPORT_ENVS += NEBULA_EXPORT_ENVS\nDEEPSPEED_ENVIRONMENT_NAME = os.getenv(\"DS_ENV_FILE\", \".deepspeed_env\")\nDEEPSPEED_ENVIRONMENT_PATHS = [os.path.expanduser(\"~\"), '.']\nPDSH_MAX_FAN_OUT = 1024\n\n# On AISC compute, each node sets environment variables independently, want to prevent\n# exporting rank-0 env variables in case of heterogeneous compute.\nEXCLUDE_ENVS = {'AISC_JOB_NAME': ['NCCL_IB_HCA', 'UCX_NET_DEVICES']}\n\n\ndef parse_args(args=None):\n parser = argparse.ArgumentParser(description=\"DeepSpeed runner to help launch distributed \"\n \"multi-node/multi-gpu training jobs.\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n\n parser.add_argument(\"-H\",\n \"--hostfile\",\n type=str,\n default=DLTS_HOSTFILE,\n help=\"Hostfile path (in MPI style) that defines the \"\n \"resource pool available to the job (e.g., \"\n \"worker-0 slots=4)\")\n\n parser.add_argument(\"-i\",\n \"--include\",\n type=str,\n default=\"\",\n help='''Specify hardware resources to use during execution.\n String format is\n NODE_SPEC[@NODE_SPEC ...],\n where\n NODE_SPEC=NAME[:SLOT[,SLOT ...]].\n If :SLOT is omitted, include all slots on that host.\n Example: -i \"worker-0@worker-1:0,2\" will use all slots\n on worker-0 and slots [0, 2] on worker-1.\n ''')\n\n parser.add_argument(\"-e\",\n \"--exclude\",\n type=str,\n default=\"\",\n help='''Specify hardware resources to NOT use during execution.\n Mutually exclusive with --include. Resource formatting\n is the same as --include.\n Example: -e \"worker-1:0\" will use all available\n resources except slot 0 on worker-1.\n ''')\n\n parser.add_argument(\"--num_nodes\",\n type=int,\n default=-1,\n help=\"Total number of worker nodes to run on, this will use \"\n \"the top N hosts from the given hostfile.\")\n\n parser.add_argument(\"--min_elastic_nodes\",\n type=int,\n default=-1,\n help=\"Minimum number of nodes to run elastic training on. \"\n \"Default is 1 when elastic training is enabled\")\n\n parser.add_argument(\"--max_elastic_nodes\",\n type=int,\n default=-1,\n help=\"Maximum number of nodes to run elastic training on. 
\"\n \"Default is num_nodes when elastic training is enabled\")\n\n parser.add_argument(\"--num_gpus\",\n \"--num_accelerators\",\n type=int,\n default=-1,\n help=\"Max number of GPUs to use on each node, will use \"\n \"[0:N) GPU ids on each node.\")\n\n parser.add_argument(\"--master_port\",\n default=TORCH_DISTRIBUTED_DEFAULT_PORT,\n type=int,\n help=\"(optional) Port used by PyTorch distributed for \"\n \"communication during training.\")\n\n parser.add_argument(\"--master_addr\",\n default=\"\",\n type=str,\n help=\"(optional) IP address of node 0, will be \"\n \"inferred via 'hostname -I' if not specified.\")\n\n parser.add_argument(\"--launcher\",\n default=PDSH_LAUNCHER,\n type=str,\n help=\"(optional) choose launcher backend for multi-node \"\n \"training. Options currently include PDSH, OpenMPI, MVAPICH, SLURM, MPICH, IMPI.\")\n\n parser.add_argument(\"--launcher_args\",\n default=\"\",\n type=str,\n help=\"(optional) pass launcher specific arguments as a \"\n \"single quoted argument.\")\n\n parser.add_argument(\"--module\",\n action=\"store_true\",\n help=\"Change each process to interpret the launch \"\n \"script as a Python module, executing with the same \"\n \"behavior as 'python -m'.\")\n\n parser.add_argument(\"--no_python\",\n action=\"store_true\",\n help=\"Skip prepending the training script with \"\n \"'python' - just execute it directly.\")\n\n parser.add_argument(\"--no_local_rank\",\n action=\"store_true\",\n help=\"Do not pass local_rank as an argument when calling \"\n \"the user's training script.\")\n\n parser.add_argument(\"--no_ssh_check\",\n action=\"store_true\",\n help=\"Do not perform ssh check in multi-node launcher model\")\n\n parser.add_argument(\"--force_multi\",\n action=\"store_true\",\n help=\"Force multi-node launcher mode, helps in cases where user \"\n \"wants to launch on single remote node.\")\n\n parser.add_argument(\"--save_pid\",\n action=\"store_true\",\n help=\"Save file containing launcher process id (pid) at /tmp/<main-pid>.ds, \"\n \"where <main-pid> is the pid of the first process that invoked `deepspeed`. \"\n \"Useful when launching deepspeed processes programmatically.\")\n\n parser.add_argument(\"--enable_each_rank_log\",\n default=\"None\",\n type=str,\n help=\"redirect the stdout and stderr from each rank into different log files\")\n\n parser.add_argument(\"--autotuning\",\n default=\"\",\n choices=[\"tune\", \"run\"],\n type=str,\n help=\"Run DeepSpeed autotuner to discover optimal configuration parameters \"\n \"before running job.\")\n\n parser.add_argument(\"--elastic_training\",\n action=\"store_true\",\n help=\"Enable elastic training support in DeepSpeed.\")\n\n parser.add_argument(\"user_script\", type=str, help=\"User script to launch, followed by any required \"\n \"arguments.\")\n\n parser.add_argument('user_args', nargs=argparse.REMAINDER)\n\n parser.add_argument(\"--bind_cores_to_rank\",\n action=\"store_true\",\n help=\"Bind each rank to different cores of the host\")\n\n parser.add_argument(\"--bind_core_list\",\n type=str,\n default=None,\n help=\"List of cores to bind to with comma separated list of \"\n \"numbers and range. i.e. 1,3-5,7 => [1,3,4,5,7]. 
When not \"\n \"specified, all cores on system would be used rank binding\")\n\n return parser.parse_args(args=args)\n\n\ndef fetch_hostfile(hostfile_path):\n if not os.path.isfile(hostfile_path):\n logger.warning(\"Unable to find hostfile, will proceed with training \"\n \"with local resources only.\")\n return None\n\n # e.g., worker-0 slots=16\n with open(hostfile_path, 'r') as fd:\n hostfile_text = fd.readlines()\n\n return _parse_hostfile(hostfile_text)\n\n\ndef _parse_hostfile(hostfile_lines):\n # Regex matches one or more non-whitespace characters (\\S+) at the start of\n # the line, followed by one or more whitespace characters (\\s+), followed\n # by the string \"slots=\", followed by one or more digits (\\d+).\n pattern = r'^(\\S+)\\s+slots=(\\d+)'\n\n resource_pool = collections.OrderedDict()\n\n for line in hostfile_lines:\n line = line.strip()\n match = re.search(pattern, line)\n if line.startswith(\"#\") or line == \"\":\n # hostfile comment or empty line, ignore\n continue\n elif match:\n host = match.group(1)\n num_slots = int(match.group(2))\n if host in resource_pool:\n logger.error(f\"Bad hostfile text: {hostfile_lines}\")\n raise ValueError(f\"Hostfile contains multiple entries for {host}, unable to proceed with launching\")\n resource_pool[host] = num_slots\n else:\n logger.error(f\"Bad hostfile text: {hostfile_lines}\")\n raise ValueError(f\"Hostfile contains a bad entry: {line}, unable to proceed with launching\")\n\n if len(resource_pool) == 0:\n logger.error(f\"Bad hostfile text: {hostfile_lines}\")\n raise ValueError(\"Hostfile is empty or not formatted correctly, unable to proceed with launching.\")\n\n return resource_pool\n\n\ndef _stable_remove_duplicates(data):\n # Create a new list in the same order as original but with duplicates\n # removed, should never be more than ~16 elements so simple is best\n new_list = []\n for x in data:\n if x not in new_list:\n new_list.append(x)\n return new_list\n\n\ndef parse_resource_filter(host_info, include_str=\"\", exclude_str=\"\"):\n '''Parse an inclusion or exclusion string and filter a hostfile dictionary.\n\n String format is NODE_SPEC[@NODE_SPEC ...], where\n NODE_SPEC = NAME[:SLOT[,SLOT ...]].\n If :SLOT is omitted, include/exclude all slots on that host.\n\n Examples:\n include_str=\"worker-0@worker-1:0,2\" will use all slots on worker-0 and\n slots [0, 2] on worker-1.\n exclude_str=\"worker-1:0\" will use all available resources except\n slot 0 on worker-1.\n '''\n\n # Constants that define our syntax\n NODE_SEP = '@'\n SLOT_LIST_START = ':'\n SLOT_SEP = ','\n\n # Ensure include/exclude are mutually exclusive\n if (include_str != \"\") and (exclude_str != \"\"):\n raise ValueError('include_str and exclude_str are mutually exclusive.')\n\n # no-op\n if (include_str == \"\") and (exclude_str == \"\"):\n return host_info\n\n # Either build from scratch or remove items\n filtered_hosts = dict()\n if include_str:\n parse_str = include_str\n if exclude_str != \"\":\n filtered_hosts = deepcopy(host_info)\n parse_str = exclude_str\n\n # foreach node in the list\n for node_config in parse_str.split(NODE_SEP):\n # Node can either be alone or node:slot,slot,slot\n if SLOT_LIST_START in node_config:\n hostname, slots = node_config.split(SLOT_LIST_START)\n slots = [int(x) for x in slots.split(SLOT_SEP)]\n\n # sanity checks\n if hostname not in host_info:\n raise ValueError(f\"Hostname '{hostname}' not found in hostfile\")\n for slot in slots:\n if slot not in host_info[hostname]:\n raise ValueError(f\"No slot '{slot}' 
specified on host '{hostname}'\")\n\n # If include string, build the list from here\n if include_str:\n filtered_hosts[hostname] = slots\n elif exclude_str:\n for slot in slots:\n logger.info(f'removing {slot} from {hostname}')\n filtered_hosts[hostname].remove(slot)\n\n # User just specified the whole node\n else:\n hostname = node_config\n # sanity check hostname\n if hostname not in host_info:\n raise ValueError(f\"Hostname '{hostname}' not found in hostfile\")\n\n if include_str:\n filtered_hosts[hostname] = host_info[hostname]\n elif exclude_str:\n filtered_hosts[hostname] = []\n\n # Post-processing to remove duplicates and empty nodes\n del_keys = []\n for hostname in filtered_hosts:\n # Remove duplicates\n filtered_hosts[hostname] = _stable_remove_duplicates(filtered_hosts[hostname])\n # Remove empty hosts\n if len(filtered_hosts[hostname]) == 0:\n del_keys.append(hostname)\n for name in del_keys:\n del filtered_hosts[name]\n\n # Lastly, go over filtered_hosts and convert to a OrderedDict() to ensure\n # we map ranks to nodes correctly by maintaining host_info ordering.\n ordered_hosts = collections.OrderedDict()\n for host in host_info:\n if host in filtered_hosts:\n ordered_hosts[host] = filtered_hosts[host]\n\n return ordered_hosts\n\n\ndef parse_inclusion_exclusion(resource_pool, inclusion, exclusion):\n active_resources = collections.OrderedDict()\n for hostname, slots in resource_pool.items():\n active_resources[hostname] = list(range(slots))\n\n return parse_resource_filter(active_resources, include_str=inclusion, exclude_str=exclusion)\n\n\ndef encode_world_info(world_info):\n world_info_json = json.dumps(world_info).encode('utf-8')\n world_info_base64 = base64.urlsafe_b64encode(world_info_json).decode('utf-8')\n return world_info_base64\n\n\ndef run_autotuning(args, active_resources):\n tuner = Autotuner(args, active_resources)\n logger.info(\"[Start] Running autotuning\")\n\n tuner.tune()\n tuner.print_tuning_results()\n\n logger.info(\"[End] Running autotuning\")\n tuner.write_optimal_config()\n\n if args.autotuning == \"run\":\n tuner.run_after_tuning()\n\n\ndef parse_num_nodes(str_num_nodes: str, elastic_training: bool):\n node_list = str_num_nodes.split(\":\")\n\n if len(node_list) == 1:\n min_nodes, max_nodes = int(node_list[0]), -1\n elif len(node_list) == 2 and elastic_training:\n min_nodes, max_nodes = int(node_list[0]), int(node_list[1])\n elif len(node_list) == 2 and not elastic_training:\n raise RuntimeError(\"MIN:MAX format is only supported in elastic training\")\n else:\n raise RuntimeError(\"num_nodes {} is not in MIN:MAX format\".format(str_num_nodes))\n\n return min_nodes, max_nodes\n\n\ndef main(args=None):\n args = parse_args(args)\n\n # For when argparse interprets remaining args as a single string\n args.user_args = shlex.split(\" \".join(args.user_args))\n\n if args.elastic_training:\n assert args.master_addr != \"\", \"Master Addr is required when elastic training is enabled\"\n\n resource_pool = fetch_hostfile(args.hostfile)\n\n # respect CUDA_VISIBLE_DEVICES for a single node and no explicit resource filters\n cuda_visible_devices = os.environ.get(\"CUDA_VISIBLE_DEVICES\", \"\")\n if not resource_pool and len(cuda_visible_devices):\n detected_str = f\"Detected CUDA_VISIBLE_DEVICES={cuda_visible_devices}\"\n if len(args.include) or len(args.exclude) or args.num_nodes > 1 or args.num_gpus > 0:\n print(\n f\"{detected_str} but ignoring it because one or several of --include/--exclude/--num_gpus/--num_nodes cl args were used. 
If you want to use CUDA_VISIBLE_DEVICES don't pass any of these arguments to deepspeed.\"\n )\n else:\n args.include = f\"localhost:{cuda_visible_devices}\"\n print(f\"{detected_str}: setting --include={args.include}\")\n del os.environ[\"CUDA_VISIBLE_DEVICES\"]\n\n if args.num_nodes >= 0 or args.num_gpus >= 0:\n if args.include != \"\" or args.exclude != \"\":\n raise ValueError(\"Cannot specify num_nodes/gpus with include/exclude\")\n\n multi_node_exec = True\n if not resource_pool:\n resource_pool = {}\n device_count = get_accelerator().device_count()\n if device_count == 0:\n raise RuntimeError(\"Unable to proceed, no GPU resources available\")\n resource_pool['localhost'] = device_count\n args.master_addr = \"127.0.0.1\"\n multi_node_exec = False\n\n if not multi_node_exec and args.num_nodes > 1:\n raise ValueError(\"Num nodes is >1 but no extra nodes available via hostfile\")\n\n active_resources = parse_inclusion_exclusion(resource_pool, args.include, args.exclude)\n env = os.environ.copy()\n\n # validate that passwordless-ssh is workly properly with this hostfile\n if multi_node_exec and not args.no_ssh_check:\n first_host = list(active_resources.keys())[0]\n try:\n subprocess.check_call(f'ssh -o PasswordAuthentication=no {first_host} hostname',\n stderr=subprocess.DEVNULL,\n stdout=subprocess.DEVNULL,\n shell=True)\n except subprocess.CalledProcessError:\n raise RuntimeError(\n f\"Using hostfile at {args.hostfile} but host={first_host} was not reachable via ssh. If you are running with a single node please remove {args.hostfile} or setup passwordless ssh.\"\n )\n\n if not args.master_addr:\n assert multi_node_exec\n first_host = list(active_resources.keys())[0]\n hostname_cmd = [f\"ssh {first_host} hostname -I\"]\n try:\n result = subprocess.check_output(hostname_cmd, shell=True)\n except subprocess.CalledProcessError as err:\n logger.error(\n \"Unable to detect suitable master address via `hostname -I`, please manually specify one via --master_addr\"\n )\n raise err\n args.master_addr = result.decode('utf-8').split()[0]\n if not args.master_addr:\n raise RuntimeError(\n f\"Unable to detect suitable master address via `hostname -I`, please manually specify one via --master_addr\"\n )\n logger.info(f\"Using IP address of {args.master_addr} for node {first_host}\")\n\n if args.autotuning != \"\":\n run_autotuning(args, active_resources)\n return\n\n if args.num_nodes > 0:\n updated_active_resources = collections.OrderedDict()\n for count, hostname in enumerate(active_resources.keys()):\n if args.num_nodes == count:\n break\n updated_active_resources[hostname] = active_resources[hostname]\n active_resources = updated_active_resources\n\n if args.num_gpus > 0:\n updated_active_resources = collections.OrderedDict()\n for hostname in active_resources.keys():\n updated_active_resources[hostname] = list(range(args.num_gpus))\n active_resources = updated_active_resources\n\n if args.elastic_training:\n assert not args.no_local_rank, \"--no_local_rank argument is not supported in Elastic training\"\n\n # encode world info as base64 to make it easier to pass via command line\n world_info_base64 = encode_world_info(active_resources)\n\n multi_node_exec = args.force_multi or len(active_resources) > 1\n\n if not multi_node_exec:\n deepspeed_launch = [\n sys.executable, \"-u\", \"-m\", \"deepspeed.launcher.launch\", f\"--world_info={world_info_base64}\",\n f\"--master_addr={args.master_addr}\", f\"--master_port={args.master_port}\"\n ]\n if args.no_python:\n 
deepspeed_launch.append(\"--no_python\")\n if args.module:\n deepspeed_launch.append(\"--module\")\n if args.no_local_rank:\n deepspeed_launch.append(\"--no_local_rank\")\n if args.save_pid:\n deepspeed_launch += [\"--save_pid\", f\"{os.getpid()}\"]\n if args.enable_each_rank_log:\n deepspeed_launch.append(f\"--enable_each_rank_log={args.enable_each_rank_log}\")\n if args.elastic_training:\n deepspeed_launch.append(\"--enable_elastic_training\")\n deepspeed_launch.append(f\"--max_elastic_nodes={args.max_elastic_nodes}\")\n deepspeed_launch.append(f\"--min_elastic_nodes={args.min_elastic_nodes}\")\n if args.bind_cores_to_rank:\n deepspeed_launch.append(\"--bind_cores_to_rank\")\n if args.bind_core_list is not None:\n deepspeed_launch.append(f\"--bind_core_list={args.bind_core_list}\")\n cmd = deepspeed_launch + [args.user_script] + args.user_args\n else:\n args.launcher = args.launcher.lower()\n if args.launcher == PDSH_LAUNCHER:\n runner = PDSHRunner(args, world_info_base64)\n elif args.launcher == OPENMPI_LAUNCHER:\n runner = OpenMPIRunner(args, world_info_base64, resource_pool)\n elif args.launcher == MPICH_LAUNCHER:\n runner = MPICHRunner(args, world_info_base64, resource_pool)\n elif args.launcher == IMPI_LAUNCHER:\n runner = IMPIRunner(args, world_info_base64, resource_pool)\n elif args.launcher == MVAPICH_LAUNCHER:\n runner = MVAPICHRunner(args, world_info_base64, resource_pool)\n elif args.launcher == SLURM_LAUNCHER:\n runner = SlurmRunner(args, world_info_base64, resource_pool)\n else:\n raise NotImplementedError(f\"Unknown launcher {args.launcher}\")\n\n if not runner.backend_exists():\n raise RuntimeError(f\"launcher '{args.launcher}' not installed.\")\n\n curr_path = os.path.abspath('.')\n if 'PYTHONPATH' in env:\n env['PYTHONPATH'] = curr_path + \":\" + env['PYTHONPATH']\n else:\n env['PYTHONPATH'] = curr_path\n\n excluded_vars = []\n for exclude_key, var_list in EXCLUDE_ENVS.items():\n if exclude_key in env.keys():\n # key exists in launcher env -> var list should be used\n excluded_vars += var_list\n\n exports = \"\"\n for var in env.keys():\n if any([var.startswith(name) for name in EXPORT_ENVS]):\n if not any([var == name for name in excluded_vars]):\n runner.add_export(var, env[var])\n\n for environ_path in DEEPSPEED_ENVIRONMENT_PATHS:\n environ_file = DEEPSPEED_ENVIRONMENT_NAME\n # handle if users to enter path for `DS_ENV_FILE`\n if not os.path.isfile(environ_file):\n environ_file = os.path.join(environ_path, DEEPSPEED_ENVIRONMENT_NAME)\n if os.path.isfile(environ_file):\n logger.info(f\"deepspeed_env file = {environ_file}\")\n with open(environ_file, 'r') as fd:\n for var in fd.readlines():\n key, val = var.split('=', maxsplit=1)\n runner.add_export(key, val)\n\n if args.launcher == PDSH_LAUNCHER:\n cmd, kill_cmd = runner.get_cmd(env, active_resources)\n else:\n cmd = runner.get_cmd(env, active_resources)\n\n logger.info(f\"cmd = {' '.join(cmd)}\")\n result = subprocess.Popen(cmd, env=env)\n\n def sigkill_handler(signum, frame):\n result.send_signal(signal.SIGINT)\n time.sleep(0.1)\n result.send_signal(signal.SIGTERM)\n result_kill = subprocess.Popen(kill_cmd, env=env)\n result_kill.wait()\n time.sleep(1)\n sys.exit(1)\n\n if args.launcher == PDSH_LAUNCHER and multi_node_exec:\n signal.signal(signal.SIGINT, sigkill_handler)\n signal.signal(signal.SIGTERM, sigkill_handler)\n\n result.wait()\n\n # In case of failure must propagate the error-condition back to the caller (usually shell). 
The\n # actual error and traceback should have been printed in the subprocess, so in order to avoid\n # unnecessary noise we just quietly exit here with the same code as the subprocess\n if result.returncode > 0:\n sys.exit(result.returncode)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "deepspeed/launcher/runner.py" } ]
diff --git a/deepspeed/launcher/runner.py b/deepspeed/launcher/runner.py
index aa7714bfa0b7..60bce75aeebd 100755
--- a/deepspeed/launcher/runner.py
+++ b/deepspeed/launcher/runner.py
@@ -578,6 +578,7 @@ def sigkill_handler(signum, frame):
 
     if args.launcher == PDSH_LAUNCHER and multi_node_exec:
         signal.signal(signal.SIGINT, sigkill_handler)
+        signal.signal(signal.SIGTERM, sigkill_handler)
 
     result.wait()
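For context on the one-line change above: sigkill_handler already forwards SIGINT and then SIGTERM to the launched subprocess, runs the separate kill command, and exits, but it was previously registered only for SIGINT; the patch registers it for SIGTERM as well. A minimal, self-contained sketch of that signal-forwarding pattern follows — the child command and kill command are placeholders chosen for illustration, not DeepSpeed's actual launch lines.

import signal
import subprocess
import sys
import time

# Placeholder commands; in the real launcher these would be the pdsh
# launch command and its matching kill command.
cmd = [sys.executable, "-c", "import time; time.sleep(60)"]
kill_cmd = [sys.executable, "-c", "print('cleaning up remote ranks')"]

result = subprocess.Popen(cmd)


def sigkill_handler(signum, frame):
    # Forward an interrupt, then a terminate, to the child process,
    # run the cleanup command, and exit with a non-zero status.
    result.send_signal(signal.SIGINT)
    time.sleep(0.1)
    result.send_signal(signal.SIGTERM)
    subprocess.Popen(kill_cmd).wait()
    sys.exit(1)


# Registering the handler for both signals (as the patch does) means an
# external `kill <pid>` triggers the same cleanup path as Ctrl-C.
signal.signal(signal.SIGINT, sigkill_handler)
signal.signal(signal.SIGTERM, sigkill_handler)

result.wait()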
comic__grand-challenge.org-954
Replace pipenv with poetry
Pipenv development seems to have stalled and there are several bugs that I'm not convinced will be fixed anytime soon; we should migrate to poetry.
[ { "content": "import glob\nimport os\nimport re\nimport uuid\nfrom datetime import timedelta\nfrom distutils.util import strtobool as strtobool_i\n\nimport sentry_sdk\nfrom corsheaders.defaults import default_headers\nfrom django.contrib.messages import constants as messages\nfrom django.core.exceptions import ImproperlyConfigured\nfrom sentry_sdk.integrations.celery import CeleryIntegration\nfrom sentry_sdk.integrations.django import DjangoIntegration\nfrom sentry_sdk.integrations.redis import RedisIntegration\n\nfrom config.denylist import USERNAME_DENYLIST\n\n\ndef strtobool(val) -> bool:\n \"\"\" Returns disutils.util.strtobool as a boolean \"\"\"\n return bool(strtobool_i(val))\n\n\nDEBUG = strtobool(os.environ.get(\"DEBUG\", \"True\"))\n\nADMINS = (\n # ('Your Name', '[email protected]'),\n)\n\n# Who gets the 404 notifications?\nmanager_email = os.environ.get(\"MANAGER_EMAIL\", None)\nif manager_email:\n MANAGERS = [(\"Manager\", manager_email)]\n\nIGNORABLE_404_URLS = [\n re.compile(r\".*\\.(php|cgi|asp).*\"),\n re.compile(r\"^/phpmyadmin.*\"),\n re.compile(r\"^/gen204.*\"),\n re.compile(r\"^/wp-content.*\"),\n re.compile(r\".*/trackback.*\"),\n]\n\n# Used as starting points for various other paths. realpath(__file__) starts in\n# the \"Comic\" app dir. We need to go one dir higher so path.join(\"..\")\nSITE_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))\nAPPS_DIR = os.path.join(SITE_ROOT, \"grandchallenge\")\n\nDATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql_psycopg2\",\n \"NAME\": os.environ.get(\"POSTGRES_DB\", \"comic\"),\n \"USER\": os.environ.get(\"POSTGRES_USER\", \"comic\"),\n \"PASSWORD\": os.environ.get(\"POSTGRES_PASSWORD\", \"secretpassword\"),\n \"HOST\": os.environ.get(\"POSTGRES_HOST\", \"postgres\"),\n \"PORT\": \"\",\n }\n}\n\nEMAIL_BACKEND = \"djcelery_email.backends.CeleryEmailBackend\"\nEMAIL_HOST = os.environ.get(\"EMAIL_HOST\", \"\")\nEMAIL_HOST_USER = os.environ.get(\"EMAIL_HOST_USER\", \"\")\nEMAIL_HOST_PASSWORD = os.environ.get(\"EMAIL_HOST_PASSWORD\", \"\")\nEMAIL_PORT = int(os.environ.get(\"EMAIL_PORT\", \"25\"))\nEMAIL_USE_TLS = strtobool(os.environ.get(\"EMAIL_USE_TLS\", \"False\"))\nDEFAULT_FROM_EMAIL = os.environ.get(\n \"DEFAULT_FROM_EMAIL\", \"webmaster@localhost\"\n)\nSERVER_EMAIL = os.environ.get(\"SERVER_EMAIL\", \"root@localhost\")\n\nANONYMOUS_USER_NAME = \"AnonymousUser\"\n\nAUTH_PROFILE_MODULE = \"profiles.UserProfile\"\nUSERENA_USE_HTTPS = False\nUSERENA_DEFAULT_PRIVACY = \"open\"\nLOGIN_URL = \"/accounts/signin/\"\nLOGOUT_URL = \"/accounts/signout/\"\n\nLOGIN_REDIRECT_URL = \"/accounts/login-redirect/\"\nSOCIAL_AUTH_LOGIN_REDIRECT_URL = LOGIN_REDIRECT_URL\n\n# Do not give message popups saying \"you have been logged out\". Users are expected\n# to know they have been logged out when they click the logout button\nUSERENA_USE_MESSAGES = (False,)\n\n# Local time zone for this installation. Choices can be found here:\n# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name\n# although not all choices may be available on all operating systems.\n# On Unix systems, a value of None will cause Django to use the same\n# timezone as the operating system.\n# If running in a Windows environment this must be set to the same as your\n# system time zone.\nTIME_ZONE = \"UTC\"\n\n# Language code for this installation. 
All choices can be found here:\n# http://www.i18nguy.com/unicode/language-identifiers.html\nLANGUAGE_CODE = \"en-us\"\n\nSITE_ID = int(os.environ.get(\"SITE_ID\", \"1\"))\n\n# If you set this to False, Django will make some optimizations so as not\n# to load the internationalization machinery.\nUSE_I18N = True\n\n# If you set this to False, Django will not format dates, numbers and\n# calendars according to the current locale.\nUSE_L10N = True\n\n# If you set this to False, Django will not use timezone-aware datetimes.\nUSE_TZ = True\n\n##############################################################################\n#\n# Storage\n#\n##############################################################################\nDEFAULT_FILE_STORAGE = \"django.core.files.storage.FileSystemStorage\"\n\n# Absolute filesystem path to the directory that will hold user-uploaded files.\n# Example: \"/home/media/media.lawrence.com/media/\"\nMEDIA_ROOT = os.environ.get(\"MEDIA_ROOT\", \"/dbox/Dropbox/media/\")\n\n# URL that handles the media served from MEDIA_ROOT. Make sure to use a\n# trailing slash.\n# Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\"\nMEDIA_URL = \"/media/\"\n\n# In each challenge there can be a single directory out of which files can be\n# downloaded without logging in.\nCOMIC_PUBLIC_FOLDER_NAME = \"public_html\"\nCOMIC_ADDITIONAL_PUBLIC_FOLDER_NAMES = [\"results/public\"]\n\n# In each challenge there can be a single directory from which files can only\n# be downloaded by registered participants of that project\nCOMIC_REGISTERED_ONLY_FOLDER_NAME = \"datasets\"\n\n# Subdirectories on root for various files\nJQFILEUPLOAD_UPLOAD_SUBIDRECTORY = \"jqfileupload\"\nIMAGE_FILES_SUBDIRECTORY = \"images\"\nEVALUATION_FILES_SUBDIRECTORY = \"evaluation\"\n\n# This is for storing files that should not be served to the public\nAWS_DEFAULT_ACL = None\nPRIVATE_S3_STORAGE_KWARGS = {\n \"access_key\": os.environ.get(\"PRIVATE_S3_STORAGE_ACCESS_KEY\", \"\"),\n \"secret_key\": os.environ.get(\"PRIVATE_S3_STORAGE_SECRET_KEY\", \"\"),\n \"bucket_name\": os.environ.get(\n \"PRIVATE_S3_STORAGE_BUCKET_NAME\", \"grand-challenge-private\"\n ),\n \"auto_create_bucket\": True,\n \"endpoint_url\": os.environ.get(\n \"PRIVATE_S3_STORAGE_ENDPOINT_URL\", \"http://minio-private:9000\"\n ),\n # Do not overwrite files, we get problems with jqfileupload otherwise\n \"file_overwrite\": False,\n}\nPROTECTED_S3_STORAGE_KWARGS = {\n \"access_key\": os.environ.get(\"PROTECTED_S3_STORAGE_ACCESS_KEY\", \"\"),\n \"secret_key\": os.environ.get(\"PROTECTED_S3_STORAGE_SECRET_KEY\", \"\"),\n \"bucket_name\": os.environ.get(\n \"PROTECTED_S3_STORAGE_BUCKET_NAME\", \"grand-challenge-protected\"\n ),\n \"auto_create_bucket\": True,\n \"endpoint_url\": os.environ.get(\n \"PROTECTED_S3_STORAGE_ENDPOINT_URL\", \"http://minio-protected:9000\"\n ),\n # This is the domain where people will be able to go to download data\n # from this bucket. 
Usually we would use reverse to find this out,\n # but this needs to be defined before the database is populated\n \"custom_domain\": os.environ.get(\n \"PROTECTED_S3_CUSTOM_DOMAIN\", \"gc.localhost/media\"\n ),\n}\n\n##############################################################################\n#\n# Caching\n#\n##############################################################################\n\nCACHES = {\n \"default\": {\n \"BACKEND\": \"speedinfo.backends.proxy_cache\",\n \"CACHE_BACKEND\": \"django.core.cache.backends.memcached.MemcachedCache\",\n \"LOCATION\": \"memcached:11211\",\n }\n}\n\nROOT_URLCONF = \"config.urls\"\nSUBDOMAIN_URL_CONF = \"grandchallenge.subdomains.urls\"\nDEFAULT_SCHEME = os.environ.get(\"DEFAULT_SCHEME\", \"https\")\n\nSESSION_COOKIE_DOMAIN = os.environ.get(\n \"SESSION_COOKIE_DOMAIN\", \".gc.localhost\"\n)\n# We're always running behind a proxy so set these to true\nSESSION_COOKIE_SECURE = True\nCSRF_COOKIE_SECURE = True\nSECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", \"https\")\n\n# Set the allowed hosts to the cookie domain\nALLOWED_HOSTS = [SESSION_COOKIE_DOMAIN, \"web\"]\n\n# Security options\nSECURE_HSTS_SECONDS = int(os.environ.get(\"SECURE_HSTS_SECONDS\", \"0\"))\nSECURE_HSTS_INCLUDE_SUBDOMAINS = strtobool(\n os.environ.get(\"SECURE_HSTS_INCLUDE_SUBDOMAINS\", \"False\")\n)\nSECURE_CONTENT_TYPE_NOSNIFF = strtobool(\n os.environ.get(\"SECURE_CONTENT_TYPE_NOSNIFF\", \"False\")\n)\nSECURE_BROWSER_XSS_FILTER = strtobool(\n os.environ.get(\"SECURE_BROWSER_XSS_FILTER\", \"False\")\n)\nX_FRAME_OPTIONS = os.environ.get(\"X_FRAME_OPTIONS\", \"SAMEORIGIN\")\n\n# Absolute path to the directory static files should be collected to.\n# Don't put anything in this directory yourself; store your static files\n# in apps' \"static/\" subdirectories and in STATICFILES_DIRS.\n# Example: \"/home/media/media.lawrence.com/static/\"\nSTATIC_ROOT = \"/static/\"\n\nSTATIC_HOST = os.environ.get(\"DJANGO_STATIC_HOST\", \"\")\nSTATIC_URL = f\"{STATIC_HOST}/static/\"\n\n# List of finder classes that know how to find static files in\n# various locations.\nSTATICFILES_FINDERS = (\n \"django.contrib.staticfiles.finders.FileSystemFinder\",\n \"django.contrib.staticfiles.finders.AppDirectoriesFinder\",\n)\n\n# Vendored static files will be put here\nSTATICFILES_DIRS = [\"/opt/static/\"]\n\nSTATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\n\n# Make this unique, and don't share it with anybody.\nSECRET_KEY = os.environ.get(\n \"SECRET_KEY\", \"d=%^l=xa02an9jn-$!*hy1)5yox$a-$2(ejt-2smimh=j4%8*b\"\n)\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [str(APPS_DIR)],\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.contrib.auth.context_processors.auth\",\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.i18n\",\n \"django.template.context_processors.media\",\n \"django.template.context_processors.static\",\n \"django.template.context_processors.tz\",\n \"django.template.context_processors.request\",\n \"django.contrib.messages.context_processors.messages\",\n \"grandchallenge.core.context_processors.challenge\",\n \"grandchallenge.core.context_processors.google_keys\",\n \"grandchallenge.core.context_processors.debug\",\n \"grandchallenge.core.context_processors.sentry_dsn\",\n ]\n },\n }\n]\n\nMIDDLEWARE = (\n \"django.middleware.security.SecurityMiddleware\", # Keep security at top\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n # Keep 
whitenoise after security and before all else\n \"corsheaders.middleware.CorsMiddleware\", # Keep CORS near the top\n \"django.middleware.common.BrokenLinkEmailsMiddleware\",\n # Keep BrokenLinkEmailsMiddleware near the top\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.contrib.sites.middleware.CurrentSiteMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"simple_history.middleware.HistoryRequestMiddleware\",\n # subdomain_middleware after CurrentSiteMiddleware\n \"grandchallenge.subdomains.middleware.subdomain_middleware\",\n \"grandchallenge.subdomains.middleware.challenge_subdomain_middleware\",\n \"grandchallenge.subdomains.middleware.subdomain_urlconf_middleware\",\n # speedinfo at the end but before FetchFromCacheMiddleware\n \"speedinfo.middleware.ProfilerMiddleware\",\n)\n\n# Python dotted path to the WSGI application used by Django's runserver.\nWSGI_APPLICATION = \"config.wsgi.application\"\n\nDJANGO_APPS = [\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.sites\",\n \"django.contrib.messages\",\n \"whitenoise.runserver_nostatic\", # Keep whitenoise above staticfiles\n \"django.contrib.staticfiles\",\n \"django.contrib.humanize\",\n \"django.contrib.admin\",\n \"django.contrib.postgres\",\n \"django.contrib.flatpages\",\n]\n\nTHIRD_PARTY_APPS = [\n \"django_celery_results\", # database results backend\n \"django_celery_beat\", # periodic tasks\n \"djcelery_email\", # asynchronous emails\n \"userena\", # user profiles\n \"guardian\", # userena dependency, per object permissions\n \"easy_thumbnails\", # userena dependency\n \"social_django\", # social authentication with oauth2\n \"rest_framework\", # provides REST API\n \"rest_framework.authtoken\", # token auth for REST API\n \"crispy_forms\", # bootstrap forms\n \"favicon\", # favicon management\n \"django_select2\", # for multiple choice widgets\n \"django_summernote\", # for WYSIWYG page editing\n \"sorl.thumbnail\", # for dynamic thumbnails\n \"dal\", # for autocompletion of selection fields\n \"dal_select2\", # for autocompletion of selection fields\n \"django_extensions\", # custom extensions\n \"simple_history\", # for object history\n \"corsheaders\", # to allow api communication from subdomains\n \"speedinfo\", # for profiling views\n]\n\nLOCAL_APPS = [\n \"grandchallenge.admins\",\n \"grandchallenge.api\",\n \"grandchallenge.challenges\",\n \"grandchallenge.core\",\n \"grandchallenge.evaluation\",\n \"grandchallenge.jqfileupload\",\n \"grandchallenge.pages\",\n \"grandchallenge.participants\",\n \"grandchallenge.profiles\",\n \"grandchallenge.teams\",\n \"grandchallenge.uploads\",\n \"grandchallenge.cases\",\n \"grandchallenge.algorithms\",\n \"grandchallenge.container_exec\",\n \"grandchallenge.datasets\",\n \"grandchallenge.submission_conversion\",\n \"grandchallenge.statistics\",\n \"grandchallenge.archives\",\n \"grandchallenge.patients\",\n \"grandchallenge.studies\",\n \"grandchallenge.registrations\",\n \"grandchallenge.annotations\",\n \"grandchallenge.retina_core\",\n \"grandchallenge.retina_importers\",\n \"grandchallenge.retina_api\",\n \"grandchallenge.worklists\",\n \"grandchallenge.workstations\",\n \"grandchallenge.reader_studies\",\n 
\"grandchallenge.workstation_configs\",\n]\n\nINSTALLED_APPS = DJANGO_APPS + LOCAL_APPS + THIRD_PARTY_APPS\n\nADMIN_URL = f'{os.environ.get(\"DJANGO_ADMIN_URL\", \"django-admin\")}/'\n\nAUTHENTICATION_BACKENDS = (\n \"social_core.backends.google.GoogleOAuth2\",\n \"userena.backends.UserenaAuthenticationBackend\",\n \"guardian.backends.ObjectPermissionBackend\",\n \"django.contrib.auth.backends.ModelBackend\",\n)\n\nGOOGLE_MAPS_API_KEY = os.environ.get(\"GOOGLE_MAPS_API_KEY\", \"\")\nGOOGLE_ANALYTICS_ID = os.environ.get(\"GOOGLE_ANALYTICS_ID\", \"GA_TRACKING_ID\")\n\nSOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ.get(\n \"SOCIAL_AUTH_GOOGLE_OAUTH2_KEY\", \"\"\n)\nSOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ.get(\n \"SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET\", \"\"\n)\n\n# TODO: JM - Add the profile filling as a partial\nSOCIAL_AUTH_PIPELINE = (\n \"social_core.pipeline.social_auth.social_details\",\n \"social_core.pipeline.social_auth.social_uid\",\n \"social_core.pipeline.social_auth.auth_allowed\",\n \"social_core.pipeline.social_auth.social_user\",\n \"social_core.pipeline.social_auth.associate_by_email\",\n \"social_core.pipeline.user.get_username\",\n \"social_core.pipeline.user.create_user\",\n \"grandchallenge.profiles.social_auth.pipeline.profile.create_profile\",\n \"social_core.pipeline.social_auth.associate_user\",\n \"social_core.pipeline.social_auth.load_extra_data\",\n \"social_core.pipeline.user.user_details\",\n)\n\n# Do not sanitize redirects for social auth so we can redirect back to\n# other subdomains\nSOCIAL_AUTH_SANITIZE_REDIRECTS = False\nSOCIAL_AUTH_REDIRECT_IS_HTTPS = True\n\n# Django 1.6 introduced a new test runner, use it\nTEST_RUNNER = \"django.test.runner.DiscoverRunner\"\n\n# WYSIWYG editing with Summernote\nSUMMERNOTE_THEME = \"bs4\"\nSUMMERNOTE_CONFIG = {\n \"attachment_model\": \"uploads.SummernoteAttachment\",\n \"attachment_require_authentication\": True,\n \"summernote\": {\n \"width\": \"100%\",\n \"toolbar\": [\n [\"style\", [\"style\"]],\n [\n \"font\",\n [\"bold\", \"italic\", \"underline\", \"strikethrough\", \"clear\"],\n ],\n [\"para\", [\"ul\", \"ol\", \"paragraph\"]],\n [\"insert\", [\"link\", \"picture\", \"hr\"]],\n [\"view\", [\"fullscreen\", \"codeview\"]],\n [\"help\", [\"help\"]],\n ],\n },\n}\n\n# sorl.thumbnail settings\nTHUMBNAIL_FORMAT = \"PNG\"\nTHUMBNAIL_ALTERNATIVE_RESOLUTIONS = [1.5, 2]\n\n# Settings for allowed HTML\nBLEACH_ALLOWED_TAGS = [\n \"a\",\n \"abbr\",\n \"acronym\",\n \"b\",\n \"blockquote\",\n \"br\",\n \"code\",\n \"col\",\n \"div\",\n \"em\",\n \"h1\",\n \"h2\",\n \"h3\",\n \"h4\",\n \"h5\",\n \"h6\",\n \"hr\",\n \"i\",\n \"iframe\", # Allowed for now for continuous registration challenge\n \"img\",\n \"li\",\n \"ol\",\n \"p\",\n \"pre\",\n \"span\",\n \"strike\",\n \"strong\",\n \"table\",\n \"tbody\",\n \"thead\",\n \"td\",\n \"th\",\n \"tr\",\n \"u\",\n \"ul\",\n]\nBLEACH_ALLOWED_ATTRIBUTES = {\n \"*\": [\"class\", \"data-toggle\", \"id\", \"style\", \"role\"],\n \"a\": [\"href\", \"title\"],\n \"abbr\": [\"title\"],\n \"acronym\": [\"title\"],\n \"div\": [\"data-geochart\"], # Required for geocharts\n \"iframe\": [\n \"src\",\n \"sandbox\",\n \"data-groupname\",\n \"scrolling\",\n \"height\",\n ], # For continuous registration challenge and google group\n \"img\": [\"height\", \"src\", \"width\"],\n # For bootstrap tables: https://getbootstrap.com/docs/4.3/content/tables/\n \"th\": [\"scope\", \"colspan\"],\n \"td\": [\"colspan\"],\n}\nBLEACH_ALLOWED_STYLES = [\"height\", \"margin-left\", \"text-align\", 
\"width\"]\nBLEACH_ALLOWED_PROTOCOLS = [\"http\", \"https\", \"mailto\"]\nBLEACH_STRIP = strtobool(os.environ.get(\"BLEACH_STRIP\", \"True\"))\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n \"NAME\": \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\"\n },\n {\"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\"},\n {\n \"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"\n },\n {\n \"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"\n },\n]\n\n# A sample logging configuration. More info in configuration can be found at\n# https://docs.djangoproject.com/en/dev/topics/logging/ .\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"handlers\": {\n \"console\": {\"level\": \"DEBUG\", \"class\": \"logging.StreamHandler\"}\n },\n \"loggers\": {\n \"grandchallenge\": {\n \"level\": \"WARNING\",\n \"handlers\": [\"console\"],\n \"propagate\": True,\n },\n \"django.db.backends\": {\n \"level\": \"ERROR\",\n \"handlers\": [\"console\"],\n \"propagate\": False,\n },\n \"werkzeug\": {\n \"handlers\": [\"console\"],\n \"level\": \"DEBUG\",\n \"propagate\": True,\n },\n },\n}\n\nSENTRY_DSN = os.environ.get(\"DJANGO_SENTRY_DSN\", \"\")\nSENTRY_ENABLE_JS_REPORTING = strtobool(\n os.environ.get(\"SENTRY_ENABLE_JS_REPORTING\", \"False\")\n)\n\nsentry_sdk.init(\n dsn=SENTRY_DSN,\n integrations=[\n DjangoIntegration(),\n CeleryIntegration(),\n RedisIntegration(),\n ],\n)\n\nREST_FRAMEWORK = {\n \"DEFAULT_PERMISSION_CLASSES\": (\"rest_framework.permissions.IsAdminUser\",),\n \"DEFAULT_AUTHENTICATION_CLASSES\": (\n \"rest_framework.authentication.TokenAuthentication\",\n \"rest_framework.authentication.SessionAuthentication\",\n ),\n \"DEFAULT_PAGINATION_CLASS\": \"grandchallenge.api.pagination.MaxLimit1000OffsetPagination\",\n \"PAGE_SIZE\": 100,\n}\n\nVALID_SUBDOMAIN_REGEX = r\"[A-Za-z0-9](?:[A-Za-z0-9\\-]{0,61}[A-Za-z0-9])?\"\nCORS_ORIGIN_REGEX_WHITELIST = [\n rf\"^https:\\/\\/{VALID_SUBDOMAIN_REGEX}{re.escape(SESSION_COOKIE_DOMAIN)}$\"\n]\nCORS_ALLOW_HEADERS = [\n *default_headers,\n \"content-range\",\n \"content-disposition\",\n \"content-description\",\n]\n\nCELERY_BROKER_URL = os.environ.get(\"CELERY_BROKER_URL\", \"redis://redis:6379/0\")\nCELERY_RESULT_BACKEND = os.environ.get(\"CELERY_RESULT_BACKEND\", \"django-db\")\nCELERY_RESULT_PERSISTENT = True\nCELERY_TASK_SOFT_TIME_LIMIT = int(\n os.environ.get(\"CELERY_TASK_SOFT_TIME_LIMIT\", \"7200\")\n)\nCELERY_TASK_TIME_LIMIT = int(os.environ.get(\"CELERY_TASK_TIME_LIMIT\", \"7260\"))\n\nCONTAINER_EXEC_DOCKER_BASE_URL = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_BASE_URL\", \"unix://var/run/docker.sock\"\n)\nCONTAINER_EXEC_DOCKER_TLSVERIFY = strtobool(\n os.environ.get(\"CONTAINER_EXEC_DOCKER_TLSVERIFY\", \"False\")\n)\nCONTAINER_EXEC_DOCKER_TLSCACERT = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_TLSCACERT\", \"\"\n)\nCONTAINER_EXEC_DOCKER_TLSCERT = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_TLSCERT\", \"\"\n)\nCONTAINER_EXEC_DOCKER_TLSKEY = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_TLSKEY\", \"\"\n)\nCONTAINER_EXEC_MEMORY_LIMIT = os.environ.get(\n \"CONTAINER_EXEC_MEMORY_LIMIT\", \"4g\"\n)\nCONTAINER_EXEC_IO_IMAGE = \"alpine:3.9\"\nCONTAINER_EXEC_IO_SHA256 = (\n \"sha256:055936d3920576da37aa9bc460d70c5f212028bda1c08c0879aedf03d7a66ea1\"\n)\nCONTAINER_EXEC_CPU_QUOTA = int(\n os.environ.get(\"CONTAINER_EXEC_CPU_QUOTA\", \"100000\")\n)\nCONTAINER_EXEC_CPU_PERIOD = int(\n os.environ.get(\"CONTAINER_EXEC_CPU_PERIOD\", 
\"100000\")\n)\nCONTAINER_EXEC_PIDS_LIMIT = int(\n os.environ.get(\"CONTAINER_EXEC_PIDS_LIMIT\", \"128\")\n)\nCONTAINER_EXEC_CPU_SHARES = int(\n os.environ.get(\"CONTAINER_EXEC_CPU_SHARES\", \"1024\") # Default weight\n)\nCONTAINER_EXEC_DOCKER_RUNTIME = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_RUNTIME\", None\n)\n\nCELERY_BEAT_SCHEDULE = {\n \"cleanup_stale_uploads\": {\n \"task\": \"grandchallenge.jqfileupload.tasks.cleanup_stale_uploads\",\n \"schedule\": timedelta(hours=1),\n },\n \"clear_sessions\": {\n \"task\": \"grandchallenge.core.tasks.clear_sessions\",\n \"schedule\": timedelta(days=1),\n },\n \"update_filter_classes\": {\n \"task\": \"grandchallenge.challenges.tasks.update_filter_classes\",\n \"schedule\": timedelta(minutes=5),\n },\n \"validate_external_challenges\": {\n \"task\": \"grandchallenge.challenges.tasks.check_external_challenge_urls\",\n \"schedule\": timedelta(days=1),\n },\n \"stop_expired_services\": {\n \"task\": \"grandchallenge.container_exec.tasks.stop_expired_services\",\n \"kwargs\": {\"app_label\": \"workstations\", \"model_name\": \"session\"},\n \"schedule\": timedelta(minutes=5),\n },\n # Cleanup evaluation jobs on the evaluation queue\n \"mark_long_running_evaluation_jobs_failed\": {\n \"task\": \"grandchallenge.container_exec.tasks.mark_long_running_jobs_failed\",\n \"kwargs\": {\"app_label\": \"evaluation\", \"model_name\": \"job\"},\n \"options\": {\"queue\": \"evaluation\"},\n \"schedule\": timedelta(hours=1),\n },\n}\n\nCELERY_TASK_ROUTES = {\n \"grandchallenge.container_exec.tasks.execute_job\": \"evaluation\",\n \"grandchallenge.container_exec.tasks.start_service\": \"workstations\",\n \"grandchallenge.container_exec.tasks.stop_service\": \"workstations\",\n \"grandchallenge.container_exec.tasks.stop_expired_services\": \"workstations\",\n \"grandchallenge.cases.tasks.build_images\": \"images\",\n}\n\n# Set which template pack to use for forms\nCRISPY_TEMPLATE_PACK = \"bootstrap4\"\n\n# When using bootstrap error messages need to be renamed to danger\nMESSAGE_TAGS = {messages.ERROR: \"danger\"}\n\n# The name of the group whose members will be able to create reader studies\nREADER_STUDY_CREATORS_GROUP_NAME = \"reader_study_creators\"\n\n# The workstation that is accessible by all authorised users\nDEFAULT_WORKSTATION_SLUG = os.environ.get(\n \"DEFAULT_WORKSTATION_SLUG\", \"cirrus-core\"\n)\nWORKSTATIONS_BASE_IMAGE_QUERY_PARAM = \"image\"\nWORKSTATIONS_OVERLAY_QUERY_PARAM = \"overlay\"\nWORKSTATIONS_READY_STUDY_QUERY_PARAM = \"readerStudy\"\nWORKSTATIONS_CONFIG_QUERY_PARAM = \"config\"\n# The name of the network that the workstations will be attached to\nWORKSTATIONS_NETWORK_NAME = os.environ.get(\n \"WORKSTATIONS_NETWORK_NAME\", \"grand-challengeorg_workstations\"\n)\n# The total limit on the number of sessions\nWORKSTATIONS_MAXIMUM_SESSIONS = int(\n os.environ.get(\"WORKSTATIONS_MAXIMUM_SESSIONS\", \"10\")\n)\n# The name of the group whose members will be able to create workstations\nWORKSTATIONS_CREATORS_GROUP_NAME = \"workstation_creators\"\nWORKSTATIONS_SESSION_DURATION_LIMIT = int(\n os.environ.get(\"WORKSTATIONS_SESSION_DURATION_LIMIT\", \"10000\")\n)\n\n# The name of the group whose members will be able to create algorithms\nALGORITHMS_CREATORS_GROUP_NAME = \"algorithm_creators\"\n\n# Disallow some challenge names due to subdomain or media folder clashes\nDISALLOWED_CHALLENGE_NAMES = [\n \"m\",\n IMAGE_FILES_SUBDIRECTORY,\n \"logos\",\n \"banners\",\n \"mugshots\",\n \"docker\",\n EVALUATION_FILES_SUBDIRECTORY,\n 
\"evaluation-supplementary\",\n \"favicon\",\n \"i\",\n \"cache\", # for sorl-thumbnails\n JQFILEUPLOAD_UPLOAD_SUBIDRECTORY,\n *USERNAME_DENYLIST,\n]\n\nif MEDIA_ROOT[-1] != \"/\":\n msg = (\n \"MEDIA_ROOT setting should end in a slash. Found '\"\n + MEDIA_ROOT\n + \"'. Please add a slash\"\n )\n raise ImproperlyConfigured(msg)\n\nENABLE_DEBUG_TOOLBAR = False\n\nif DEBUG:\n EMAIL_BACKEND = \"django.core.mail.backends.dummy.EmailBackend\"\n\n # Allow localhost in development\n CORS_ORIGIN_REGEX_WHITELIST += [r\"^http://localhost:8888$\"]\n\n LOGGING[\"loggers\"][\"grandchallenge\"][\"level\"] = \"DEBUG\"\n\n if ENABLE_DEBUG_TOOLBAR:\n INSTALLED_APPS += (\"debug_toolbar\",)\n\n MIDDLEWARE = (\n \"debug_toolbar.middleware.DebugToolbarMiddleware\",\n *MIDDLEWARE,\n )\n\n DEBUG_TOOLBAR_CONFIG = {\n \"SHOW_TOOLBAR_CALLBACK\": \"config.toolbar_callback\"\n }\n\nif not COMIC_PUBLIC_FOLDER_NAME:\n raise ImproperlyConfigured(\n \"Don't know from which folder serving publiv files\"\n \"is allowed. Please add a setting like \"\n '\\'COMIC_PUBLIC_FOLDER_NAME = \"public_html\"'\n \" to your .conf file.\"\n )\n\nif not COMIC_REGISTERED_ONLY_FOLDER_NAME:\n raise ImproperlyConfigured(\n \"Don't know from which folder serving protected files\"\n \"is allowed. Please add a setting like \"\n '\\'COMIC_REGISTERED_ONLY_FOLDER_NAME = \"datasets\"'\n \" to your .conf file.\"\n )\n\n# Modality name constants\nMODALITY_OCT = \"OCT\" # Optical coherence tomography\nMODALITY_CF = \"Fundus Photography\" # Color fundus photography\nMODALITY_FA = \"Flurescein Angiography\" # Fluorescein angiography\nMODALITY_IR = \"Infrared Reflectance Imaging\" # Infrared Reflectance imaging\n\n# Maximum file size in bytes to be opened by SimpleITK.ReadImage in cases.models.Image.get_sitk_image()\nMAX_SITK_FILE_SIZE = 268435456 # == 256 mb\n\n# Tile size in pixels to be used when creating dzi for tif files\nDZI_TILE_SIZE = 2560\n\n# Default maximum width or height for thumbnails in retina workstation\nRETINA_DEFAULT_THUMBNAIL_SIZE = 128\n\n# Retina specific settings\nRETINA_IMAGE_CACHE_TIME = 60 * 60 * 24 * 7\nRETINA_GRADERS_GROUP_NAME = \"retina_graders\"\nRETINA_ADMINS_GROUP_NAME = \"retina_admins\"\nRETINA_IMPORT_USER_NAME = \"retina_import_user\"\nRETINA_EXCEPTION_ARCHIVE = \"Australia\"\n", "path": "app/config/settings.py" } ]
[ { "content": "import glob\nimport os\nimport re\nimport uuid\nfrom datetime import timedelta\nfrom distutils.util import strtobool as strtobool_i\n\nimport sentry_sdk\nfrom corsheaders.defaults import default_headers\nfrom django.contrib.messages import constants as messages\nfrom django.core.exceptions import ImproperlyConfigured\nfrom sentry_sdk.integrations.celery import CeleryIntegration\nfrom sentry_sdk.integrations.django import DjangoIntegration\nfrom sentry_sdk.integrations.redis import RedisIntegration\n\nfrom config.denylist import USERNAME_DENYLIST\n\n\ndef strtobool(val) -> bool:\n \"\"\" Returns disutils.util.strtobool as a boolean \"\"\"\n return bool(strtobool_i(val))\n\n\nDEBUG = strtobool(os.environ.get(\"DEBUG\", \"True\"))\n\nADMINS = (\n # ('Your Name', '[email protected]'),\n)\n\n# Who gets the 404 notifications?\nmanager_email = os.environ.get(\"MANAGER_EMAIL\", None)\nif manager_email:\n MANAGERS = [(\"Manager\", manager_email)]\n\nIGNORABLE_404_URLS = [\n re.compile(r\".*\\.(php|cgi|asp).*\"),\n re.compile(r\"^/phpmyadmin.*\"),\n re.compile(r\"^/gen204.*\"),\n re.compile(r\"^/wp-content.*\"),\n re.compile(r\".*/trackback.*\"),\n]\n\n# Used as starting points for various other paths. realpath(__file__) starts in\n# the \"Comic\" app dir. We need to go one dir higher so path.join(\"..\")\nSITE_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))\nAPPS_DIR = os.path.join(SITE_ROOT, \"grandchallenge\")\n\nDATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql_psycopg2\",\n \"NAME\": os.environ.get(\"POSTGRES_DB\", \"comic\"),\n \"USER\": os.environ.get(\"POSTGRES_USER\", \"comic\"),\n \"PASSWORD\": os.environ.get(\"POSTGRES_PASSWORD\", \"secretpassword\"),\n \"HOST\": os.environ.get(\"POSTGRES_HOST\", \"postgres\"),\n \"PORT\": \"\",\n }\n}\n\nEMAIL_BACKEND = \"djcelery_email.backends.CeleryEmailBackend\"\nEMAIL_HOST = os.environ.get(\"EMAIL_HOST\", \"\")\nEMAIL_HOST_USER = os.environ.get(\"EMAIL_HOST_USER\", \"\")\nEMAIL_HOST_PASSWORD = os.environ.get(\"EMAIL_HOST_PASSWORD\", \"\")\nEMAIL_PORT = int(os.environ.get(\"EMAIL_PORT\", \"25\"))\nEMAIL_USE_TLS = strtobool(os.environ.get(\"EMAIL_USE_TLS\", \"False\"))\nDEFAULT_FROM_EMAIL = os.environ.get(\n \"DEFAULT_FROM_EMAIL\", \"webmaster@localhost\"\n)\nSERVER_EMAIL = os.environ.get(\"SERVER_EMAIL\", \"root@localhost\")\n\nANONYMOUS_USER_NAME = \"AnonymousUser\"\n\nAUTH_PROFILE_MODULE = \"profiles.UserProfile\"\nUSERENA_USE_HTTPS = False\nUSERENA_DEFAULT_PRIVACY = \"open\"\nLOGIN_URL = \"/accounts/signin/\"\nLOGOUT_URL = \"/accounts/signout/\"\n\nLOGIN_REDIRECT_URL = \"/accounts/login-redirect/\"\nSOCIAL_AUTH_LOGIN_REDIRECT_URL = LOGIN_REDIRECT_URL\n\n# Do not give message popups saying \"you have been logged out\". Users are expected\n# to know they have been logged out when they click the logout button\nUSERENA_USE_MESSAGES = (False,)\n\n# Local time zone for this installation. Choices can be found here:\n# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name\n# although not all choices may be available on all operating systems.\n# On Unix systems, a value of None will cause Django to use the same\n# timezone as the operating system.\n# If running in a Windows environment this must be set to the same as your\n# system time zone.\nTIME_ZONE = \"UTC\"\n\n# Language code for this installation. 
All choices can be found here:\n# http://www.i18nguy.com/unicode/language-identifiers.html\nLANGUAGE_CODE = \"en-us\"\n\nSITE_ID = int(os.environ.get(\"SITE_ID\", \"1\"))\n\n# If you set this to False, Django will make some optimizations so as not\n# to load the internationalization machinery.\nUSE_I18N = True\n\n# If you set this to False, Django will not format dates, numbers and\n# calendars according to the current locale.\nUSE_L10N = True\n\n# If you set this to False, Django will not use timezone-aware datetimes.\nUSE_TZ = True\n\n##############################################################################\n#\n# Storage\n#\n##############################################################################\nDEFAULT_FILE_STORAGE = \"django.core.files.storage.FileSystemStorage\"\n\n# Absolute filesystem path to the directory that will hold user-uploaded files.\n# Example: \"/home/media/media.lawrence.com/media/\"\nMEDIA_ROOT = os.environ.get(\"MEDIA_ROOT\", \"/dbox/Dropbox/media/\")\n\n# URL that handles the media served from MEDIA_ROOT. Make sure to use a\n# trailing slash.\n# Examples: \"http://media.lawrence.com/media/\", \"http://example.com/media/\"\nMEDIA_URL = \"/media/\"\n\n# In each challenge there can be a single directory out of which files can be\n# downloaded without logging in.\nCOMIC_PUBLIC_FOLDER_NAME = \"public_html\"\nCOMIC_ADDITIONAL_PUBLIC_FOLDER_NAMES = [\"results/public\"]\n\n# In each challenge there can be a single directory from which files can only\n# be downloaded by registered participants of that project\nCOMIC_REGISTERED_ONLY_FOLDER_NAME = \"datasets\"\n\n# Subdirectories on root for various files\nJQFILEUPLOAD_UPLOAD_SUBIDRECTORY = \"jqfileupload\"\nIMAGE_FILES_SUBDIRECTORY = \"images\"\nEVALUATION_FILES_SUBDIRECTORY = \"evaluation\"\n\n# This is for storing files that should not be served to the public\nAWS_DEFAULT_ACL = None\nPRIVATE_S3_STORAGE_KWARGS = {\n \"access_key\": os.environ.get(\"PRIVATE_S3_STORAGE_ACCESS_KEY\", \"\"),\n \"secret_key\": os.environ.get(\"PRIVATE_S3_STORAGE_SECRET_KEY\", \"\"),\n \"bucket_name\": os.environ.get(\n \"PRIVATE_S3_STORAGE_BUCKET_NAME\", \"grand-challenge-private\"\n ),\n \"auto_create_bucket\": True,\n \"endpoint_url\": os.environ.get(\n \"PRIVATE_S3_STORAGE_ENDPOINT_URL\", \"http://minio-private:9000\"\n ),\n # Do not overwrite files, we get problems with jqfileupload otherwise\n \"file_overwrite\": False,\n}\nPROTECTED_S3_STORAGE_KWARGS = {\n \"access_key\": os.environ.get(\"PROTECTED_S3_STORAGE_ACCESS_KEY\", \"\"),\n \"secret_key\": os.environ.get(\"PROTECTED_S3_STORAGE_SECRET_KEY\", \"\"),\n \"bucket_name\": os.environ.get(\n \"PROTECTED_S3_STORAGE_BUCKET_NAME\", \"grand-challenge-protected\"\n ),\n \"auto_create_bucket\": True,\n \"endpoint_url\": os.environ.get(\n \"PROTECTED_S3_STORAGE_ENDPOINT_URL\", \"http://minio-protected:9000\"\n ),\n # This is the domain where people will be able to go to download data\n # from this bucket. 
Usually we would use reverse to find this out,\n # but this needs to be defined before the database is populated\n \"custom_domain\": os.environ.get(\n \"PROTECTED_S3_CUSTOM_DOMAIN\", \"gc.localhost/media\"\n ),\n}\n\n##############################################################################\n#\n# Caching\n#\n##############################################################################\n\nCACHES = {\n \"default\": {\n \"BACKEND\": \"speedinfo.backends.proxy_cache\",\n \"CACHE_BACKEND\": \"django.core.cache.backends.memcached.MemcachedCache\",\n \"LOCATION\": \"memcached:11211\",\n }\n}\nSPEEDINFO_STORAGE = \"speedinfo.storage.cache.storage.CacheStorage\"\n\nROOT_URLCONF = \"config.urls\"\nSUBDOMAIN_URL_CONF = \"grandchallenge.subdomains.urls\"\nDEFAULT_SCHEME = os.environ.get(\"DEFAULT_SCHEME\", \"https\")\n\nSESSION_COOKIE_DOMAIN = os.environ.get(\n \"SESSION_COOKIE_DOMAIN\", \".gc.localhost\"\n)\n# We're always running behind a proxy so set these to true\nSESSION_COOKIE_SECURE = True\nCSRF_COOKIE_SECURE = True\nSECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", \"https\")\n\n# Set the allowed hosts to the cookie domain\nALLOWED_HOSTS = [SESSION_COOKIE_DOMAIN, \"web\"]\n\n# Security options\nSECURE_HSTS_SECONDS = int(os.environ.get(\"SECURE_HSTS_SECONDS\", \"0\"))\nSECURE_HSTS_INCLUDE_SUBDOMAINS = strtobool(\n os.environ.get(\"SECURE_HSTS_INCLUDE_SUBDOMAINS\", \"False\")\n)\nSECURE_CONTENT_TYPE_NOSNIFF = strtobool(\n os.environ.get(\"SECURE_CONTENT_TYPE_NOSNIFF\", \"False\")\n)\nSECURE_BROWSER_XSS_FILTER = strtobool(\n os.environ.get(\"SECURE_BROWSER_XSS_FILTER\", \"False\")\n)\nX_FRAME_OPTIONS = os.environ.get(\"X_FRAME_OPTIONS\", \"SAMEORIGIN\")\n\n# Absolute path to the directory static files should be collected to.\n# Don't put anything in this directory yourself; store your static files\n# in apps' \"static/\" subdirectories and in STATICFILES_DIRS.\n# Example: \"/home/media/media.lawrence.com/static/\"\nSTATIC_ROOT = \"/static/\"\n\nSTATIC_HOST = os.environ.get(\"DJANGO_STATIC_HOST\", \"\")\nSTATIC_URL = f\"{STATIC_HOST}/static/\"\n\n# List of finder classes that know how to find static files in\n# various locations.\nSTATICFILES_FINDERS = (\n \"django.contrib.staticfiles.finders.FileSystemFinder\",\n \"django.contrib.staticfiles.finders.AppDirectoriesFinder\",\n)\n\n# Vendored static files will be put here\nSTATICFILES_DIRS = [\"/opt/static/\"]\n\nSTATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\n\n# Make this unique, and don't share it with anybody.\nSECRET_KEY = os.environ.get(\n \"SECRET_KEY\", \"d=%^l=xa02an9jn-$!*hy1)5yox$a-$2(ejt-2smimh=j4%8*b\"\n)\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [str(APPS_DIR)],\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.contrib.auth.context_processors.auth\",\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.i18n\",\n \"django.template.context_processors.media\",\n \"django.template.context_processors.static\",\n \"django.template.context_processors.tz\",\n \"django.template.context_processors.request\",\n \"django.contrib.messages.context_processors.messages\",\n \"grandchallenge.core.context_processors.challenge\",\n \"grandchallenge.core.context_processors.google_keys\",\n \"grandchallenge.core.context_processors.debug\",\n \"grandchallenge.core.context_processors.sentry_dsn\",\n ]\n },\n }\n]\n\nMIDDLEWARE = (\n \"django.middleware.security.SecurityMiddleware\", # Keep 
security at top\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n # Keep whitenoise after security and before all else\n \"corsheaders.middleware.CorsMiddleware\", # Keep CORS near the top\n \"django.middleware.common.BrokenLinkEmailsMiddleware\",\n # Keep BrokenLinkEmailsMiddleware near the top\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.contrib.sites.middleware.CurrentSiteMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"simple_history.middleware.HistoryRequestMiddleware\",\n # subdomain_middleware after CurrentSiteMiddleware\n \"grandchallenge.subdomains.middleware.subdomain_middleware\",\n \"grandchallenge.subdomains.middleware.challenge_subdomain_middleware\",\n \"grandchallenge.subdomains.middleware.subdomain_urlconf_middleware\",\n # speedinfo at the end but before FetchFromCacheMiddleware\n \"speedinfo.middleware.ProfilerMiddleware\",\n)\n\n# Python dotted path to the WSGI application used by Django's runserver.\nWSGI_APPLICATION = \"config.wsgi.application\"\n\nDJANGO_APPS = [\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.sessions\",\n \"django.contrib.sites\",\n \"django.contrib.messages\",\n \"whitenoise.runserver_nostatic\", # Keep whitenoise above staticfiles\n \"django.contrib.staticfiles\",\n \"django.contrib.humanize\",\n \"django.contrib.admin\",\n \"django.contrib.postgres\",\n \"django.contrib.flatpages\",\n]\n\nTHIRD_PARTY_APPS = [\n \"django_celery_results\", # database results backend\n \"django_celery_beat\", # periodic tasks\n \"djcelery_email\", # asynchronous emails\n \"userena\", # user profiles\n \"guardian\", # userena dependency, per object permissions\n \"easy_thumbnails\", # userena dependency\n \"social_django\", # social authentication with oauth2\n \"rest_framework\", # provides REST API\n \"rest_framework.authtoken\", # token auth for REST API\n \"crispy_forms\", # bootstrap forms\n \"favicon\", # favicon management\n \"django_select2\", # for multiple choice widgets\n \"django_summernote\", # for WYSIWYG page editing\n \"sorl.thumbnail\", # for dynamic thumbnails\n \"dal\", # for autocompletion of selection fields\n \"dal_select2\", # for autocompletion of selection fields\n \"django_extensions\", # custom extensions\n \"simple_history\", # for object history\n \"corsheaders\", # to allow api communication from subdomains\n \"speedinfo\", # for profiling views\n]\n\nLOCAL_APPS = [\n \"grandchallenge.admins\",\n \"grandchallenge.api\",\n \"grandchallenge.challenges\",\n \"grandchallenge.core\",\n \"grandchallenge.evaluation\",\n \"grandchallenge.jqfileupload\",\n \"grandchallenge.pages\",\n \"grandchallenge.participants\",\n \"grandchallenge.profiles\",\n \"grandchallenge.teams\",\n \"grandchallenge.uploads\",\n \"grandchallenge.cases\",\n \"grandchallenge.algorithms\",\n \"grandchallenge.container_exec\",\n \"grandchallenge.datasets\",\n \"grandchallenge.submission_conversion\",\n \"grandchallenge.statistics\",\n \"grandchallenge.archives\",\n \"grandchallenge.patients\",\n \"grandchallenge.studies\",\n \"grandchallenge.registrations\",\n \"grandchallenge.annotations\",\n \"grandchallenge.retina_core\",\n \"grandchallenge.retina_importers\",\n \"grandchallenge.retina_api\",\n \"grandchallenge.worklists\",\n \"grandchallenge.workstations\",\n 
\"grandchallenge.reader_studies\",\n \"grandchallenge.workstation_configs\",\n]\n\nINSTALLED_APPS = DJANGO_APPS + LOCAL_APPS + THIRD_PARTY_APPS\n\nADMIN_URL = f'{os.environ.get(\"DJANGO_ADMIN_URL\", \"django-admin\")}/'\n\nAUTHENTICATION_BACKENDS = (\n \"social_core.backends.google.GoogleOAuth2\",\n \"userena.backends.UserenaAuthenticationBackend\",\n \"guardian.backends.ObjectPermissionBackend\",\n \"django.contrib.auth.backends.ModelBackend\",\n)\n\nGOOGLE_MAPS_API_KEY = os.environ.get(\"GOOGLE_MAPS_API_KEY\", \"\")\nGOOGLE_ANALYTICS_ID = os.environ.get(\"GOOGLE_ANALYTICS_ID\", \"GA_TRACKING_ID\")\n\nSOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ.get(\n \"SOCIAL_AUTH_GOOGLE_OAUTH2_KEY\", \"\"\n)\nSOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ.get(\n \"SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET\", \"\"\n)\n\n# TODO: JM - Add the profile filling as a partial\nSOCIAL_AUTH_PIPELINE = (\n \"social_core.pipeline.social_auth.social_details\",\n \"social_core.pipeline.social_auth.social_uid\",\n \"social_core.pipeline.social_auth.auth_allowed\",\n \"social_core.pipeline.social_auth.social_user\",\n \"social_core.pipeline.social_auth.associate_by_email\",\n \"social_core.pipeline.user.get_username\",\n \"social_core.pipeline.user.create_user\",\n \"grandchallenge.profiles.social_auth.pipeline.profile.create_profile\",\n \"social_core.pipeline.social_auth.associate_user\",\n \"social_core.pipeline.social_auth.load_extra_data\",\n \"social_core.pipeline.user.user_details\",\n)\n\n# Do not sanitize redirects for social auth so we can redirect back to\n# other subdomains\nSOCIAL_AUTH_SANITIZE_REDIRECTS = False\nSOCIAL_AUTH_REDIRECT_IS_HTTPS = True\n\n# Django 1.6 introduced a new test runner, use it\nTEST_RUNNER = \"django.test.runner.DiscoverRunner\"\n\n# WYSIWYG editing with Summernote\nSUMMERNOTE_THEME = \"bs4\"\nSUMMERNOTE_CONFIG = {\n \"attachment_model\": \"uploads.SummernoteAttachment\",\n \"attachment_require_authentication\": True,\n \"summernote\": {\n \"width\": \"100%\",\n \"toolbar\": [\n [\"style\", [\"style\"]],\n [\n \"font\",\n [\"bold\", \"italic\", \"underline\", \"strikethrough\", \"clear\"],\n ],\n [\"para\", [\"ul\", \"ol\", \"paragraph\"]],\n [\"insert\", [\"link\", \"picture\", \"hr\"]],\n [\"view\", [\"fullscreen\", \"codeview\"]],\n [\"help\", [\"help\"]],\n ],\n },\n}\n\n# sorl.thumbnail settings\nTHUMBNAIL_FORMAT = \"PNG\"\nTHUMBNAIL_ALTERNATIVE_RESOLUTIONS = [1.5, 2]\n\n# Settings for allowed HTML\nBLEACH_ALLOWED_TAGS = [\n \"a\",\n \"abbr\",\n \"acronym\",\n \"b\",\n \"blockquote\",\n \"br\",\n \"code\",\n \"col\",\n \"div\",\n \"em\",\n \"h1\",\n \"h2\",\n \"h3\",\n \"h4\",\n \"h5\",\n \"h6\",\n \"hr\",\n \"i\",\n \"iframe\", # Allowed for now for continuous registration challenge\n \"img\",\n \"li\",\n \"ol\",\n \"p\",\n \"pre\",\n \"span\",\n \"strike\",\n \"strong\",\n \"table\",\n \"tbody\",\n \"thead\",\n \"td\",\n \"th\",\n \"tr\",\n \"u\",\n \"ul\",\n]\nBLEACH_ALLOWED_ATTRIBUTES = {\n \"*\": [\"class\", \"data-toggle\", \"id\", \"style\", \"role\"],\n \"a\": [\"href\", \"title\"],\n \"abbr\": [\"title\"],\n \"acronym\": [\"title\"],\n \"div\": [\"data-geochart\"], # Required for geocharts\n \"iframe\": [\n \"src\",\n \"sandbox\",\n \"data-groupname\",\n \"scrolling\",\n \"height\",\n ], # For continuous registration challenge and google group\n \"img\": [\"height\", \"src\", \"width\"],\n # For bootstrap tables: https://getbootstrap.com/docs/4.3/content/tables/\n \"th\": [\"scope\", \"colspan\"],\n \"td\": [\"colspan\"],\n}\nBLEACH_ALLOWED_STYLES = [\"height\", \"margin-left\", 
\"text-align\", \"width\"]\nBLEACH_ALLOWED_PROTOCOLS = [\"http\", \"https\", \"mailto\"]\nBLEACH_STRIP = strtobool(os.environ.get(\"BLEACH_STRIP\", \"True\"))\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n \"NAME\": \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\"\n },\n {\"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\"},\n {\n \"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"\n },\n {\n \"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"\n },\n]\n\n# A sample logging configuration. More info in configuration can be found at\n# https://docs.djangoproject.com/en/dev/topics/logging/ .\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"handlers\": {\n \"console\": {\"level\": \"DEBUG\", \"class\": \"logging.StreamHandler\"}\n },\n \"loggers\": {\n \"grandchallenge\": {\n \"level\": \"WARNING\",\n \"handlers\": [\"console\"],\n \"propagate\": True,\n },\n \"django.db.backends\": {\n \"level\": \"ERROR\",\n \"handlers\": [\"console\"],\n \"propagate\": False,\n },\n \"werkzeug\": {\n \"handlers\": [\"console\"],\n \"level\": \"DEBUG\",\n \"propagate\": True,\n },\n },\n}\n\nSENTRY_DSN = os.environ.get(\"DJANGO_SENTRY_DSN\", \"\")\nSENTRY_ENABLE_JS_REPORTING = strtobool(\n os.environ.get(\"SENTRY_ENABLE_JS_REPORTING\", \"False\")\n)\n\nsentry_sdk.init(\n dsn=SENTRY_DSN,\n integrations=[\n DjangoIntegration(),\n CeleryIntegration(),\n RedisIntegration(),\n ],\n)\n\nREST_FRAMEWORK = {\n \"DEFAULT_PERMISSION_CLASSES\": (\"rest_framework.permissions.IsAdminUser\",),\n \"DEFAULT_AUTHENTICATION_CLASSES\": (\n \"rest_framework.authentication.TokenAuthentication\",\n \"rest_framework.authentication.SessionAuthentication\",\n ),\n \"DEFAULT_PAGINATION_CLASS\": \"grandchallenge.api.pagination.MaxLimit1000OffsetPagination\",\n \"PAGE_SIZE\": 100,\n}\n\nVALID_SUBDOMAIN_REGEX = r\"[A-Za-z0-9](?:[A-Za-z0-9\\-]{0,61}[A-Za-z0-9])?\"\nCORS_ORIGIN_REGEX_WHITELIST = [\n rf\"^https:\\/\\/{VALID_SUBDOMAIN_REGEX}{re.escape(SESSION_COOKIE_DOMAIN)}$\"\n]\nCORS_ALLOW_HEADERS = [\n *default_headers,\n \"content-range\",\n \"content-disposition\",\n \"content-description\",\n]\n\nCELERY_BROKER_URL = os.environ.get(\"CELERY_BROKER_URL\", \"redis://redis:6379/0\")\nCELERY_RESULT_BACKEND = os.environ.get(\"CELERY_RESULT_BACKEND\", \"django-db\")\nCELERY_RESULT_PERSISTENT = True\nCELERY_TASK_SOFT_TIME_LIMIT = int(\n os.environ.get(\"CELERY_TASK_SOFT_TIME_LIMIT\", \"7200\")\n)\nCELERY_TASK_TIME_LIMIT = int(os.environ.get(\"CELERY_TASK_TIME_LIMIT\", \"7260\"))\n\nCONTAINER_EXEC_DOCKER_BASE_URL = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_BASE_URL\", \"unix://var/run/docker.sock\"\n)\nCONTAINER_EXEC_DOCKER_TLSVERIFY = strtobool(\n os.environ.get(\"CONTAINER_EXEC_DOCKER_TLSVERIFY\", \"False\")\n)\nCONTAINER_EXEC_DOCKER_TLSCACERT = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_TLSCACERT\", \"\"\n)\nCONTAINER_EXEC_DOCKER_TLSCERT = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_TLSCERT\", \"\"\n)\nCONTAINER_EXEC_DOCKER_TLSKEY = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_TLSKEY\", \"\"\n)\nCONTAINER_EXEC_MEMORY_LIMIT = os.environ.get(\n \"CONTAINER_EXEC_MEMORY_LIMIT\", \"4g\"\n)\nCONTAINER_EXEC_IO_IMAGE = \"alpine:3.9\"\nCONTAINER_EXEC_IO_SHA256 = (\n \"sha256:055936d3920576da37aa9bc460d70c5f212028bda1c08c0879aedf03d7a66ea1\"\n)\nCONTAINER_EXEC_CPU_QUOTA = int(\n os.environ.get(\"CONTAINER_EXEC_CPU_QUOTA\", \"100000\")\n)\nCONTAINER_EXEC_CPU_PERIOD = int(\n os.environ.get(\"CONTAINER_EXEC_CPU_PERIOD\", 
\"100000\")\n)\nCONTAINER_EXEC_PIDS_LIMIT = int(\n os.environ.get(\"CONTAINER_EXEC_PIDS_LIMIT\", \"128\")\n)\nCONTAINER_EXEC_CPU_SHARES = int(\n os.environ.get(\"CONTAINER_EXEC_CPU_SHARES\", \"1024\") # Default weight\n)\nCONTAINER_EXEC_DOCKER_RUNTIME = os.environ.get(\n \"CONTAINER_EXEC_DOCKER_RUNTIME\", None\n)\n\nCELERY_BEAT_SCHEDULE = {\n \"cleanup_stale_uploads\": {\n \"task\": \"grandchallenge.jqfileupload.tasks.cleanup_stale_uploads\",\n \"schedule\": timedelta(hours=1),\n },\n \"clear_sessions\": {\n \"task\": \"grandchallenge.core.tasks.clear_sessions\",\n \"schedule\": timedelta(days=1),\n },\n \"update_filter_classes\": {\n \"task\": \"grandchallenge.challenges.tasks.update_filter_classes\",\n \"schedule\": timedelta(minutes=5),\n },\n \"validate_external_challenges\": {\n \"task\": \"grandchallenge.challenges.tasks.check_external_challenge_urls\",\n \"schedule\": timedelta(days=1),\n },\n \"stop_expired_services\": {\n \"task\": \"grandchallenge.container_exec.tasks.stop_expired_services\",\n \"kwargs\": {\"app_label\": \"workstations\", \"model_name\": \"session\"},\n \"schedule\": timedelta(minutes=5),\n },\n # Cleanup evaluation jobs on the evaluation queue\n \"mark_long_running_evaluation_jobs_failed\": {\n \"task\": \"grandchallenge.container_exec.tasks.mark_long_running_jobs_failed\",\n \"kwargs\": {\"app_label\": \"evaluation\", \"model_name\": \"job\"},\n \"options\": {\"queue\": \"evaluation\"},\n \"schedule\": timedelta(hours=1),\n },\n}\n\nCELERY_TASK_ROUTES = {\n \"grandchallenge.container_exec.tasks.execute_job\": \"evaluation\",\n \"grandchallenge.container_exec.tasks.start_service\": \"workstations\",\n \"grandchallenge.container_exec.tasks.stop_service\": \"workstations\",\n \"grandchallenge.container_exec.tasks.stop_expired_services\": \"workstations\",\n \"grandchallenge.cases.tasks.build_images\": \"images\",\n}\n\n# Set which template pack to use for forms\nCRISPY_TEMPLATE_PACK = \"bootstrap4\"\n\n# When using bootstrap error messages need to be renamed to danger\nMESSAGE_TAGS = {messages.ERROR: \"danger\"}\n\n# The name of the group whose members will be able to create reader studies\nREADER_STUDY_CREATORS_GROUP_NAME = \"reader_study_creators\"\n\n# The workstation that is accessible by all authorised users\nDEFAULT_WORKSTATION_SLUG = os.environ.get(\n \"DEFAULT_WORKSTATION_SLUG\", \"cirrus-core\"\n)\nWORKSTATIONS_BASE_IMAGE_QUERY_PARAM = \"image\"\nWORKSTATIONS_OVERLAY_QUERY_PARAM = \"overlay\"\nWORKSTATIONS_READY_STUDY_QUERY_PARAM = \"readerStudy\"\nWORKSTATIONS_CONFIG_QUERY_PARAM = \"config\"\n# The name of the network that the workstations will be attached to\nWORKSTATIONS_NETWORK_NAME = os.environ.get(\n \"WORKSTATIONS_NETWORK_NAME\", \"grand-challengeorg_workstations\"\n)\n# The total limit on the number of sessions\nWORKSTATIONS_MAXIMUM_SESSIONS = int(\n os.environ.get(\"WORKSTATIONS_MAXIMUM_SESSIONS\", \"10\")\n)\n# The name of the group whose members will be able to create workstations\nWORKSTATIONS_CREATORS_GROUP_NAME = \"workstation_creators\"\nWORKSTATIONS_SESSION_DURATION_LIMIT = int(\n os.environ.get(\"WORKSTATIONS_SESSION_DURATION_LIMIT\", \"10000\")\n)\n\n# The name of the group whose members will be able to create algorithms\nALGORITHMS_CREATORS_GROUP_NAME = \"algorithm_creators\"\n\n# Disallow some challenge names due to subdomain or media folder clashes\nDISALLOWED_CHALLENGE_NAMES = [\n \"m\",\n IMAGE_FILES_SUBDIRECTORY,\n \"logos\",\n \"banners\",\n \"mugshots\",\n \"docker\",\n EVALUATION_FILES_SUBDIRECTORY,\n 
\"evaluation-supplementary\",\n \"favicon\",\n \"i\",\n \"cache\", # for sorl-thumbnails\n JQFILEUPLOAD_UPLOAD_SUBIDRECTORY,\n *USERNAME_DENYLIST,\n]\n\nif MEDIA_ROOT[-1] != \"/\":\n msg = (\n \"MEDIA_ROOT setting should end in a slash. Found '\"\n + MEDIA_ROOT\n + \"'. Please add a slash\"\n )\n raise ImproperlyConfigured(msg)\n\nENABLE_DEBUG_TOOLBAR = False\n\nif DEBUG:\n EMAIL_BACKEND = \"django.core.mail.backends.dummy.EmailBackend\"\n\n # Allow localhost in development\n CORS_ORIGIN_REGEX_WHITELIST += [r\"^http://localhost:8888$\"]\n\n LOGGING[\"loggers\"][\"grandchallenge\"][\"level\"] = \"DEBUG\"\n\n if ENABLE_DEBUG_TOOLBAR:\n INSTALLED_APPS += (\"debug_toolbar\",)\n\n MIDDLEWARE = (\n \"debug_toolbar.middleware.DebugToolbarMiddleware\",\n *MIDDLEWARE,\n )\n\n DEBUG_TOOLBAR_CONFIG = {\n \"SHOW_TOOLBAR_CALLBACK\": \"config.toolbar_callback\"\n }\n\nif not COMIC_PUBLIC_FOLDER_NAME:\n raise ImproperlyConfigured(\n \"Don't know from which folder serving publiv files\"\n \"is allowed. Please add a setting like \"\n '\\'COMIC_PUBLIC_FOLDER_NAME = \"public_html\"'\n \" to your .conf file.\"\n )\n\nif not COMIC_REGISTERED_ONLY_FOLDER_NAME:\n raise ImproperlyConfigured(\n \"Don't know from which folder serving protected files\"\n \"is allowed. Please add a setting like \"\n '\\'COMIC_REGISTERED_ONLY_FOLDER_NAME = \"datasets\"'\n \" to your .conf file.\"\n )\n\n# Modality name constants\nMODALITY_OCT = \"OCT\" # Optical coherence tomography\nMODALITY_CF = \"Fundus Photography\" # Color fundus photography\nMODALITY_FA = \"Flurescein Angiography\" # Fluorescein angiography\nMODALITY_IR = \"Infrared Reflectance Imaging\" # Infrared Reflectance imaging\n\n# Maximum file size in bytes to be opened by SimpleITK.ReadImage in cases.models.Image.get_sitk_image()\nMAX_SITK_FILE_SIZE = 268435456 # == 256 mb\n\n# Tile size in pixels to be used when creating dzi for tif files\nDZI_TILE_SIZE = 2560\n\n# Default maximum width or height for thumbnails in retina workstation\nRETINA_DEFAULT_THUMBNAIL_SIZE = 128\n\n# Retina specific settings\nRETINA_IMAGE_CACHE_TIME = 60 * 60 * 24 * 7\nRETINA_GRADERS_GROUP_NAME = \"retina_graders\"\nRETINA_ADMINS_GROUP_NAME = \"retina_admins\"\nRETINA_IMPORT_USER_NAME = \"retina_import_user\"\nRETINA_EXCEPTION_ARCHIVE = \"Australia\"\n", "path": "app/config/settings.py" } ]
diff --git a/.python-version b/.python-version new file mode 100644 index 0000000000..aaf18d2948 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.7.5 diff --git a/Pipfile b/Pipfile deleted file mode 100644 index 3b9c659f50..0000000000 --- a/Pipfile +++ /dev/null @@ -1,63 +0,0 @@ -[[source]] -verify_ssl = true -url = "https://pypi.python.org/simple" -name = "pypi" - -[dev-packages] -pytest-django = "*" -pytest-cov = "*" -pytest-mock = "*" -factory-boy = "*" -django-debug-toolbar = "*" -black = "==19.3b0" -sphinx-autobuild = "*" -sphinx = "*" -pyupgrade = "*" -pytest-xdist = "*" -sphinx-autodoc-typehints = "*" -werkzeug = "*" -sphinx-rtd-theme = "*" - -[packages] -"beautifulsoup4" = "*" -celery = "*" -redis = "*" -django = "<2.3" -django-countries = "*" -django-crispy-forms = "*" -django-userena-ce = "*" -djangorestframework = "*" -docker = "*" -matplotlib = "*" -"oauth2" = "*" -python-magic = "*" -python-memcached = "*" -pytz = "*" -social-auth-app-django = "*" -gunicorn = "*" -django-celery-email = "*" -nbconvert = "*" -simpleitk = "*" -django-celery-beat = "*" -django-favicon-plus = "*" -"psycopg2" = "*" -"django-select2" = "*" -django-celery-results = "*" -django-summernote = "*" -bleach = "*" -jsonschema = "*" -tldextract = "*" -tifffile = "==2019.1.4" -sorl-thumbnail = "*" -django-autocomplete-light = "*" -django-storages = "*" -boto3 = "*" -whitenoise = "*" -brotli = "*" -djangorestframework-guardian = "*" -django-extensions = "*" -django-simple-history = "*" -sentry-sdk = "*" -django-cors-headers = "*" -pyvips = "*" -django-speedinfo = "*" diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index e1e1fbed91..0000000000 --- a/Pipfile.lock +++ /dev/null @@ -1,1480 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "44a89c7014dde77e560d5cde46c888fffb6100eafd1bf2a9e3e0416d556430f8" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.python.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "amqp": { - "hashes": [ - "sha256:19a917e260178b8d410122712bac69cb3e6db010d68f6101e7307508aded5e68", - "sha256:19d851b879a471fcfdcf01df9936cff924f422baa77653289f7095dedd5fb26a" - ], - "version": "==2.5.1" - }, - "attrs": { - "hashes": [ - "sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2", - "sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396" - ], - "version": "==19.2.0" - }, - "beautifulsoup4": { - "hashes": [ - "sha256:5279c36b4b2ec2cb4298d723791467e3000e5384a43ea0cdf5d45207c7e97169", - "sha256:dcdef580e18a76d54002088602eba453eec38ebbcafafeaabd8cab12b6155d57" - ], - "index": "pypi", - "version": "==4.8.1" - }, - "billiard": { - "hashes": [ - "sha256:01afcb4e7c4fd6480940cfbd4d9edc19d7a7509d6ada533984d0d0f49901ec82", - "sha256:b8809c74f648dfe69b973c8e660bcec00603758c9db8ba89d7719f88d5f01f26" - ], - "version": "==3.6.1.0" - }, - "bleach": { - "hashes": [ - "sha256:213336e49e102af26d9cde77dd2d0397afabc5a6bf2fed985dc35b5d1e285a16", - "sha256:3fdf7f77adcf649c9911387df51254b813185e32b2c6619f690b593a617e19fa" - ], - "index": "pypi", - "version": "==3.1.0" - }, - "boto3": { - "hashes": [ - "sha256:9b18eeb4f0943af80fc40cd480931e4900f7fb3850dfafe68903148fdd832e6b", - "sha256:a985ed608640f8f7be49f83f298172876b695cc2218e335334a9da3bf6b0a968" - ], - "index": "pypi", - "version": "==1.9.245" - }, - "botocore": { - "hashes": [ - "sha256:16a09307cef306312d4c3ea18ed3902ae1e084c905bda091db2689e9852753ef", - "sha256:b21b694a6bccbe12b64d3e452d081016b67172c92d0b1be2904f60cd4dd5598d" - ], - 
"version": "==1.12.245" - }, - "brotli": { - "hashes": [ - "sha256:0538dc1744fd17c314d2adc409ea7d1b779783b89fd95bcfb0c2acc93a6ea5a7", - "sha256:0970a47f471782912d7705160b2b0a9306e68e6fadf9cffcaeb42d8f0951e26c", - "sha256:113f51658e6fe548dce4b3749f6ef6c24de4184ba9c10a909cbee4261c2a5da0", - "sha256:1e1aa9c4d1558889f42749c8baf846007953bfd32c8209230cf1cd1f5ef33495", - "sha256:2f2f4f78f29ac4a45d15b3d9fc3fd9705e0ad313a44b129f6e1d0c6916bad0e2", - "sha256:3269f6de1dd150fd0cce1c158b61ff5ac06d627fd3ae9c6ea03aed26fbbff7ea", - "sha256:50dd9ad2a2bb12da4e9002a438672d182f98e546e99952de80280a1e1729664f", - "sha256:5519a4b01b1a4f965083cbfa2ef2b9774c5a5f352341c47b50776ad109423d72", - "sha256:5eb27722d320370315971c427eb8aa7cc0791f2a458840d357ac653bd0ad3a14", - "sha256:5f06b4d5b6f58e5b5c220c2f23cad034dc5efa51b01fde2351ced1605bd980e2", - "sha256:72848d25a5f9e736db4af4512e0c3feecc094d57d241f8f1ae959115a2c39756", - "sha256:743001bca75f4a6b4454be3510feca46f9d61a0c782a9bc2bc684bdb245e279e", - "sha256:9d1c2dd27a1083fefd05b1b2f8df4a6bc2aaa6c21dd82cd41c8ae5e7c23a87f8", - "sha256:a13ce9b419fe9f277c63f700efb0e444331509d1881b5610d2ba7e9080606967", - "sha256:a19ef0952b9d2803df88dff07f45a6c92d5676afb9b8d69cf32232d684036d11", - "sha256:ad766ca8b8c1419b71a22756b45264f45725c86133dc80a7cbe30b6b78c75620", - "sha256:ad7963f261988ee0883816b6b9f206f11461c9b3cb5cfbca0c9ab5adc406d395", - "sha256:c16201060c5a3f8742e3deae759014251ac92f382f82bc2a41dc079ff18c3f24", - "sha256:c43b202f65891861a9a336984a103de25de235f756de69e32db893156f767013", - "sha256:c675c6cce4295cb1a692f3de7416aacace7314e064b94bc86e93aceefce7fd3e", - "sha256:d17cec0b992b1434f5f9df9986563605a4d1b1acd5574c87fc2ac014bcbd3316", - "sha256:dc91f6129953861a73d9a65c52a8dd682b561a9ebaf65283541645cab6489917", - "sha256:e2f4cbd1760d2bf2f30e396c2301999aab0191aec031a6a8a04950b2f575a536", - "sha256:f192e6d3556714105c10486bbd6d045e38a0c04d9da3cef21e0a8dfd8e162df4", - "sha256:f775b07026af2b1b0b5a8b05e41571cdcf3a315a67df265d60af301656a5425b", - "sha256:f969ec7f56ba9636679e69ca07fba548312ccaca37412ee823c7f413541ad7e0", - "sha256:f9dc52cd70907aafb99a773b66b156f2f995c7a0d284397c487c8b71ddbef2f9", - "sha256:fc7212e36ebeb81aebf7949c92897b622490d7c0e333a479c0395591e7994600" - ], - "index": "pypi", - "version": "==1.0.7" - }, - "celery": { - "hashes": [ - "sha256:4c4532aa683f170f40bd76f928b70bc06ff171a959e06e71bf35f2f9d6031ef9", - "sha256:528e56767ae7e43a16cfef24ee1062491f5754368d38fcfffa861cdb9ef219be" - ], - "index": "pypi", - "version": "==4.3.0" - }, - "certifi": { - "hashes": [ - "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", - "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef" - ], - "version": "==2019.9.11" - }, - "cffi": { - "hashes": [ - "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774", - "sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d", - "sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90", - "sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b", - "sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63", - "sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45", - "sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25", - "sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3", - "sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b", - "sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647", - 
"sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016", - "sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4", - "sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb", - "sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753", - "sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7", - "sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9", - "sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f", - "sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8", - "sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f", - "sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc", - "sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42", - "sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3", - "sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909", - "sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45", - "sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d", - "sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512", - "sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff", - "sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201" - ], - "version": "==1.12.3" - }, - "chardet": { - "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" - ], - "version": "==3.0.4" - }, - "cycler": { - "hashes": [ - "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d", - "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8" - ], - "version": "==0.10.0" - }, - "decorator": { - "hashes": [ - "sha256:86156361c50488b84a3f148056ea716ca587df2f0de1d34750d35c21312725de", - "sha256:f069f3a01830ca754ba5258fde2278454a0b5b79e0d7f5c13b3b97e57d4acff6" - ], - "version": "==4.4.0" - }, - "defusedxml": { - "hashes": [ - "sha256:6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93", - "sha256:f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5" - ], - "markers": "python_version >= '3.0'", - "version": "==0.6.0" - }, - "django": { - "hashes": [ - "sha256:4025317ca01f75fc79250ff7262a06d8ba97cd4f82e93394b2a0a6a4a925caeb", - "sha256:a8ca1033acac9f33995eb2209a6bf18a4681c3e5269a878e9a7e0b7384ed1ca3" - ], - "index": "pypi", - "version": "==2.2.6" - }, - "django-appconf": { - "hashes": [ - "sha256:35f13ca4d567f132b960e2cd4c832c2d03cb6543452d34e29b7ba10371ba80e3", - "sha256:c98a7af40062e996b921f5962a1c4f3f0c979fa7885f7be4710cceb90ebe13a6" - ], - "version": "==1.0.3" - }, - "django-autocomplete-light": { - "hashes": [ - "sha256:29ce2626a11eab2333e5aa9f95166a6d4400f11b5a05e8f23fa77017b1a9089a" - ], - "index": "pypi", - "version": "==3.4.1" - }, - "django-celery-beat": { - "hashes": [ - "sha256:61c92d4b600a9f24406ee0b8d01a9b192253e15d047e3325e1d81e2cacf7aba6", - "sha256:659b39232c454ac27022bf679939bce0471fd482f3ee9276f5199716cb4afad9" - ], - "index": "pypi", - "version": "==1.5.0" - }, - "django-celery-email": { - "hashes": [ - "sha256:02694114f8a4e4b363cfae48b960473396899cae08351e29b0c5e431d647ef9e", - "sha256:83ad3d4edfccbcdeb8319314ed8c36cf2d017bbb02cae8b459bf6678a804ea44" - ], - "index": "pypi", - "version": "==2.0.2" - }, - "django-celery-results": { - "hashes": [ - 
"sha256:932277e9382528f74778b30cf90e17941cba577b7d73cee09ed55e4972972c32", - "sha256:e735dc3e705a0e21afc3b6fa2918ec388258145fcbaad3727c493c5707d25034" - ], - "index": "pypi", - "version": "==1.1.2" - }, - "django-compat": { - "hashes": [ - "sha256:3ac9a3bedc56b9365d9eb241bc5157d0c193769bf995f9a78dc1bc24e7c2331b" - ], - "version": "==1.0.15" - }, - "django-cors-headers": { - "hashes": [ - "sha256:5762ec9c2d59f38c76828dc1d4308baca4bc0d3e1d6f217683e7a24a1c4611a3", - "sha256:ee02f4b699e9b6645602a46d0adb430ee940a1bf8df64f77e516f8d7711fee60" - ], - "index": "pypi", - "version": "==3.1.1" - }, - "django-countries": { - "hashes": [ - "sha256:1cefad9ec804d6a0318b91c5394b5aef00336755928f44d0a6420507719d65c8", - "sha256:22e96236101783cfe5222ef5174972242a7e8176336d119a4dc111aedce35897" - ], - "index": "pypi", - "version": "==5.5" - }, - "django-crispy-forms": { - "hashes": [ - "sha256:5952bab971110d0b86c278132dae0aa095beee8f723e625c3d3fa28888f1675f", - "sha256:705ededc554ad8736157c666681165fe22ead2dec0d5446d65fc9dd976a5a876" - ], - "index": "pypi", - "version": "==1.7.2" - }, - "django-extensions": { - "hashes": [ - "sha256:526d84b16ee180e45e2305f19d3e01ff3f9f513133839c0b4478b97310ade82a", - "sha256:a78105d5a5e1c3ef44fbe41bc5a19102bda64dbad05515bf791ac6d5d2499ebf" - ], - "index": "pypi", - "version": "==2.2.3" - }, - "django-favicon-plus": { - "hashes": [ - "sha256:3394a951d8dc611eb1ea027ad1181d7f650ca234506585b27e93d7ed06b981bf" - ], - "index": "pypi", - "version": "==0.0.8" - }, - "django-guardian": { - "hashes": [ - "sha256:8cf4efd67a863eb32beafd4335a38ffb083630f8ab2045212d27f8f9c3abe5a6", - "sha256:e638c9a23eeac534bb68b133975539ed8782f733ab6f35c0b23b4c39cd06b1bb" - ], - "version": "==2.1.0" - }, - "django-select2": { - "hashes": [ - "sha256:ad12132e764ce8099bc2746e6af2f33a952b49eb63f3b062eb4739cd4304ee2f", - "sha256:e4beb0e4af27f71e9e2e2f52441aecdb24d401942f18a0375031767cd0e2e5a0" - ], - "index": "pypi", - "version": "==7.1.1" - }, - "django-simple-history": { - "hashes": [ - "sha256:7273add61d3f89453c475531627f8c69cbfc41d6fb99d45278dddc3bafe39284", - "sha256:7f3044439e401fb02b12231b675590865a27a149f6bd99587e429cbe6a9dd6a6" - ], - "index": "pypi", - "version": "==2.7.3" - }, - "django-speedinfo": { - "hashes": [ - "sha256:1d50df3e43319b0169f9632c28b3da36c03e79c4de209a78ed9cdd75b78d13fc", - "sha256:b7bf6d3d1bf982a219e92f963f09fcda9c90a2b01e85b828bb5cd79fec02a32e" - ], - "index": "pypi", - "version": "==1.4.1" - }, - "django-storages": { - "hashes": [ - "sha256:87287b7ad2e789cd603373439994e1ac6f94d9dc2e5f8173d2a87aa3ed458bd9", - "sha256:f3b3def96493d3ccde37b864cea376472baf6e8a596504b209278801c510b807" - ], - "index": "pypi", - "version": "==1.7.2" - }, - "django-summernote": { - "hashes": [ - "sha256:7e2a7cfa806dba508aceee872a7a556b0f86ebcc176f9c3951d4ae56871de609" - ], - "index": "pypi", - "version": "==0.8.11.4" - }, - "django-timezone-field": { - "hashes": [ - "sha256:1a7bbcf984ae191c6dfe713994b4ff4062dc21e47a909356c93e76d027c87c8f", - "sha256:a25af66b86d13709aa8c69a361c1ea68322cda64b5bbf9141fb67b8b44aa4e43" - ], - "version": "==3.1" - }, - "django-userena-ce": { - "hashes": [ - "sha256:33eb5c5105f06cdf2635d7758b809fe2906981acba476ba08fda9cb2d2708c87", - "sha256:75486a0a6d9b9a79cceaccd204593391e513814fb1a9d01d762c600455f00293" - ], - "index": "pypi", - "version": "==4.1.1" - }, - "djangorestframework": { - "hashes": [ - "sha256:5488aed8f8df5ec1d70f04b2114abc52ae6729748a176c453313834a9ee179c8", - "sha256:dc81cbf9775c6898a580f6f1f387c4777d12bd87abf0f5406018d32ccae71090" - ], - "index": "pypi", - 
"version": "==3.10.3" - }, - "djangorestframework-guardian": { - "hashes": [ - "sha256:3bd3dd6ea58e1bceca5048faf6f8b1a93bb5dcff30ba5eb91b9a0e190a48a0c7" - ], - "index": "pypi", - "version": "==0.3.0" - }, - "docker": { - "hashes": [ - "sha256:6e06c5e70ba4fad73e35f00c55a895a448398f3ada7faae072e2bb01348bafc1", - "sha256:8f93775b8bdae3a2df6bc9a5312cce564cade58d6555f2c2570165a1270cd8a7" - ], - "index": "pypi", - "version": "==4.1.0" - }, - "docutils": { - "hashes": [ - "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", - "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", - "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" - ], - "version": "==0.15.2" - }, - "easy-thumbnails": { - "hashes": [ - "sha256:23fbe3415c93b2369ece8ebdfb5faa05540943bef8b941b3118ce769ba95e275" - ], - "version": "==2.6" - }, - "entrypoints": { - "hashes": [ - "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", - "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451" - ], - "version": "==0.3" - }, - "gunicorn": { - "hashes": [ - "sha256:aa8e0b40b4157b36a5df5e599f45c9c76d6af43845ba3b3b0efe2c70473c2471", - "sha256:fa2662097c66f920f53f70621c6c58ca4a3c4d3434205e608e121b5b3b71f4f3" - ], - "index": "pypi", - "version": "==19.9.0" - }, - "html2text": { - "hashes": [ - "sha256:55ce85704f244fc18890c5ded89fa22ff7333e41e9f3cad04d51f48d62ad8834", - "sha256:6f56057c5c2993b5cc5b347cb099bdf6d095828fef1b53ef4e2a2bf2a1be9b4f" - ], - "version": "==2019.9.26" - }, - "httplib2": { - "hashes": [ - "sha256:34537dcdd5e0f2386d29e0e2c6d4a1703a3b982d34c198a5102e6e5d6194b107", - "sha256:409fa5509298f739b34d5a652df762cb0042507dc93f6633e306b11289d6249d" - ], - "version": "==0.14.0" - }, - "idna": { - "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" - ], - "version": "==2.8" - }, - "importlib-metadata": { - "hashes": [ - "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", - "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" - ], - "version": "==0.23" - }, - "ipython-genutils": { - "hashes": [ - "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8", - "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8" - ], - "version": "==0.2.0" - }, - "jinja2": { - "hashes": [ - "sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f", - "sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de" - ], - "version": "==2.10.3" - }, - "jmespath": { - "hashes": [ - "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", - "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c" - ], - "version": "==0.9.4" - }, - "jsonschema": { - "hashes": [ - "sha256:5f9c0a719ca2ce14c5de2fd350a64fd2d13e8539db29836a86adc990bb1a068f", - "sha256:8d4a2b7b6c2237e0199c8ea1a6d3e05bf118e289ae2b9d7ba444182a2959560d" - ], - "index": "pypi", - "version": "==3.0.2" - }, - "jupyter-core": { - "hashes": [ - "sha256:1368a838bba378c3c99f54c2961489831ea929ec7689a1d59d9844e584bc27dc", - "sha256:85103cee6548992780912c1a0a9ec2583a4a18f1ef79a248ec0db4446500bce3" - ], - "version": "==4.6.0" - }, - "kiwisolver": { - "hashes": [ - "sha256:05b5b061e09f60f56244adc885c4a7867da25ca387376b02c1efc29cc16bcd0f", - "sha256:26f4fbd6f5e1dabff70a9ba0d2c4bd30761086454aa30dddc5b52764ee4852b7", - 
"sha256:3b2378ad387f49cbb328205bda569b9f87288d6bc1bf4cd683c34523a2341efe", - "sha256:400599c0fe58d21522cae0e8b22318e09d9729451b17ee61ba8e1e7c0346565c", - "sha256:47b8cb81a7d18dbaf4fed6a61c3cecdb5adec7b4ac292bddb0d016d57e8507d5", - "sha256:53eaed412477c836e1b9522c19858a8557d6e595077830146182225613b11a75", - "sha256:58e626e1f7dfbb620d08d457325a4cdac65d1809680009f46bf41eaf74ad0187", - "sha256:5a52e1b006bfa5be04fe4debbcdd2688432a9af4b207a3f429c74ad625022641", - "sha256:5c7ca4e449ac9f99b3b9d4693debb1d6d237d1542dd6a56b3305fe8a9620f883", - "sha256:682e54f0ce8f45981878756d7203fd01e188cc6c8b2c5e2cf03675390b4534d5", - "sha256:79bfb2f0bd7cbf9ea256612c9523367e5ec51d7cd616ae20ca2c90f575d839a2", - "sha256:7f4dd50874177d2bb060d74769210f3bce1af87a8c7cf5b37d032ebf94f0aca3", - "sha256:8944a16020c07b682df861207b7e0efcd2f46c7488619cb55f65882279119389", - "sha256:8aa7009437640beb2768bfd06da049bad0df85f47ff18426261acecd1cf00897", - "sha256:939f36f21a8c571686eb491acfffa9c7f1ac345087281b412d63ea39ca14ec4a", - "sha256:9733b7f64bd9f807832d673355f79703f81f0b3e52bfce420fc00d8cb28c6a6c", - "sha256:a02f6c3e229d0b7220bd74600e9351e18bc0c361b05f29adae0d10599ae0e326", - "sha256:a0c0a9f06872330d0dd31b45607197caab3c22777600e88031bfe66799e70bb0", - "sha256:acc4df99308111585121db217681f1ce0eecb48d3a828a2f9bbf9773f4937e9e", - "sha256:b64916959e4ae0ac78af7c3e8cef4becee0c0e9694ad477b4c6b3a536de6a544", - "sha256:d3fcf0819dc3fea58be1fd1ca390851bdb719a549850e708ed858503ff25d995", - "sha256:d52e3b1868a4e8fd18b5cb15055c76820df514e26aa84cc02f593d99fef6707f", - "sha256:db1a5d3cc4ae943d674718d6c47d2d82488ddd94b93b9e12d24aabdbfe48caee", - "sha256:e3a21a720791712ed721c7b95d433e036134de6f18c77dbe96119eaf7aa08004", - "sha256:e8bf074363ce2babeb4764d94f8e65efd22e6a7c74860a4f05a6947afc020ff2", - "sha256:f16814a4a96dc04bf1da7d53ee8d5b1d6decfc1a92a63349bb15d37b6a263dd9", - "sha256:f2b22153870ca5cf2ab9c940d7bc38e8e9089fa0f7e5856ea195e1cf4ff43d5a", - "sha256:f790f8b3dff3d53453de6a7b7ddd173d2e020fb160baff578d578065b108a05f" - ], - "version": "==1.1.0" - }, - "kombu": { - "hashes": [ - "sha256:31edb84947996fdda065b6560c128d5673bb913ff34aa19e7b84755217a24deb", - "sha256:c9078124ce2616b29cf6607f0ac3db894c59154252dee6392cdbbe15e5c4b566" - ], - "version": "==4.6.5" - }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - 
"sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" - ], - "version": "==1.1.1" - }, - "matplotlib": { - "hashes": [ - "sha256:1febd22afe1489b13c6749ea059d392c03261b2950d1d45c17e3aed812080c93", - "sha256:31a30d03f39528c79f3a592857be62a08595dec4ac034978ecd0f814fa0eec2d", - "sha256:4442ce720907f67a79d45de9ada47be81ce17e6c2f448b3c64765af93f6829c9", - "sha256:796edbd1182cbffa7e1e7a97f1e141f875a8501ba8dd834269ae3cd45a8c976f", - "sha256:934e6243df7165aad097572abf5b6003c77c9b6c480c3c4de6f2ef1b5fdd4ec0", - "sha256:bab9d848dbf1517bc58d1f486772e99919b19efef5dd8596d4b26f9f5ee08b6b", - "sha256:c1fe1e6cdaa53f11f088b7470c2056c0df7d80ee4858dadf6cbe433fcba4323b", - "sha256:e5b8aeca9276a3a988caebe9f08366ed519fff98f77c6df5b64d7603d0e42e36", - "sha256:ec6bd0a6a58df3628ff269978f4a4b924a0d371ad8ce1f8e2b635b99e482877a" - ], - "index": "pypi", - "version": "==3.1.1" - }, - "mistune": { - "hashes": [ - "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e", - "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4" - ], - "version": "==0.8.4" - }, - "more-itertools": { - "hashes": [ - "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", - "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4" - ], - "version": "==7.2.0" - }, - "nbconvert": { - "hashes": [ - "sha256:427a468ec26e7d68a529b95f578d5cbf018cb4c1f889e897681c2b6d11897695", - "sha256:48d3c342057a2cf21e8df820d49ff27ab9f25fc72b8f15606bd47967333b2709" - ], - "index": "pypi", - "version": "==5.6.0" - }, - "nbformat": { - "hashes": [ - "sha256:b9a0dbdbd45bb034f4f8893cafd6f652ea08c8c1674ba83f2dc55d3955743b0b", - "sha256:f7494ef0df60766b7cabe0a3651556345a963b74dbc16bc7c18479041170d402" - ], - "version": "==4.4.0" - }, - "numpy": { - "hashes": [ - "sha256:05dbfe72684cc14b92568de1bc1f41e5f62b00f714afc9adee42f6311738091f", - "sha256:0d82cb7271a577529d07bbb05cb58675f2deb09772175fab96dc8de025d8ac05", - "sha256:10132aa1fef99adc85a905d82e8497a580f83739837d7cbd234649f2e9b9dc58", - "sha256:12322df2e21f033a60c80319c25011194cd2a21294cc66fee0908aeae2c27832", - "sha256:16f19b3aa775dddc9814e02a46b8e6ae6a54ed8cf143962b4e53f0471dbd7b16", - "sha256:3d0b0989dd2d066db006158de7220802899a1e5c8cf622abe2d0bd158fd01c2c", - "sha256:438a3f0e7b681642898fd7993d38e2bf140a2d1eafaf3e89bb626db7f50db355", - "sha256:5fd214f482ab53f2cea57414c5fb3e58895b17df6e6f5bca5be6a0bb6aea23bb", - "sha256:73615d3edc84dd7c4aeb212fa3748fb83217e00d201875a47327f55363cef2df", - "sha256:7bd355ad7496f4ce1d235e9814ec81ee3d28308d591c067ce92e49f745ba2c2f", - "sha256:7d077f2976b8f3de08a0dcf5d72083f4af5411e8fddacd662aae27baa2601196", - 
"sha256:a4092682778dc48093e8bda8d26ee8360153e2047826f95a3f5eae09f0ae3abf", - "sha256:b458de8624c9f6034af492372eb2fee41a8e605f03f4732f43fc099e227858b2", - "sha256:e70fc8ff03a961f13363c2c95ef8285e0cf6a720f8271836f852cc0fa64e97c8", - "sha256:ee8e9d7cad5fe6dde50ede0d2e978d81eafeaa6233fb0b8719f60214cf226578", - "sha256:f4a4f6aba148858a5a5d546a99280f71f5ee6ec8182a7d195af1a914195b21a2" - ], - "version": "==1.17.2" - }, - "oauth2": { - "hashes": [ - "sha256:15b5c42301f46dd63113f1214b0d81a8b16254f65a86d3c32a1b52297f3266e6", - "sha256:c006a85e7c60107c7cc6da1b184b5c719f6dd7202098196dfa6e55df669b59bf" - ], - "index": "pypi", - "version": "==1.9.0.post1" - }, - "oauthlib": { - "hashes": [ - "sha256:bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889", - "sha256:df884cd6cbe20e32633f1db1072e9356f53638e4361bef4e8b03c9127c9328ea" - ], - "version": "==3.1.0" - }, - "pandocfilters": { - "hashes": [ - "sha256:b3dd70e169bb5449e6bc6ff96aea89c5eea8c5f6ab5e207fc2f521a2cf4a0da9" - ], - "version": "==1.4.2" - }, - "pillow": { - "hashes": [ - "sha256:00fdeb23820f30e43bba78eb9abb00b7a937a655de7760b2e09101d63708b64e", - "sha256:01f948e8220c85eae1aa1a7f8edddcec193918f933fb07aaebe0bfbbcffefbf1", - "sha256:08abf39948d4b5017a137be58f1a52b7101700431f0777bec3d897c3949f74e6", - "sha256:099a61618b145ecb50c6f279666bbc398e189b8bc97544ae32b8fcb49ad6b830", - "sha256:2c1c61546e73de62747e65807d2cc4980c395d4c5600ecb1f47a650c6fa78c79", - "sha256:2ed9c4f694861642401f27dc3cb99772be67cd190e84845c749dae0a06c3bfae", - "sha256:338581b30b908e111be578f0297255f6b57a51358cd16fa0e6f664c9a1f88bff", - "sha256:38c7d48a21cd06fdeee93987147b9b1c55b73b4cfcbf83240568bfbd5adee447", - "sha256:43fd026f613c8e48a25eba1a92f4d2ad7f3903c95d8c33a11611a7717d2ab654", - "sha256:4548236844327a718ce3bb182ab32a16fa2050c61e334e959f554cac052fb0df", - "sha256:5090857876c58885cfa388dc649e5db30aae98a068c26f3fd0ac9d7d9a4d9572", - "sha256:5bbba34f97a26a93f5e8dec469ca4ddd712451418add43da946dbaed7f7a98d2", - "sha256:65a28969a025a0eb4594637b6103201dc4ed2a9508bdab56ac33e43e3081c404", - "sha256:892bb52b70bd5ea9dbbc3ac44f38e84f5a04e9d8b1bff48159d96cb795b81159", - "sha256:8a9becd5cbd5062f973bcd2e7bc79483af310222de112b6541f8af1f93a3cc42", - "sha256:972a7aaeb7c4a2795b52eef52ee991ef040b31009f36deca6207a986607b55f3", - "sha256:97b119c436bfa96a92ac2ca525f7025836d4d4e64b1c9f9eff8dbaf3ff1d86f3", - "sha256:9ba37698e242223f8053cc158f130aee046a96feacbeab65893dbe94f5530118", - "sha256:b1b0e1f626a0f079c0d3696db70132fb1f29aa87c66aecb6501a9b8be64ce9f7", - "sha256:c14c1224fd1a5be2733530d648a316974dbbb3c946913562c6005a76f21ca042", - "sha256:c79a8546c48ae6465189e54e3245a97ddf21161e33ff7eaa42787353417bb2b6", - "sha256:ceb76935ac4ebdf6d7bc845482a4450b284c6ccfb281e34da51d510658ab34d8", - "sha256:e22bffaad04b4d16e1c091baed7f2733fc1ebb91e0c602abf1b6834d17158b1f", - "sha256:ec883b8e44d877bda6f94a36313a1c6063f8b1997aa091628ae2f34c7f97c8d5", - "sha256:f1baa54d50ec031d1a9beb89974108f8f2c0706f49798f4777df879df0e1adb6", - "sha256:f53a5385932cda1e2c862d89460992911a89768c65d176ff8c50cddca4d29bed" - ], - "version": "==6.2.0" - }, - "psycopg2": { - "hashes": [ - "sha256:128d0fa910ada0157bba1cb74a9c5f92bb8a1dca77cf91a31eb274d1f889e001", - "sha256:227fd46cf9b7255f07687e5bde454d7d67ae39ca77e170097cdef8ebfc30c323", - "sha256:2315e7f104681d498ccf6fd70b0dba5bce65d60ac92171492bfe228e21dcc242", - "sha256:4b5417dcd2999db0f5a891d54717cfaee33acc64f4772c4bc574d4ff95ed9d80", - "sha256:640113ddc943522aaf71294e3f2d24013b0edd659b7820621492c9ebd3a2fb0b", - 
"sha256:897a6e838319b4bf648a574afb6cabcb17d0488f8c7195100d48d872419f4457", - "sha256:8dceca81409898c870e011c71179454962dec152a1a6b86a347f4be74b16d864", - "sha256:b1b8e41da09a0c3ef0b3d4bb72da0dde2abebe583c1e8462973233fd5ad0235f", - "sha256:cb407fccc12fc29dc331f2b934913405fa49b9b75af4f3a72d0f50f57ad2ca23", - "sha256:d3a27550a8185e53b244ad7e79e307594b92fede8617d80200a8cce1fba2c60f", - "sha256:f0e6b697a975d9d3ccd04135316c947dd82d841067c7800ccf622a8717e98df1" - ], - "index": "pypi", - "version": "==2.8.3" - }, - "pycparser": { - "hashes": [ - "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3" - ], - "version": "==2.19" - }, - "pygments": { - "hashes": [ - "sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127", - "sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297" - ], - "version": "==2.4.2" - }, - "pyjwt": { - "hashes": [ - "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e", - "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96" - ], - "version": "==1.7.1" - }, - "pyparsing": { - "hashes": [ - "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", - "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4" - ], - "version": "==2.4.2" - }, - "pyrsistent": { - "hashes": [ - "sha256:34b47fa169d6006b32e99d4b3c4031f155e6e68ebcc107d6454852e8e0ee6533" - ], - "version": "==0.15.4" - }, - "python-crontab": { - "hashes": [ - "sha256:ef1eef66c75fa95a934e203e18721987e7824f9b40cad698edbcfaf2ace11d6c" - ], - "version": "==2.3.9" - }, - "python-dateutil": { - "hashes": [ - "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", - "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" - ], - "markers": "python_version >= '2.7'", - "version": "==2.8.0" - }, - "python-magic": { - "hashes": [ - "sha256:f2674dcfad52ae6c49d4803fa027809540b130db1dec928cfbb9240316831375", - "sha256:f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5" - ], - "index": "pypi", - "version": "==0.4.15" - }, - "python-memcached": { - "hashes": [ - "sha256:4dac64916871bd3550263323fc2ce18e1e439080a2d5670c594cf3118d99b594", - "sha256:a2e28637be13ee0bf1a8b6843e7490f9456fd3f2a4cb60471733c7b5d5557e4f" - ], - "index": "pypi", - "version": "==1.59" - }, - "python3-openid": { - "hashes": [ - "sha256:0086da6b6ef3161cfe50fb1ee5cceaf2cda1700019fda03c2c5c440ca6abe4fa", - "sha256:628d365d687e12da12d02c6691170f4451db28d6d68d050007e4a40065868502" - ], - "markers": "python_version >= '3.0'", - "version": "==3.1.0" - }, - "pytz": { - "hashes": [ - "sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d", - "sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be" - ], - "index": "pypi", - "version": "==2019.3" - }, - "pyvips": { - "hashes": [ - "sha256:8992acde85331c08bf4cd0b8213d99bc65c523fc67eade93820d600de138ad04" - ], - "index": "pypi", - "version": "==2.1.8" - }, - "redis": { - "hashes": [ - "sha256:98a22fb750c9b9bb46e75e945dc3f61d0ab30d06117cbb21ff9cd1d315fedd3b", - "sha256:c504251769031b0dd7dd5cf786050a6050197c6de0d37778c80c08cb04ae8275" - ], - "index": "pypi", - "version": "==3.3.8" - }, - "requests": { - "hashes": [ - "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", - "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31" - ], - "version": "==2.22.0" - }, - "requests-file": { - "hashes": [ - "sha256:75c175eed739270aec3c5279ffd74e6527dada275c5c0d76b5817e9c86bb7dea", - 
"sha256:8f04aa6201bacda0567e7ac7f677f1499b0fc76b22140c54bc06edf1ba92e2fa" - ], - "version": "==1.4.3" - }, - "requests-oauthlib": { - "hashes": [ - "sha256:bd6533330e8748e94bf0b214775fed487d309b8b8fe823dc45641ebcd9a32f57", - "sha256:d3ed0c8f2e3bbc6b344fa63d6f933745ab394469da38db16bdddb461c7e25140" - ], - "version": "==1.2.0" - }, - "s3transfer": { - "hashes": [ - "sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d", - "sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba" - ], - "version": "==0.2.1" - }, - "sentry-sdk": { - "hashes": [ - "sha256:15e51e74b924180c98bcd636cb4634945b0a99a124d50b433c3a9dc6a582e8db", - "sha256:1d6a2ee908ec6d8f96c27d78bc39e203df4d586d287c233140af7d8d1aca108a" - ], - "index": "pypi", - "version": "==0.12.3" - }, - "simpleitk": { - "hashes": [ - "sha256:0e8ee33ac7ac8f584d974db8cae200590254375a18594c4843b286d6cb70f132", - "sha256:230208afeae8f9b63f36079abd175ed5e86d1beb763b0e33e07bb9e08d8067f4", - "sha256:29e2ffe31c72ab5da5eac004c8889c5f8d44885125caa16760cfe2a68c8b0ca8", - "sha256:3f02b3bf348c41fdb7b9e6862351e79c5d7a29c8505bcafa8fb33070512031fd", - "sha256:418c12278be6e6abb09d38a5e0f0a9018edbaf6ba12c2de3e5eb030a1ae7a864", - "sha256:4518192d2d58f871d232289410312522578cb3f042694ffa219edd5d2bf4521c", - "sha256:45e9c4317aee670dd9cfe301842ff5fd546076836d2e54954c0a8dfc58270e62", - "sha256:4a49b77dff0cad086bc66acb49e5dc4d956afd55abc473ef58ee0ee535807e81", - "sha256:4a4d2bf927fd93815fcf2d7b43c8f609bed72e02d6fe77a4fa0bb7c9b43210f2", - "sha256:6ee0a1da7475caa82b395ff3ecf88f6f3288ff3306d9615ec991f86da0606263", - "sha256:70e1a3c6c022a319b5936053a9d25c86954dbd0258a9631cf8e6822866af064a", - "sha256:789ca18ee88d84f4ec283f9b46ed50eb2bdb280da401b9904fbf3c7ece71194e", - "sha256:7db4959edb3c819c543d66f2cc0ff8f28b663c9a4d985d01cb458a8f55124b79", - "sha256:8476ff362fb2616251e6925bb01895d7d417c3c311b972830db9041bad9da7c5", - "sha256:8afd3094eaf8f465502def86d87428fbdb8d82b812e98fcc25e56a3913d51199", - "sha256:93fed3b379eb3393a164f52b2b6bb94fc07f93a1c46961d9e830e4e15de3e0ea", - "sha256:a5cd99aef18b88df95d3c7e396709ed7a650c66e945ac99b45899e71852b2ead", - "sha256:b96d58523bbe33e1feaddf85a0bd677942ffb71d40cb7c4e693e1dde6fa70d0c", - "sha256:bfffad9470749db91eccfb8db9626ab50b0c1bf2e940616c177e3c7a13e8b523", - "sha256:d524e4707ff53f6d22e95746d51eee88cb604ebbe1589f9b553c668a765877ae", - "sha256:d691c93aa1419f077b08d19ddbb602941dd60df0d4520221a606935a6b626bcb", - "sha256:e5351cbd1a23369d7c8fa24ff51ee11231b0b46e7912fbdc4cefd062713325ca", - "sha256:ea7d3599162b6b3c51bc5de4c8c401938720a17751d425575893b171ecf51b0a", - "sha256:ef765be04357cfd78e84fbdb43430a1b6c1f530ebbde793c101f5dadedd079c1" - ], - "index": "pypi", - "version": "==1.2.2" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "social-auth-app-django": { - "hashes": [ - "sha256:6d0dd18c2d9e71ca545097d57b44d26f59e624a12833078e8e52f91baf849778", - "sha256:9237e3d7b6f6f59494c3b02e0cce6efc69c9d33ad9d1a064e3b2318bcbe89ae3", - "sha256:f151396e5b16e2eee12cd2e211004257826ece24fc4ae97a147df386c1cd7082" - ], - "index": "pypi", - "version": "==3.1.0" - }, - "social-auth-core": { - "hashes": [ - "sha256:47cd2458c8fefd02466b0c514643e02ad8b61d8b4b69f7573e80882e3a97b0f0", - "sha256:8320666548a532eb158968eda542bbe1863682357c432d8c4e28034a7f1e3b58", - "sha256:d81ed681e3c0722300b61a0792c5db5d21206793f95ca810f010c1cc931c8d89" - ], - "version": "==3.2.0" - 
}, - "sorl-thumbnail": { - "hashes": [ - "sha256:8dfe5fda91a5047d1d35a0b9effe7b000764a01d648e15ca076f44e9c34b6dbd", - "sha256:d9e3f018d19293824803e4ffead96b19dfcd44fa7987cea392f50436817bef34" - ], - "index": "pypi", - "version": "==12.5.0" - }, - "soupsieve": { - "hashes": [ - "sha256:605f89ad5fdbfefe30cdc293303665eff2d188865d4dbe4eb510bba1edfbfce3", - "sha256:b91d676b330a0ebd5b21719cb6e9b57c57d433671f65b9c28dd3461d9a1ed0b6" - ], - "version": "==1.9.4" - }, - "sqlparse": { - "hashes": [ - "sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177", - "sha256:7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873" - ], - "version": "==0.3.0" - }, - "testpath": { - "hashes": [ - "sha256:46c89ebb683f473ffe2aab0ed9f12581d4d078308a3cb3765d79c6b2317b0109", - "sha256:b694b3d9288dbd81685c5d2e7140b81365d46c29f5db4bc659de5aa6b98780f8" - ], - "version": "==0.4.2" - }, - "tifffile": { - "hashes": [ - "sha256:645e3a427743b9a892c835bcc363b043e732489621d2b6de12933c82b591398c", - "sha256:6b875c4342a55cbed8ef4af3aa9d7a06b02219089b9f5d7a457918dc73f61f9d" - ], - "index": "pypi", - "version": "==2019.1.4" - }, - "tldextract": { - "hashes": [ - "sha256:2c1c5d9d454f79734b4f3da0d603856dd9f820753410a3e9abf0a0c9fde33e97", - "sha256:b72bef6013de67c7fa181250bc2c2e089a994d259c09ca95a9771f2f97e29ed1" - ], - "index": "pypi", - "version": "==2.2.1" - }, - "traitlets": { - "hashes": [ - "sha256:70b4c6a1d9019d7b4f6846832288f86998aa3b9207c6821f3578a6a6a467fe44", - "sha256:d023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7" - ], - "version": "==4.3.3" - }, - "urllib3": { - "hashes": [ - "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", - "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" - ], - "markers": "python_version >= '3.4'", - "version": "==1.25.6" - }, - "vine": { - "hashes": [ - "sha256:133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87", - "sha256:ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af" - ], - "version": "==1.3.0" - }, - "webencodings": { - "hashes": [ - "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", - "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" - ], - "version": "==0.5.1" - }, - "websocket-client": { - "hashes": [ - "sha256:1151d5fb3a62dc129164292e1227655e4bbc5dd5340a5165dfae61128ec50aa9", - "sha256:1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a" - ], - "version": "==0.56.0" - }, - "whitenoise": { - "hashes": [ - "sha256:22f79cf8f1f509639330f93886acaece8ec5ac5e9600c3b981d33c34e8a42dfd", - "sha256:6dfea214b7c12efd689007abf9afa87a426586e9dbc051873ad2c8e535e2a1ac" - ], - "index": "pypi", - "version": "==4.1.4" - }, - "zipp": { - "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" - ], - "version": "==0.6.0" - } - }, - "develop": { - "alabaster": { - "hashes": [ - "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", - "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02" - ], - "version": "==0.7.12" - }, - "apipkg": { - "hashes": [ - "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", - "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c" - ], - "version": "==1.5" - }, - "appdirs": { - "hashes": [ - "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", - 
"sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e" - ], - "version": "==1.4.3" - }, - "argh": { - "hashes": [ - "sha256:a9b3aaa1904eeb78e32394cd46c6f37ac0fb4af6dc488daa58971bdc7d7fcaf3", - "sha256:e9535b8c84dc9571a48999094fda7f33e63c3f1b74f3e5f3ac0105a58405bb65" - ], - "version": "==0.26.2" - }, - "atomicwrites": { - "hashes": [ - "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", - "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" - ], - "version": "==1.3.0" - }, - "attrs": { - "hashes": [ - "sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2", - "sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396" - ], - "version": "==19.2.0" - }, - "babel": { - "hashes": [ - "sha256:af92e6106cb7c55286b25b38ad7695f8b4efb36a90ba483d7f7a6628c46158ab", - "sha256:e86135ae101e31e2c8ec20a4e0c5220f4eed12487d5cf3f78be7e98d3a57fc28" - ], - "version": "==2.7.0" - }, - "black": { - "hashes": [ - "sha256:09a9dcb7c46ed496a9850b76e4e825d6049ecd38b611f1224857a79bd985a8cf", - "sha256:68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c" - ], - "index": "pypi", - "version": "==19.3b0" - }, - "certifi": { - "hashes": [ - "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", - "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef" - ], - "version": "==2019.9.11" - }, - "chardet": { - "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" - ], - "version": "==3.0.4" - }, - "click": { - "hashes": [ - "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", - "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" - ], - "version": "==7.0" - }, - "coverage": { - "hashes": [ - "sha256:08907593569fe59baca0bf152c43f3863201efb6113ecb38ce7e97ce339805a6", - "sha256:0be0f1ed45fc0c185cfd4ecc19a1d6532d72f86a2bac9de7e24541febad72650", - "sha256:141f08ed3c4b1847015e2cd62ec06d35e67a3ac185c26f7635f4406b90afa9c5", - "sha256:19e4df788a0581238e9390c85a7a09af39c7b539b29f25c89209e6c3e371270d", - "sha256:23cc09ed395b03424d1ae30dcc292615c1372bfba7141eb85e11e50efaa6b351", - "sha256:245388cda02af78276b479f299bbf3783ef0a6a6273037d7c60dc73b8d8d7755", - "sha256:331cb5115673a20fb131dadd22f5bcaf7677ef758741312bee4937d71a14b2ef", - "sha256:386e2e4090f0bc5df274e720105c342263423e77ee8826002dcffe0c9533dbca", - "sha256:3a794ce50daee01c74a494919d5ebdc23d58873747fa0e288318728533a3e1ca", - "sha256:60851187677b24c6085248f0a0b9b98d49cba7ecc7ec60ba6b9d2e5574ac1ee9", - "sha256:63a9a5fc43b58735f65ed63d2cf43508f462dc49857da70b8980ad78d41d52fc", - "sha256:6b62544bb68106e3f00b21c8930e83e584fdca005d4fffd29bb39fb3ffa03cb5", - "sha256:6ba744056423ef8d450cf627289166da65903885272055fb4b5e113137cfa14f", - "sha256:7494b0b0274c5072bddbfd5b4a6c6f18fbbe1ab1d22a41e99cd2d00c8f96ecfe", - "sha256:826f32b9547c8091679ff292a82aca9c7b9650f9fda3e2ca6bf2ac905b7ce888", - "sha256:93715dffbcd0678057f947f496484e906bf9509f5c1c38fc9ba3922893cda5f5", - "sha256:9a334d6c83dfeadae576b4d633a71620d40d1c379129d587faa42ee3e2a85cce", - "sha256:af7ed8a8aa6957aac47b4268631fa1df984643f07ef00acd374e456364b373f5", - "sha256:bf0a7aed7f5521c7ca67febd57db473af4762b9622254291fbcbb8cd0ba5e33e", - "sha256:bf1ef9eb901113a9805287e090452c05547578eaab1b62e4ad456fcc049a9b7e", - "sha256:c0afd27bc0e307a1ffc04ca5ec010a290e49e3afbe841c5cafc5c5a80ecd81c9", - 
"sha256:dd579709a87092c6dbee09d1b7cfa81831040705ffa12a1b248935274aee0437", - "sha256:df6712284b2e44a065097846488f66840445eb987eb81b3cc6e4149e7b6982e1", - "sha256:e07d9f1a23e9e93ab5c62902833bf3e4b1f65502927379148b6622686223125c", - "sha256:e2ede7c1d45e65e209d6093b762e98e8318ddeff95317d07a27a2140b80cfd24", - "sha256:e4ef9c164eb55123c62411f5936b5c2e521b12356037b6e1c2617cef45523d47", - "sha256:eca2b7343524e7ba246cab8ff00cab47a2d6d54ada3b02772e908a45675722e2", - "sha256:eee64c616adeff7db37cc37da4180a3a5b6177f5c46b187894e633f088fb5b28", - "sha256:ef824cad1f980d27f26166f86856efe11eff9912c4fed97d3804820d43fa550c", - "sha256:efc89291bd5a08855829a3c522df16d856455297cf35ae827a37edac45f466a7", - "sha256:fa964bae817babece5aa2e8c1af841bebb6d0b9add8e637548809d040443fee0", - "sha256:ff37757e068ae606659c28c3bd0d923f9d29a85de79bf25b2b34b148473b5025" - ], - "version": "==4.5.4" - }, - "django": { - "hashes": [ - "sha256:4025317ca01f75fc79250ff7262a06d8ba97cd4f82e93394b2a0a6a4a925caeb", - "sha256:a8ca1033acac9f33995eb2209a6bf18a4681c3e5269a878e9a7e0b7384ed1ca3" - ], - "index": "pypi", - "version": "==2.2.6" - }, - "django-debug-toolbar": { - "hashes": [ - "sha256:17c53cd6bf4e7d69902aedf9a1d26c5d3b7369b54c5718744704f27b5a72f35d", - "sha256:9a23ada2e43cd989195db3c18710b5d7451134a0d48127ab64c1d2ad81700342" - ], - "index": "pypi", - "version": "==2.0" - }, - "docutils": { - "hashes": [ - "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", - "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", - "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" - ], - "version": "==0.15.2" - }, - "execnet": { - "hashes": [ - "sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50", - "sha256:d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547" - ], - "version": "==1.7.1" - }, - "factory-boy": { - "hashes": [ - "sha256:728df59b372c9588b83153facf26d3d28947fc750e8e3c95cefa9bed0e6394ee", - "sha256:faf48d608a1735f0d0a3c9cbf536d64f9132b547dae7ba452c4d99a79e84a370" - ], - "index": "pypi", - "version": "==2.12.0" - }, - "faker": { - "hashes": [ - "sha256:45cc9cca3de8beba5a2da3bd82a6e5544f53da1a702645c8485f682366c15026", - "sha256:a6459ff518d1fc6ee2238a7209e6c899517872c7e1115510279033ffe6fe8ef3" - ], - "version": "==2.0.2" - }, - "idna": { - "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" - ], - "version": "==2.8" - }, - "imagesize": { - "hashes": [ - "sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8", - "sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5" - ], - "version": "==1.1.0" - }, - "importlib-metadata": { - "hashes": [ - "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", - "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" - ], - "version": "==0.23" - }, - "jinja2": { - "hashes": [ - "sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f", - "sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de" - ], - "version": "==2.10.3" - }, - "livereload": { - "hashes": [ - "sha256:78d55f2c268a8823ba499305dcac64e28ddeb9a92571e12d543cd304faf5817b", - "sha256:89254f78d7529d7ea0a3417d224c34287ebfe266b05e67e51facaf82c27f0f66" - ], - "version": "==2.6.1" - }, - "markupsafe": { - "hashes": [ - "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", - 
"sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", - "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", - "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", - "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", - "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", - "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", - "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", - "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", - "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", - "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", - "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", - "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", - "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", - "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", - "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", - "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", - "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", - "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", - "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", - "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", - "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", - "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", - "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", - "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", - "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", - "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", - "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" - ], - "version": "==1.1.1" - }, - "more-itertools": { - "hashes": [ - "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", - "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4" - ], - "version": "==7.2.0" - }, - "packaging": { - "hashes": [ - "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", - "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" - ], - "version": "==19.2" - }, - "pathtools": { - "hashes": [ - "sha256:7c35c5421a39bb82e58018febd90e3b6e5db34c5443aaaf742b3f33d4655f1c0" - ], - "version": "==0.1.2" - }, - "pluggy": { - "hashes": [ - "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", - "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34" - ], - "version": "==0.13.0" - }, - "port-for": { - "hashes": [ - "sha256:b16a84bb29c2954db44c29be38b17c659c9c27e33918dec16b90d375cc596f1c" - ], - "version": "==0.3.1" - }, - "py": { - "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" - ], - "version": "==1.8.0" - }, - "pygments": { - "hashes": [ - "sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127", - "sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297" - ], - "version": "==2.4.2" - }, - "pyparsing": { - "hashes": [ - 
"sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", - "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4" - ], - "version": "==2.4.2" - }, - "pytest": { - "hashes": [ - "sha256:7e4800063ccfc306a53c461442526c5571e1462f61583506ce97e4da6a1d88c8", - "sha256:ca563435f4941d0cb34767301c27bc65c510cb82e90b9ecf9cb52dc2c63caaa0" - ], - "version": "==5.2.1" - }, - "pytest-cov": { - "hashes": [ - "sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b", - "sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626" - ], - "index": "pypi", - "version": "==2.8.1" - }, - "pytest-django": { - "hashes": [ - "sha256:264fb4c506db5d48a6364c311a0b00b7b48a52715bad8839b2d8bee9b99ed6bb", - "sha256:4adfe5fb3ed47f0ba55506dd3daf688b1f74d5e69148c10ad2dd2f79f40c0d62" - ], - "index": "pypi", - "version": "==3.5.1" - }, - "pytest-forked": { - "hashes": [ - "sha256:5fe33fbd07d7b1302c95310803a5e5726a4ff7f19d5a542b7ce57c76fed8135f", - "sha256:d352aaced2ebd54d42a65825722cb433004b4446ab5d2044851d9cc7a00c9e38" - ], - "version": "==1.0.2" - }, - "pytest-mock": { - "hashes": [ - "sha256:34520283d459cdf1d0dbb58a132df804697f1b966ecedf808bbf3d255af8f659", - "sha256:f1ab8aefe795204efe7a015900296d1719e7bf0f4a0558d71e8599da1d1309d0" - ], - "index": "pypi", - "version": "==1.11.1" - }, - "pytest-xdist": { - "hashes": [ - "sha256:5d1b1d4461518a6023d56dab62fb63670d6f7537f23e2708459a557329accf48", - "sha256:a8569b027db70112b290911ce2ed732121876632fb3f40b1d39cd2f72f58b147" - ], - "index": "pypi", - "version": "==1.30.0" - }, - "python-dateutil": { - "hashes": [ - "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", - "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" - ], - "markers": "python_version >= '2.7'", - "version": "==2.8.0" - }, - "pytz": { - "hashes": [ - "sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d", - "sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be" - ], - "index": "pypi", - "version": "==2019.3" - }, - "pyupgrade": { - "hashes": [ - "sha256:016ca25d9233c0abc09577981580a5f96a15f34b9d955d6dd40f709f5065c946", - "sha256:e8984d5c5b4c509b111720d73d85caa5fc75c90b37609b037d19b25b5b3c9eb5" - ], - "index": "pypi", - "version": "==1.24.1" - }, - "pyyaml": { - "hashes": [ - "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", - "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", - "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", - "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", - "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", - "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", - "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", - "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", - "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", - "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", - "sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", - "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", - "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" - ], - "version": "==5.1.2" - }, - "requests": { - "hashes": [ - "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", - "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31" - 
], - "version": "==2.22.0" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "snowballstemmer": { - "hashes": [ - "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0", - "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52" - ], - "version": "==2.0.0" - }, - "sphinx": { - "hashes": [ - "sha256:0d586b0f8c2fc3cc6559c5e8fd6124628110514fda0e5d7c82e682d749d2e845", - "sha256:839a3ed6f6b092bb60f492024489cc9e6991360fb9f52ed6361acd510d261069" - ], - "index": "pypi", - "version": "==2.2.0" - }, - "sphinx-autobuild": { - "hashes": [ - "sha256:66388f81884666e3821edbe05dd53a0cfb68093873d17320d0610de8db28c74e", - "sha256:e60aea0789cab02fa32ee63c7acae5ef41c06f1434d9fd0a74250a61f5994692" - ], - "index": "pypi", - "version": "==0.7.1" - }, - "sphinx-autodoc-typehints": { - "hashes": [ - "sha256:0d968ec3ee4f7fe7695ab6facf5cd2d74d3cea67584277458ad9b2788ebbcc3b", - "sha256:8edca714fd3de8e43467d7e51dd3812fe999f8874408a639f7c38a9e1a5a4eb3" - ], - "index": "pypi", - "version": "==1.8.0" - }, - "sphinx-rtd-theme": { - "hashes": [ - "sha256:00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4", - "sha256:728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a" - ], - "index": "pypi", - "version": "==0.4.3" - }, - "sphinxcontrib-applehelp": { - "hashes": [ - "sha256:edaa0ab2b2bc74403149cb0209d6775c96de797dfd5b5e2a71981309efab3897", - "sha256:fb8dee85af95e5c30c91f10e7eb3c8967308518e0f7488a2828ef7bc191d0d5d" - ], - "version": "==1.0.1" - }, - "sphinxcontrib-devhelp": { - "hashes": [ - "sha256:6c64b077937330a9128a4da74586e8c2130262f014689b4b89e2d08ee7294a34", - "sha256:9512ecb00a2b0821a146736b39f7aeb90759834b07e81e8cc23a9c70bacb9981" - ], - "version": "==1.0.1" - }, - "sphinxcontrib-htmlhelp": { - "hashes": [ - "sha256:4670f99f8951bd78cd4ad2ab962f798f5618b17675c35c5ac3b2132a14ea8422", - "sha256:d4fd39a65a625c9df86d7fa8a2d9f3cd8299a3a4b15db63b50aac9e161d8eff7" - ], - "version": "==1.0.2" - }, - "sphinxcontrib-jsmath": { - "hashes": [ - "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", - "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" - ], - "version": "==1.0.1" - }, - "sphinxcontrib-qthelp": { - "hashes": [ - "sha256:513049b93031beb1f57d4daea74068a4feb77aa5630f856fcff2e50de14e9a20", - "sha256:79465ce11ae5694ff165becda529a600c754f4bc459778778c7017374d4d406f" - ], - "version": "==1.0.2" - }, - "sphinxcontrib-serializinghtml": { - "hashes": [ - "sha256:c0efb33f8052c04fd7a26c0a07f1678e8512e0faec19f4aa8f2473a8b81d5227", - "sha256:db6615af393650bf1151a6cd39120c29abaf93cc60db8c48eb2dddbfdc3a9768" - ], - "version": "==1.1.3" - }, - "sqlparse": { - "hashes": [ - "sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177", - "sha256:7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873" - ], - "version": "==0.3.0" - }, - "text-unidecode": { - "hashes": [ - "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8", - "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93" - ], - "version": "==1.3" - }, - "tokenize-rt": { - "hashes": [ - "sha256:2f44eee8f620102f8a03c50142795121faf86e020d208896ea7a7047bbe933cf", - "sha256:53f5c22d36e5c6f8e3fdbc6cb4dd151d1b3d38cea1b85b5fef6268f153733899" - ], - "version": "==3.2.0" - }, - "toml": { - "hashes": [ - 
"sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", - "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" - ], - "version": "==0.10.0" - }, - "tornado": { - "hashes": [ - "sha256:349884248c36801afa19e342a77cc4458caca694b0eda633f5878e458a44cb2c", - "sha256:398e0d35e086ba38a0427c3b37f4337327231942e731edaa6e9fd1865bbd6f60", - "sha256:4e73ef678b1a859f0cb29e1d895526a20ea64b5ffd510a2307b5998c7df24281", - "sha256:559bce3d31484b665259f50cd94c5c28b961b09315ccd838f284687245f416e5", - "sha256:abbe53a39734ef4aba061fca54e30c6b4639d3e1f59653f0da37a0003de148c7", - "sha256:c845db36ba616912074c5b1ee897f8e0124df269468f25e4fe21fe72f6edd7a9", - "sha256:c9399267c926a4e7c418baa5cbe91c7d1cf362d505a1ef898fde44a07c9dd8a5" - ], - "version": "==6.0.3" - }, - "urllib3": { - "hashes": [ - "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", - "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" - ], - "markers": "python_version >= '3.4'", - "version": "==1.25.6" - }, - "watchdog": { - "hashes": [ - "sha256:965f658d0732de3188211932aeb0bb457587f04f63ab4c1e33eab878e9de961d" - ], - "version": "==0.9.0" - }, - "wcwidth": { - "hashes": [ - "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", - "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" - ], - "version": "==0.1.7" - }, - "werkzeug": { - "hashes": [ - "sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7", - "sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4" - ], - "index": "pypi", - "version": "==0.16.0" - }, - "zipp": { - "hashes": [ - "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", - "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" - ], - "version": "==0.6.0" - } - } -} diff --git a/app/config/settings.py b/app/config/settings.py index 6ceb780d86..0399b00015 100644 --- a/app/config/settings.py +++ b/app/config/settings.py @@ -184,6 +184,7 @@ def strtobool(val) -> bool: "LOCATION": "memcached:11211", } } +SPEEDINFO_STORAGE = "speedinfo.storage.cache.storage.CacheStorage" ROOT_URLCONF = "config.urls" SUBDOMAIN_URL_CONF = "grandchallenge.subdomains.urls" diff --git a/app/tests/core_tests/test_formatting.py b/app/tests/core_tests/test_formatting.py index 58499649d0..61befe9f9c 100644 --- a/app/tests/core_tests/test_formatting.py +++ b/app/tests/core_tests/test_formatting.py @@ -2,5 +2,7 @@ def test_code_is_black(): - res = call(["black", "--check", "--config", "/tmp/pyproject.toml", "/app"]) + res = call( + ["black", "--check", "--config", "/opt/poetry/pyproject.toml", "/app"] + ) assert res == 0 diff --git a/dockerfiles/web/Dockerfile b/dockerfiles/web/Dockerfile index 7a7e441db0..afa80e8764 100644 --- a/dockerfiles/web/Dockerfile +++ b/dockerfiles/web/Dockerfile @@ -18,19 +18,20 @@ RUN apt-get update && \ ENV PYTHONUNBUFFERED 1 -RUN mkdir -p /opt/pipenv /app /static /opt/static /dbox/Dropbox/media +RUN mkdir -p /opt/poetry /app /static /opt/static /dbox/Dropbox/media RUN python -m pip install -U pip -RUN python -m pip install -U pipenv +RUN python -m pip install -U poetry -# Install base python packages -WORKDIR /opt/pipenv -COPY Pipfile /opt/pipenv -COPY Pipfile.lock /opt/pipenv -RUN pipenv install --system - -RUN groupadd -g 2001 -r django && useradd -u 2001 -r -g django django +RUN groupadd -g 2001 -r django && useradd -m -u 2001 -r -g django django +RUN chown django:django /opt/poetry /app /static /opt/static /dbox/Dropbox/media +USER 
django:django -RUN chown django:django /app /static /opt/static /dbox/Dropbox/media +# Install base python packages +WORKDIR /opt/poetry +COPY pyproject.toml /opt/poetry +COPY poetry.lock /opt/poetry +RUN poetry install --no-dev +ENV PATH="/home/django/.cache/pypoetry/virtualenvs/grand-challenge.org-py3.7/bin:$PATH" ################### # Webpack # @@ -47,22 +48,18 @@ RUN npm install && npm run build ################### FROM base as test -WORKDIR /opt/pipenv -RUN pipenv install --system --dev +RUN poetry install -USER django:django WORKDIR /app COPY --chown=django:django ./app/ /app/ COPY --from=npm --chown=django:django /src/dist/ /opt/static/vendor/ RUN python manage.py collectstatic --noinput -COPY --chown=django:django pyproject.toml /tmp/pyproject.toml ################## # Dist Container # ################## FROM base as dist -USER django:django WORKDIR /app COPY --chown=django:django ./app/ /app/ COPY --from=npm --chown=django:django /src/dist/ /opt/static/vendor/ diff --git a/docs/getting-started.rst b/docs/getting-started.rst index 041db7c433..146bbc6d28 100644 --- a/docs/getting-started.rst +++ b/docs/getting-started.rst @@ -157,9 +157,9 @@ with the service running in the docker container. $ ./cycle_docker_compose.sh -2. Make sure you have ``pipenv`` installed. -3. In a new terminal, create a new virtual python environment using ``pipenv install --dev`` in this repository's root folder. -4. Activate the virtual env: ``pipenv shell``. +2. Make sure you have ``poetry`` installed. +3. In a new terminal, create a new virtual python environment using ``poetry install`` in this repository's root folder. +4. Activate the virtual env: ``poetry shell``. 5. Load the environmental variables contained in ``.env.local`` .. code-block:: console @@ -179,7 +179,7 @@ with the service running in the docker container. 8. To setup PyCharm: - 1. ``File`` -> ``Settings`` -> ``Project: grand-challenge.org`` -> ``Project Interpreter`` -> Select your created pipenv environment + 1. ``File`` -> ``Settings`` -> ``Project: grand-challenge.org`` -> ``Project Interpreter`` -> Select your created virtual environment 2. For each run/debug configuration, make sure the environmental variables are loaded, the easiest is to use `this plugin <https://plugins.jetbrains.com/plugin/7861-envfile>`_. Or they can be pasted after pressing the folder icon in the ``Environmental variables`` field. @@ -223,53 +223,27 @@ Having built the web container with ``cycle_docker_compose.sh`` you can use this This will create the docs in the ``docs/_build/html`` directory. -Using pipenv -~~~~~~~~~~~~ - -Alternatively, to build the docs locally you need to install the environment on your local machine, we use pipenv for this. - -1. Install pipenv - -.. code-block:: console - - $ pip install pipenv - -2. Install the environment from the root of the ``grand-challenge.org`` repo with - -.. code-block:: console - - $ pipenv install - -3. You can then launch a shell in this newly created environment to build the docs - -.. code-block:: console - - $ pipenv shell - $ cd docs - $ make html - - Adding new dependencies ----------------------- -Pipenv is used to manage the dependencies of the platform. +Poetry is used to manage the dependencies of the platform. To add a new dependency use .. code-block:: console - $ pipenv install <whatever> + $ poetry add <whatever> -and then commit the ``Pipfile`` and ``Pipfile.lock``. -If this is a development dependency then use the ``--dev`` flag, see the ``pipenv`` documentation for more details. 
+and then commit the ``pyproject.toml`` and ``poetry.lock``. +If this is a development dependency then use the ``--dev`` flag, see the ``poetry`` documentation for more details. -Versions are unpinned in the ``Pipfile``, to update the resolved dependencies use +Versions are unpinned in the ``pyproject.toml`` file, to update the resolved dependencies use .. code-block:: console - $ pipenv update + $ poetry lock -and commit the update ``Pipfile.lock``. +and commit the update ``poetry.lock``. The containers will need to be rebuilt after running these steps, so stop the ``cycle_docker_compose.sh`` process with ``CTRL+C`` and restart. Going to Production diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000000..c185163f4b --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1742 @@ +[[package]] +category = "dev" +description = "A configurable sidebar-enabled Sphinx theme" +name = "alabaster" +optional = false +python-versions = "*" +version = "0.7.12" + +[[package]] +category = "main" +description = "Low-level AMQP client for Python (fork of amqplib)." +name = "amqp" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "2.5.1" + +[package.dependencies] +vine = ">=1.1.3,<5.0.0a1" + +[[package]] +category = "dev" +description = "apipkg: namespace control and lazy-import mechanism" +name = "apipkg" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.5" + +[[package]] +category = "dev" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +name = "appdirs" +optional = false +python-versions = "*" +version = "1.4.3" + +[[package]] +category = "dev" +description = "An unobtrusive argparse wrapper with natural syntax" +name = "argh" +optional = false +python-versions = "*" +version = "0.26.2" + +[[package]] +category = "dev" +description = "Atomic file writes." +name = "atomicwrites" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.3.0" + +[[package]] +category = "main" +description = "Classes Without Boilerplate" +name = "attrs" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "19.3.0" + +[[package]] +category = "dev" +description = "Internationalization utilities" +name = "babel" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.7.0" + +[package.dependencies] +pytz = ">=2015.7" + +[[package]] +category = "main" +description = "Screen-scraping library" +name = "beautifulsoup4" +optional = false +python-versions = "*" +version = "4.8.1" + +[package.dependencies] +soupsieve = ">=1.2" + +[[package]] +category = "main" +description = "Python multiprocessing fork with improvements and bugfixes" +name = "billiard" +optional = false +python-versions = "*" +version = "3.6.1.0" + +[[package]] +category = "dev" +description = "The uncompromising code formatter." +name = "black" +optional = false +python-versions = ">=3.6" +version = "19.3b0" + +[package.dependencies] +appdirs = "*" +attrs = ">=18.1.0" +click = ">=6.5" +toml = ">=0.9.4" + +[[package]] +category = "main" +description = "An easy safelist-based HTML-sanitizing tool." 
+name = "bleach" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "3.1.0" + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[[package]] +category = "main" +description = "The AWS SDK for Python" +name = "boto3" +optional = false +python-versions = "*" +version = "1.10.1" + +[package.dependencies] +botocore = ">=1.13.1,<1.14.0" +jmespath = ">=0.7.1,<1.0.0" +s3transfer = ">=0.2.0,<0.3.0" + +[[package]] +category = "main" +description = "Low-level, data-driven core of boto 3." +name = "botocore" +optional = false +python-versions = "*" +version = "1.13.1" + +[package.dependencies] +docutils = ">=0.10,<0.16" +jmespath = ">=0.7.1,<1.0.0" + +[package.dependencies.python-dateutil] +python = ">=2.7" +version = ">=2.1,<3.0.0" + +[package.dependencies.urllib3] +python = ">=3.4" +version = ">=1.20,<1.26" + +[[package]] +category = "main" +description = "Python bindings for the Brotli compression library" +name = "brotli" +optional = false +python-versions = "*" +version = "1.0.7" + +[[package]] +category = "main" +description = "Distributed Task Queue." +name = "celery" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "4.3.0" + +[package.dependencies] +billiard = ">=3.6.0,<4.0" +kombu = ">=4.4.0,<5.0" +pytz = ">0.0-dev" +vine = ">=1.3.0" + +[[package]] +category = "main" +description = "Python package for providing Mozilla's CA Bundle." +name = "certifi" +optional = false +python-versions = "*" +version = "2019.9.11" + +[[package]] +category = "main" +description = "Foreign Function Interface for Python calling C code." +name = "cffi" +optional = false +python-versions = "*" +version = "1.13.1" + +[package.dependencies] +pycparser = "*" + +[[package]] +category = "main" +description = "Universal encoding detector for Python 2 and 3" +name = "chardet" +optional = false +python-versions = "*" +version = "3.0.4" + +[[package]] +category = "dev" +description = "Composable command line interface toolkit" +name = "click" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "7.0" + +[[package]] +category = "dev" +description = "Cross-platform colored terminal text." +marker = "sys_platform == \"win32\"" +name = "colorama" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.4.1" + +[[package]] +category = "dev" +description = "Code coverage measurement for Python" +name = "coverage" +optional = false +python-versions = "*" +version = "4.4.2" + +[[package]] +category = "main" +description = "Composable style cycles" +name = "cycler" +optional = false +python-versions = "*" +version = "0.10.0" + +[package.dependencies] +six = "*" + +[[package]] +category = "main" +description = "Better living through Python with decorators" +name = "decorator" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" +version = "4.4.0" + +[[package]] +category = "main" +description = "XML bomb protection for Python stdlib modules" +name = "defusedxml" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.6.0" + +[[package]] +category = "main" +description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." 
+name = "django" +optional = false +python-versions = ">=3.5" +version = "2.2.6" + +[package.dependencies] +pytz = "*" +sqlparse = "*" + +[[package]] +category = "main" +description = "A helper class for handling configuration defaults of packaged apps gracefully." +name = "django-appconf" +optional = false +python-versions = "*" +version = "1.0.3" + +[package.dependencies] +django = "*" +six = "*" + +[[package]] +category = "main" +description = "Fresh autocompletes for Django" +name = "django-autocomplete-light" +optional = false +python-versions = "*" +version = "3.4.1" + +[[package]] +category = "main" +description = "Database-backed Periodic Tasks." +name = "django-celery-beat" +optional = false +python-versions = "*" +version = "1.5.0" + +[package.dependencies] +django-timezone-field = ">=2.0" +python-crontab = ">=2.3.4" + +[[package]] +category = "main" +description = "An async Django email backend using celery" +name = "django-celery-email" +optional = false +python-versions = "*" +version = "2.0.2" + +[package.dependencies] +celery = ">=4.0" +django = ">=1.8" +django-appconf = "*" + +[[package]] +category = "main" +description = "Celery result backends for Django." +name = "django-celery-results" +optional = false +python-versions = "*" +version = "1.1.2" + +[package.dependencies] +celery = ">=4.3,<5.0" + +[[package]] +category = "main" +description = "For- and backwards compatibility layer for Django 1.4, 1.7, 1.8, 1.9, 1.10, and 1.11" +name = "django-compat" +optional = false +python-versions = "*" +version = "1.0.15" + +[package.dependencies] +six = ">=1.10.0" + +[[package]] +category = "main" +description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." +name = "django-cors-headers" +optional = false +python-versions = ">=3.5" +version = "3.1.1" + +[package.dependencies] +Django = ">=1.11" + +[[package]] +category = "main" +description = "Provides a country field for Django models." +name = "django-countries" +optional = false +python-versions = "*" +version = "5.5" + +[package.dependencies] +six = "*" + +[[package]] +category = "main" +description = "Best way to have Django DRY forms" +name = "django-crispy-forms" +optional = false +python-versions = "*" +version = "1.8.0" + +[[package]] +category = "dev" +description = "A configurable set of panels that display various debug information about the current request/response." +name = "django-debug-toolbar" +optional = false +python-versions = ">=3.5" +version = "2.0" + +[package.dependencies] +Django = ">=1.11" +sqlparse = ">=0.2.0" + +[[package]] +category = "main" +description = "Extensions for Django" +name = "django-extensions" +optional = false +python-versions = "*" +version = "2.2.5" + +[package.dependencies] +six = ">=1.2" + +[[package]] +category = "main" +description = "Favicon app for django" +name = "django-favicon-plus" +optional = false +python-versions = "*" +version = "0.0.8" + +[package.dependencies] +django = "*" +django-compat = "*" +pillow = "*" + +[[package]] +category = "main" +description = "Implementation of per object permissions for Django." 
+name = "django-guardian" +optional = false +python-versions = ">=3.5" +version = "2.1.0" + +[[package]] +category = "main" +description = "Select2 option fields for Django" +name = "django-select2" +optional = false +python-versions = "*" +version = "7.1.1" + +[package.dependencies] +django = ">=2.0" +django-appconf = ">=0.6.0" + +[[package]] +category = "main" +description = "Store model history and view/revert changes from admin site." +name = "django-simple-history" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.7.3" + +[package.dependencies] +six = "*" + +[[package]] +category = "main" +description = "Live profiling tool for Django framework to measure views performance" +name = "django-speedinfo" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.0.0" + +[[package]] +category = "main" +description = "Support for many storage backends in Django" +name = "django-storages" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.7.2" + +[package.dependencies] +Django = ">=1.11" + +[[package]] +category = "main" +description = "Summernote plugin for Django" +name = "django-summernote" +optional = false +python-versions = "*" +version = "0.8.11.4" + +[package.dependencies] +django = "*" + +[[package]] +category = "main" +description = "A Django app providing database and form fields for pytz timezone objects." +name = "django-timezone-field" +optional = false +python-versions = "*" +version = "3.1" + +[package.dependencies] +django = ">=1.11" +pytz = "*" + +[[package]] +category = "main" +description = "Complete user management application for Django" +name = "django-userena-ce" +optional = false +python-versions = "*" +version = "4.1.1" + +[package.dependencies] +Django = ">=1.11" +django-guardian = ">=1.4.2" +easy-thumbnails = "*" +html2text = "*" + +[[package]] +category = "main" +description = "Web APIs for Django, made easy." +name = "djangorestframework" +optional = false +python-versions = ">=3.5" +version = "3.10.3" + +[[package]] +category = "main" +description = "django-guardian support for Django REST Framework" +name = "djangorestframework-guardian" +optional = false +python-versions = "*" +version = "0.3.0" + +[package.dependencies] +django = "*" +django-guardian = "*" +djangorestframework = "*" + +[[package]] +category = "main" +description = "A Python library for the Docker Engine API." +name = "docker" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "4.1.0" + +[package.dependencies] +requests = ">=2.14.2,<2.18.0 || >2.18.0" +six = ">=1.4.0" +websocket-client = ">=0.32.0" + +[package.dependencies.pypiwin32] +python = ">=3.6" +version = "223" + +[[package]] +category = "main" +description = "Docutils -- Python Documentation Utilities" +name = "docutils" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "0.15.2" + +[[package]] +category = "main" +description = "Easy thumbnails for Django" +name = "easy-thumbnails" +optional = false +python-versions = "*" +version = "2.6" + +[package.dependencies] +[package.dependencies.django] +python = ">=3" +version = ">=1.8" + +[package.dependencies.pillow] +python = ">=2.7" +version = "*" + +[[package]] +category = "main" +description = "Discover and load entry points from installed packages." 
+name = "entrypoints" +optional = false +python-versions = ">=2.7" +version = "0.3" + +[[package]] +category = "dev" +description = "execnet: rapid multi-Python deployment" +name = "execnet" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.7.1" + +[package.dependencies] +apipkg = ">=1.4" + +[[package]] +category = "dev" +description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." +name = "factory-boy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.12.0" + +[package.dependencies] +Faker = ">=0.7.0" + +[[package]] +category = "dev" +description = "Faker is a Python package that generates fake data for you." +name = "faker" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.0.3" + +[package.dependencies] +python-dateutil = ">=2.4" +six = ">=1.10" +text-unidecode = "1.3" + +[[package]] +category = "main" +description = "WSGI HTTP Server for UNIX" +name = "gunicorn" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" +version = "19.9.0" + +[[package]] +category = "main" +description = "Turn HTML into equivalent Markdown-structured text." +name = "html2text" +optional = false +python-versions = ">=3.5" +version = "2019.9.26" + +[[package]] +category = "main" +description = "A comprehensive HTTP client library." +name = "httplib2" +optional = false +python-versions = "*" +version = "0.14.0" + +[[package]] +category = "main" +description = "Internationalized Domain Names in Applications (IDNA)" +name = "idna" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.8" + +[[package]] +category = "main" +description = "Image transformation, compression, and decompression codecs" +marker = "platform_system == \"Windows\"" +name = "imagecodecs" +optional = false +python-versions = ">=2.7" +version = "2019.5.22" + +[package.dependencies] +numpy = ">=1.11.3" + +[[package]] +category = "dev" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +name = "imagesize" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.1.0" + +[[package]] +category = "main" +description = "Read metadata from Python packages" +name = "importlib-metadata" +optional = false +python-versions = ">=2.7,!=3.0,!=3.1,!=3.2,!=3.3" +version = "0.23" + +[package.dependencies] +zipp = ">=0.5" + +[[package]] +category = "main" +description = "Vestigial utilities from IPython" +name = "ipython-genutils" +optional = false +python-versions = "*" +version = "0.2.0" + +[[package]] +category = "main" +description = "A very fast and expressive template engine." +name = "jinja2" +optional = false +python-versions = "*" +version = "2.10.3" + +[package.dependencies] +MarkupSafe = ">=0.23" + +[[package]] +category = "main" +description = "JSON Matching Expressions" +name = "jmespath" +optional = false +python-versions = "*" +version = "0.9.4" + +[[package]] +category = "main" +description = "An implementation of JSON Schema validation for Python" +name = "jsonschema" +optional = false +python-versions = "*" +version = "3.1.1" + +[package.dependencies] +attrs = ">=17.4.0" +importlib-metadata = "*" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[[package]] +category = "main" +description = "Jupyter core package. A base package on which Jupyter projects rely." 
+name = "jupyter-core" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2" +version = "4.6.1" + +[package.dependencies] +pywin32 = ">=1.0" +traitlets = "*" + +[[package]] +category = "main" +description = "A fast implementation of the Cassowary constraint solver" +name = "kiwisolver" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.1.0" + +[package.dependencies] +setuptools = "*" + +[[package]] +category = "main" +description = "Messaging library for Python." +name = "kombu" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "4.6.5" + +[package.dependencies] +amqp = "2.5.1" +importlib-metadata = ">=0.18" + +[[package]] +category = "dev" +description = "Python LiveReload is an awesome tool for web developers" +name = "livereload" +optional = false +python-versions = "*" +version = "2.6.1" + +[package.dependencies] +six = "*" +tornado = "*" + +[[package]] +category = "main" +description = "Safely add untrusted strings to HTML/XML markup." +name = "markupsafe" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +version = "1.1.1" + +[[package]] +category = "main" +description = "Python plotting package" +name = "matplotlib" +optional = false +python-versions = ">=3.6" +version = "3.1.1" + +[package.dependencies] +cycler = ">=0.10" +kiwisolver = ">=1.0.1" +numpy = ">=1.11" +pyparsing = ">=2.0.1,<2.0.4 || >2.0.4,<2.1.2 || >2.1.2,<2.1.6 || >2.1.6" +python-dateutil = ">=2.1" + +[[package]] +category = "main" +description = "The fastest markdown parser in pure Python" +name = "mistune" +optional = false +python-versions = "*" +version = "0.8.4" + +[[package]] +category = "main" +description = "More routines for operating on iterables, beyond itertools" +name = "more-itertools" +optional = false +python-versions = ">=3.4" +version = "7.2.0" + +[[package]] +category = "main" +description = "Converting Jupyter Notebooks" +name = "nbconvert" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "5.6.0" + +[package.dependencies] +bleach = "*" +defusedxml = "*" +entrypoints = ">=0.2.2" +jinja2 = ">=2.4" +jupyter-core = "*" +mistune = ">=0.8.1,<2" +nbformat = ">=4.4" +pandocfilters = ">=1.4.1" +pygments = "*" +testpath = "*" +traitlets = ">=4.2" + +[[package]] +category = "main" +description = "The Jupyter Notebook format" +name = "nbformat" +optional = false +python-versions = "*" +version = "4.4.0" + +[package.dependencies] +ipython-genutils = "*" +jsonschema = ">=2.4,<2.5.0 || >2.5.0" +jupyter-core = "*" +traitlets = ">=4.1" + +[[package]] +category = "main" +description = "NumPy is the fundamental package for array computing with Python." 
+name = "numpy" +optional = false +python-versions = ">=3.5" +version = "1.17.3" + +[[package]] +category = "main" +description = "library for OAuth version 1.9" +name = "oauth2" +optional = false +python-versions = "*" +version = "1.9.0.post1" + +[package.dependencies] +httplib2 = "*" + +[[package]] +category = "main" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +name = "oauthlib" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "3.1.0" + +[[package]] +category = "dev" +description = "Core utilities for Python packages" +name = "packaging" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "19.2" + +[package.dependencies] +pyparsing = ">=2.0.2" +six = "*" + +[[package]] +category = "main" +description = "Utilities for writing pandoc filters in python" +name = "pandocfilters" +optional = false +python-versions = "*" +version = "1.4.2" + +[[package]] +category = "dev" +description = "File system general utilities" +name = "pathtools" +optional = false +python-versions = "*" +version = "0.1.2" + +[[package]] +category = "main" +description = "Python Imaging Library (Fork)" +name = "pillow" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "6.2.1" + +[[package]] +category = "main" +description = "Interface Python with pkg-config" +name = "pkgconfig" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "1.5.1" + +[[package]] +category = "dev" +description = "plugin and hook calling mechanisms for python" +name = "pluggy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.13.0" + +[package.dependencies] +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[[package]] +category = "dev" +description = "Utility that helps with local TCP ports managment. It can find an unused TCP localhost port and remember the association." +name = "port-for" +optional = false +python-versions = "*" +version = "0.3.1" + +[[package]] +category = "main" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +name = "psycopg2" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +version = "2.8.4" + +[[package]] +category = "dev" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "py" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.8.0" + +[[package]] +category = "main" +description = "C parser in Python" +name = "pycparser" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.19" + +[[package]] +category = "main" +description = "Pygments is a syntax highlighting package written in Python." 
+name = "pygments" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "2.4.2" + +[[package]] +category = "main" +description = "JSON Web Token implementation in Python" +name = "pyjwt" +optional = false +python-versions = "*" +version = "1.7.1" + +[[package]] +category = "main" +description = "Python parsing module" +name = "pyparsing" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.4.2" + +[[package]] +category = "main" +description = "" +marker = "sys_platform == \"win32\" and python_version >= \"3.6\"" +name = "pypiwin32" +optional = false +python-versions = "*" +version = "223" + +[package.dependencies] +pywin32 = ">=223" + +[[package]] +category = "main" +description = "Persistent/Functional/Immutable data structures" +name = "pyrsistent" +optional = false +python-versions = "*" +version = "0.15.4" + +[package.dependencies] +six = "*" + +[[package]] +category = "dev" +description = "pytest: simple powerful testing with Python" +name = "pytest" +optional = false +python-versions = ">=3.5" +version = "5.2.1" + +[package.dependencies] +atomicwrites = ">=1.0" +attrs = ">=17.4.0" +colorama = "*" +more-itertools = ">=4.0.0" +packaging = "*" +pluggy = ">=0.12,<1.0" +py = ">=1.5.0" +wcwidth = "*" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[[package]] +category = "dev" +description = "Pytest plugin for measuring coverage." +name = "pytest-cov" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.8.1" + +[package.dependencies] +coverage = ">=4.4" +pytest = ">=3.6" + +[[package]] +category = "dev" +description = "A Django plugin for pytest." +name = "pytest-django" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "3.6.0" + +[package.dependencies] +pytest = ">=3.6" + +[[package]] +category = "dev" +description = "run tests in isolated forked subprocesses" +name = "pytest-forked" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.1.3" + +[package.dependencies] +pytest = ">=3.1.0" + +[[package]] +category = "dev" +description = "Thin-wrapper around the mock package for easier use with py.test" +name = "pytest-mock" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.11.2" + +[package.dependencies] +pytest = ">=2.7" + +[[package]] +category = "dev" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +name = "pytest-xdist" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.30.0" + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=4.4.0" +pytest-forked = "*" +six = "*" + +[[package]] +category = "main" +description = "Python Crontab API" +name = "python-crontab" +optional = false +python-versions = "*" +version = "2.4.0" + +[package.dependencies] +python-dateutil = "*" + +[[package]] +category = "main" +description = "Extensions to the standard Python datetime module" +name = "python-dateutil" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.8.0" + +[package.dependencies] +six = ">=1.5" + +[[package]] +category = "main" +description = "File type identification using libmagic" +name = "python-magic" +optional = false +python-versions = "*" +version = "0.4.15" + +[[package]] +category = "main" +description = "Pure python memcached client" +name = "python-memcached" +optional = false 
+python-versions = "*" +version = "1.59" + +[package.dependencies] +six = ">=1.4.0" + +[[package]] +category = "main" +description = "OpenID support for modern servers and consumers." +marker = "python_version >= \"3.0\"" +name = "python3-openid" +optional = false +python-versions = "*" +version = "3.1.0" + +[package.dependencies] +defusedxml = "*" + +[[package]] +category = "main" +description = "World timezone definitions, modern and historical" +name = "pytz" +optional = false +python-versions = "*" +version = "2019.3" + +[[package]] +category = "dev" +description = "A tool to automatically upgrade syntax for newer versions." +name = "pyupgrade" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "1.25.1" + +[package.dependencies] +tokenize-rt = ">=3.2.0" + +[[package]] +category = "main" +description = "binding for the libvips image processing library, API mode" +name = "pyvips" +optional = false +python-versions = "*" +version = "2.1.8" + +[package.dependencies] +cffi = ">=1.0.0" +pkgconfig = "*" + +[[package]] +category = "main" +description = "Python for Window Extensions" +marker = "sys_platform == \"win32\" and python_version >= \"3.6\" or sys_platform == \"win32\"" +name = "pywin32" +optional = false +python-versions = "*" +version = "225" + +[[package]] +category = "dev" +description = "YAML parser and emitter for Python" +name = "pyyaml" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "5.1.2" + +[[package]] +category = "main" +description = "Python client for Redis key-value store" +name = "redis" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "3.3.11" + +[[package]] +category = "main" +description = "Python HTTP for Humans." +name = "requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "2.22.0" + +[package.dependencies] +certifi = ">=2017.4.17" +chardet = ">=3.0.2,<3.1.0" +idna = ">=2.5,<2.9" +urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" + +[[package]] +category = "main" +description = "File transport adapter for Requests" +name = "requests-file" +optional = false +python-versions = "*" +version = "1.4.3" + +[package.dependencies] +requests = ">=1.0.0" +six = "*" + +[[package]] +category = "main" +description = "OAuthlib authentication support for Requests." 
+name = "requests-oauthlib" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.2.0" + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[[package]] +category = "main" +description = "An Amazon S3 Transfer Manager" +name = "s3transfer" +optional = false +python-versions = "*" +version = "0.2.1" + +[package.dependencies] +botocore = ">=1.12.36,<2.0.0" + +[[package]] +category = "main" +description = "Python client for Sentry (https://getsentry.com)" +name = "sentry-sdk" +optional = false +python-versions = "*" +version = "0.13.0" + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.9" + +[[package]] +category = "main" +description = "SimpleITK is a simplified interface to the Insight Toolkit (ITK) for image registration and segmentation" +name = "simpleitk" +optional = false +python-versions = "*" +version = "1.2.3" + +[[package]] +category = "main" +description = "Python 2 and 3 compatibility utilities" +name = "six" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" +version = "1.12.0" + +[[package]] +category = "dev" +description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." +name = "snowballstemmer" +optional = false +python-versions = "*" +version = "2.0.0" + +[[package]] +category = "main" +description = "Python Social Authentication, Django integration." +name = "social-auth-app-django" +optional = false +python-versions = "*" +version = "3.1.0" + +[package.dependencies] +six = "*" +social-auth-core = ">=1.2.0" + +[[package]] +category = "main" +description = "Python social authentication made simple." +name = "social-auth-core" +optional = false +python-versions = "*" +version = "3.2.0" + +[package.dependencies] +PyJWT = ">=1.4.0" +oauthlib = ">=1.0.3" +requests = ">=2.9.1" +requests-oauthlib = ">=0.6.1" +six = ">=1.10.0" + +[package.dependencies.defusedxml] +python = ">=3.0" +version = ">=0.5.0rc1" + +[package.dependencies.python3-openid] +python = ">=3.0" +version = ">=3.0.10" + +[[package]] +category = "main" +description = "Thumbnails for Django" +name = "sorl-thumbnail" +optional = false +python-versions = "*" +version = "12.5.0" + +[[package]] +category = "main" +description = "A modern CSS selector implementation for Beautiful Soup." +name = "soupsieve" +optional = false +python-versions = "*" +version = "1.9.4" + +[[package]] +category = "dev" +description = "Python documentation generator" +name = "sphinx" +optional = false +python-versions = ">=3.5" +version = "2.2.0" + +[package.dependencies] +Jinja2 = ">=2.3" +Pygments = ">=2.0" +alabaster = ">=0.7,<0.8" +babel = ">=1.3,<2.0 || >2.0" +colorama = ">=0.3.5" +docutils = ">=0.12" +imagesize = "*" +packaging = "*" +requests = ">=2.5.0" +setuptools = "*" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = "*" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = "*" + +[[package]] +category = "dev" +description = "Watch a Sphinx directory and rebuild the documentation when a change is detected. Also includes a livereload enabled web server." 
+name = "sphinx-autobuild" +optional = false +python-versions = "*" +version = "0.7.1" + +[package.dependencies] +PyYAML = ">=3.10" +argh = ">=0.24.1" +livereload = ">=2.3.0" +pathtools = ">=0.1.2" +port-for = "0.3.1" +tornado = ">=3.2" +watchdog = ">=0.7.1" + +[[package]] +category = "dev" +description = "Type hints (PEP 484) support for the Sphinx autodoc extension" +name = "sphinx-autodoc-typehints" +optional = false +python-versions = ">=3.5.2" +version = "1.8.0" + +[package.dependencies] +Sphinx = ">=2.1" + +[[package]] +category = "dev" +description = "Read the Docs theme for Sphinx" +name = "sphinx-rtd-theme" +optional = false +python-versions = "*" +version = "0.4.3" + +[package.dependencies] +sphinx = "*" + +[[package]] +category = "dev" +description = "" +name = "sphinxcontrib-applehelp" +optional = false +python-versions = "*" +version = "1.0.1" + +[[package]] +category = "dev" +description = "" +name = "sphinxcontrib-devhelp" +optional = false +python-versions = "*" +version = "1.0.1" + +[[package]] +category = "dev" +description = "" +name = "sphinxcontrib-htmlhelp" +optional = false +python-versions = "*" +version = "1.0.2" + +[[package]] +category = "dev" +description = "A sphinx extension which renders display math in HTML via JavaScript" +name = "sphinxcontrib-jsmath" +optional = false +python-versions = ">=3.5" +version = "1.0.1" + +[[package]] +category = "dev" +description = "" +name = "sphinxcontrib-qthelp" +optional = false +python-versions = "*" +version = "1.0.2" + +[[package]] +category = "dev" +description = "" +name = "sphinxcontrib-serializinghtml" +optional = false +python-versions = "*" +version = "1.1.3" + +[[package]] +category = "main" +description = "Non-validating SQL parser" +name = "sqlparse" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.3.0" + +[[package]] +category = "main" +description = "Test utilities for code working with files and commands" +name = "testpath" +optional = false +python-versions = "*" +version = "0.4.2" + +[[package]] +category = "dev" +description = "The most basic Text::Unidecode port" +name = "text-unidecode" +optional = false +python-versions = "*" +version = "1.3" + +[[package]] +category = "main" +description = "Read and write TIFF(r) files" +name = "tifffile" +optional = false +python-versions = ">=2.7" +version = "2019.1.4" + +[package.dependencies] +imagecodecs = ">=2019.1.1" +numpy = ">=1.11.3" + +[[package]] +category = "main" +description = "Accurately separate the TLD from the registered domain and subdomains of a URL, using the Public Suffix List. By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." +name = "tldextract" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.2.2" + +[package.dependencies] +idna = "*" +requests = ">=2.1.0" +requests-file = ">=1.4" +setuptools = "*" + +[[package]] +category = "dev" +description = "A wrapper around the stdlib `tokenize` which roundtrips." 
+name = "tokenize-rt" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "3.2.0" + +[[package]] +category = "dev" +description = "Python Library for Tom's Obvious, Minimal Language" +name = "toml" +optional = false +python-versions = "*" +version = "0.10.0" + +[[package]] +category = "dev" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +name = "tornado" +optional = false +python-versions = ">= 3.5" +version = "6.0.3" + +[[package]] +category = "main" +description = "Traitlets Python config system" +name = "traitlets" +optional = false +python-versions = "*" +version = "4.3.3" + +[package.dependencies] +decorator = "*" +ipython-genutils = "*" +six = "*" + +[[package]] +category = "main" +description = "HTTP library with thread-safe connection pooling, file post, and more." +name = "urllib3" +optional = false +python-versions = "*" +version = "1.22" + +[[package]] +category = "main" +description = "Promises, promises, promises." +name = "vine" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.3.0" + +[[package]] +category = "dev" +description = "Filesystem events monitoring" +name = "watchdog" +optional = false +python-versions = "*" +version = "0.9.0" + +[package.dependencies] +PyYAML = ">=3.10" +argh = ">=0.24.1" +pathtools = ">=0.1.1" + +[[package]] +category = "dev" +description = "Measures number of Terminal column cells of wide-character codes" +name = "wcwidth" +optional = false +python-versions = "*" +version = "0.1.7" + +[[package]] +category = "main" +description = "Character encoding aliases for legacy web content" +name = "webencodings" +optional = false +python-versions = "*" +version = "0.5.1" + +[[package]] +category = "main" +description = "WebSocket client for Python. hybi13 is supported." +name = "websocket-client" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.56.0" + +[package.dependencies] +six = "*" + +[[package]] +category = "dev" +description = "The comprehensive WSGI web application library." 
+name = "werkzeug" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.16.0" + +[[package]] +category = "main" +description = "Radically simplified static file serving for WSGI applications" +name = "whitenoise" +optional = false +python-versions = "*" +version = "4.1.4" + +[[package]] +category = "main" +description = "Backport of pathlib-compatible object wrapper for zip files" +name = "zipp" +optional = false +python-versions = ">=2.7" +version = "0.6.0" + +[package.dependencies] +more-itertools = "*" + +[metadata] +content-hash = "54122c835ab71dd5f2622914812254dcb7281ed3806b3a1f99c4a0d851f9cd0f" +python-versions = ">=3.7" + +[metadata.hashes] +alabaster = ["446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", "a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"] +amqp = ["19a917e260178b8d410122712bac69cb3e6db010d68f6101e7307508aded5e68", "19d851b879a471fcfdcf01df9936cff924f422baa77653289f7095dedd5fb26a"] +apipkg = ["37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", "58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"] +appdirs = ["9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", "d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"] +argh = ["a9b3aaa1904eeb78e32394cd46c6f37ac0fb4af6dc488daa58971bdc7d7fcaf3", "e9535b8c84dc9571a48999094fda7f33e63c3f1b74f3e5f3ac0105a58405bb65"] +atomicwrites = ["03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", "75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"] +attrs = ["08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", "f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"] +babel = ["af92e6106cb7c55286b25b38ad7695f8b4efb36a90ba483d7f7a6628c46158ab", "e86135ae101e31e2c8ec20a4e0c5220f4eed12487d5cf3f78be7e98d3a57fc28"] +beautifulsoup4 = ["5279c36b4b2ec2cb4298d723791467e3000e5384a43ea0cdf5d45207c7e97169", "6135db2ba678168c07950f9a16c4031822c6f4aec75a65e0a97bc5ca09789931", "dcdef580e18a76d54002088602eba453eec38ebbcafafeaabd8cab12b6155d57"] +billiard = ["01afcb4e7c4fd6480940cfbd4d9edc19d7a7509d6ada533984d0d0f49901ec82", "b8809c74f648dfe69b973c8e660bcec00603758c9db8ba89d7719f88d5f01f26"] +black = ["09a9dcb7c46ed496a9850b76e4e825d6049ecd38b611f1224857a79bd985a8cf", "68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c"] +bleach = ["213336e49e102af26d9cde77dd2d0397afabc5a6bf2fed985dc35b5d1e285a16", "3fdf7f77adcf649c9911387df51254b813185e32b2c6619f690b593a617e19fa"] +boto3 = ["2904bfb928116fea3a83247de6c3687eb9bf942d764e361f5574d5ac11be2ad3", "77806f23320554b5d3175f4ef864e4ca6eb04d97a95ad6d2b3e3ef7736472c35"] +botocore = ["05d42876001fe6513742edcdb550f0aeabff2b123678a6b661cfeb6c26066b8e", "acceec0b79df7e5f8b15eab3033f6a7c7f69d0bc27aa01d65aa5ba4d90742787"] +brotli = ["0538dc1744fd17c314d2adc409ea7d1b779783b89fd95bcfb0c2acc93a6ea5a7", "0970a47f471782912d7705160b2b0a9306e68e6fadf9cffcaeb42d8f0951e26c", "113f51658e6fe548dce4b3749f6ef6c24de4184ba9c10a909cbee4261c2a5da0", "1e1aa9c4d1558889f42749c8baf846007953bfd32c8209230cf1cd1f5ef33495", "2f2f4f78f29ac4a45d15b3d9fc3fd9705e0ad313a44b129f6e1d0c6916bad0e2", "3269f6de1dd150fd0cce1c158b61ff5ac06d627fd3ae9c6ea03aed26fbbff7ea", "50dd9ad2a2bb12da4e9002a438672d182f98e546e99952de80280a1e1729664f", "5519a4b01b1a4f965083cbfa2ef2b9774c5a5f352341c47b50776ad109423d72", "5eb27722d320370315971c427eb8aa7cc0791f2a458840d357ac653bd0ad3a14", "5f06b4d5b6f58e5b5c220c2f23cad034dc5efa51b01fde2351ced1605bd980e2", 
"72848d25a5f9e736db4af4512e0c3feecc094d57d241f8f1ae959115a2c39756", "743001bca75f4a6b4454be3510feca46f9d61a0c782a9bc2bc684bdb245e279e", "9d1c2dd27a1083fefd05b1b2f8df4a6bc2aaa6c21dd82cd41c8ae5e7c23a87f8", "a13ce9b419fe9f277c63f700efb0e444331509d1881b5610d2ba7e9080606967", "a19ef0952b9d2803df88dff07f45a6c92d5676afb9b8d69cf32232d684036d11", "ad766ca8b8c1419b71a22756b45264f45725c86133dc80a7cbe30b6b78c75620", "ad7963f261988ee0883816b6b9f206f11461c9b3cb5cfbca0c9ab5adc406d395", "c16201060c5a3f8742e3deae759014251ac92f382f82bc2a41dc079ff18c3f24", "c43b202f65891861a9a336984a103de25de235f756de69e32db893156f767013", "c675c6cce4295cb1a692f3de7416aacace7314e064b94bc86e93aceefce7fd3e", "d17cec0b992b1434f5f9df9986563605a4d1b1acd5574c87fc2ac014bcbd3316", "dc91f6129953861a73d9a65c52a8dd682b561a9ebaf65283541645cab6489917", "e2f4cbd1760d2bf2f30e396c2301999aab0191aec031a6a8a04950b2f575a536", "f192e6d3556714105c10486bbd6d045e38a0c04d9da3cef21e0a8dfd8e162df4", "f775b07026af2b1b0b5a8b05e41571cdcf3a315a67df265d60af301656a5425b", "f969ec7f56ba9636679e69ca07fba548312ccaca37412ee823c7f413541ad7e0", "f9dc52cd70907aafb99a773b66b156f2f995c7a0d284397c487c8b71ddbef2f9", "fc7212e36ebeb81aebf7949c92897b622490d7c0e333a479c0395591e7994600"] +celery = ["4c4532aa683f170f40bd76f928b70bc06ff171a959e06e71bf35f2f9d6031ef9", "528e56767ae7e43a16cfef24ee1062491f5754368d38fcfffa861cdb9ef219be"] +certifi = ["e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", "fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"] +cffi = ["00d890313797d9fe4420506613384b43099ad7d2b905c0752dbcc3a6f14d80fa", "0cf9e550ac6c5e57b713437e2f4ac2d7fd0cd10336525a27224f5fc1ec2ee59a", "0ea23c9c0cdd6778146a50d867d6405693ac3b80a68829966c98dd5e1bbae400", "193697c2918ecdb3865acf6557cddf5076bb39f1f654975e087b67efdff83365", "1ae14b542bf3b35e5229439c35653d2ef7d8316c1fffb980f9b7647e544baa98", "1e389e069450609c6ffa37f21f40cce36f9be7643bbe5051ab1de99d5a779526", "263242b6ace7f9cd4ea401428d2d45066b49a700852334fd55311bde36dcda14", "33142ae9807665fa6511cfa9857132b2c3ee6ddffb012b3f0933fc11e1e830d5", "364f8404034ae1b232335d8c7f7b57deac566f148f7222cef78cf8ae28ef764e", "47368f69fe6529f8f49a5d146ddee713fc9057e31d61e8b6dc86a6a5e38cecc1", "4895640844f17bec32943995dc8c96989226974dfeb9dd121cc45d36e0d0c434", "558b3afef987cf4b17abd849e7bedf64ee12b28175d564d05b628a0f9355599b", "5ba86e1d80d458b338bda676fd9f9d68cb4e7a03819632969cf6d46b01a26730", "63424daa6955e6b4c70dc2755897f5be1d719eabe71b2625948b222775ed5c43", "6381a7d8b1ebd0bc27c3bc85bc1bfadbb6e6f756b4d4db0aa1425c3719ba26b4", "6381ab708158c4e1639da1f2a7679a9bbe3e5a776fc6d1fd808076f0e3145331", "6fd58366747debfa5e6163ada468a90788411f10c92597d3b0a912d07e580c36", "728ec653964655d65408949b07f9b2219df78badd601d6c49e28d604efe40599", "7cfcfda59ef1f95b9f729c56fe8a4041899f96b72685d36ef16a3440a0f85da8", "819f8d5197c2684524637f940445c06e003c4a541f9983fd30d6deaa2a5487d8", "825ecffd9574557590e3225560a8a9d751f6ffe4a49e3c40918c9969b93395fa", "9009e917d8f5ef780c2626e29b6bc126f4cb2a4d43ca67aa2b40f2a5d6385e78", "9c77564a51d4d914ed5af096cd9843d90c45b784b511723bd46a8a9d09cf16fc", "a19089fa74ed19c4fe96502a291cfdb89223a9705b1d73b3005df4256976142e", "a40ed527bffa2b7ebe07acc5a3f782da072e262ca994b4f2085100b5a444bbb2", "bb75ba21d5716abc41af16eac1145ab2e471deedde1f22c6f99bd9f995504df0", "e22a00c0c81ffcecaf07c2bfb3672fa372c50e2bd1024ffee0da191c1b27fc71", "e55b5a746fb77f10c83e8af081979351722f6ea48facea79d470b3731c7b2891", "ec2fa3ee81707a5232bf2dfbd6623fdb278e070d596effc7e2d788f2ada71a05", 
"fd82eb4694be712fcae03c717ca2e0fc720657ac226b80bbb597e971fc6928c2"] +chardet = ["84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", "fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"] +click = ["2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"] +colorama = ["05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", "f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"] +coverage = ["007eeef7e23f9473622f7d94a3e029a45d55a92a1f083f0f3512f5ab9a669b05", "0388c12539372bb92d6dde68b4627f0300d948965bbb7fc104924d715fdc0965", "079248312838c4c8f3494934ab7382a42d42d5f365f0cf7516f938dbb3f53f3f", "17307429935f96c986a1b1674f78079528833410750321d22b5fb35d1883828e", "1afccd7e27cac1b9617be8c769f6d8a6d363699c9b86820f40c74cfb3328921c", "2ad357d12971e77360034c1596011a03f50c0f9e1ecd12e081342b8d1aee2236", "2b4d7f03a8a6632598cbc5df15bbca9f778c43db7cf1a838f4fa2c8599a8691a", "2e1a5c6adebb93c3b175103c2f855eda957283c10cf937d791d81bef8872d6ca", "309d91bd7a35063ec7a0e4d75645488bfab3f0b66373e7722f23da7f5b0f34cc", "358d635b1fc22a425444d52f26287ae5aea9e96e254ff3c59c407426f44574f4", "3f4d0b3403d3e110d2588c275540649b1841725f5a11a7162620224155d00ba2", "43a155eb76025c61fc20c3d03b89ca28efa6f5be572ab6110b2fb68eda96bfea", "493082f104b5ca920e97a485913de254cbe351900deed72d4264571c73464cd0", "4c4f368ffe1c2e7602359c2c50233269f3abe1c48ca6b288dcd0fb1d1c679733", "5ff16548492e8a12e65ff3d55857ccd818584ed587a6c2898a9ebbe09a880674", "66f393e10dd866be267deb3feca39babba08ae13763e0fc7a1063cbe1f8e49f6", "700d7579995044dc724847560b78ac786f0ca292867447afda7727a6fbaa082e", "81912cfe276e0069dca99e1e4e6be7b06b5fc8342641c6b472cb2fed7de7ae18", "82cbd3317320aa63c65555aa4894bf33a13fb3a77f079059eb5935eea415938d", "845fddf89dca1e94abe168760a38271abfc2e31863fbb4ada7f9a99337d7c3dc", "87d942863fe74b1c3be83a045996addf1639218c2cb89c5da18c06c0fe3917ea", "9721f1b7275d3112dc7ccf63f0553c769f09b5c25a26ee45872c7f5c09edf6c1", "a4497faa4f1c0fc365ba05eaecfb6b5d24e3c8c72e95938f9524e29dadb15e76", "a7cfaebd8f24c2b537fa6a271229b051cdac9c1734bb6f939ccfc7c055689baa", "ab3508df9a92c1d3362343d235420d08e2662969b83134f8a97dc1451cbe5e84", "b0059630ca5c6b297690a6bf57bf2fdac1395c24b7935fd73ee64190276b743b", "b6cebae1502ce5b87d7c6f532fa90ab345cfbda62b95aeea4e431e164d498a3d", "bd4800e32b4c8d99c3a2c943f1ac430cbf80658d884123d19639bcde90dad44a", "cdd92dd9471e624cd1d8c1a2703d25f114b59b736b0f1f659a98414e535ffb3d", "d00e29b78ff610d300b2c37049a41234d48ea4f2d2581759ebcf67caaf731c31", "d1ee76f560c3c3e8faada866a07a32485445e16ed2206ac8378bd90dadffb9f0", "dd707a21332615108b736ef0b8513d3edaf12d2a7d5fc26cd04a169a8ae9b526", "e3ba9b14607c23623cf38f90b23f5bed4a3be87cbfa96e2e9f4eabb975d1e98b", "e9a0e1caed2a52f15c96507ab78a48f346c05681a49c5b003172f8073da6aa6b", "eea9135432428d3ca7ee9be86af27cb8e56243f73764a9b6c3e0bda1394916be", "f29841e865590af72c4b90d7b5b8e93fd560f5dea436c1d5ee8053788f9285de", "f3a5c6d054c531536a83521c00e5d4004f1e126e2e2556ce399bef4180fbe540", "f87f522bde5540d8a4b11df80058281ac38c44b13ce29ced1e294963dd51a8f8", "f8c55dd0f56d3d618dfacf129e010cbe5d5f94b6951c1b2f13ab1a2f79c284da", "f98b461cb59f117887aa634a66022c0bd394278245ed51189f63a036516e32de"] +cycler = ["1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d", "cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"] +decorator = ["86156361c50488b84a3f148056ea716ca587df2f0de1d34750d35c21312725de", "f069f3a01830ca754ba5258fde2278454a0b5b79e0d7f5c13b3b97e57d4acff6"] +defusedxml = 
["6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93", "f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5"] +django = ["4025317ca01f75fc79250ff7262a06d8ba97cd4f82e93394b2a0a6a4a925caeb", "a8ca1033acac9f33995eb2209a6bf18a4681c3e5269a878e9a7e0b7384ed1ca3"] +django-appconf = ["35f13ca4d567f132b960e2cd4c832c2d03cb6543452d34e29b7ba10371ba80e3", "c98a7af40062e996b921f5962a1c4f3f0c979fa7885f7be4710cceb90ebe13a6"] +django-autocomplete-light = ["29ce2626a11eab2333e5aa9f95166a6d4400f11b5a05e8f23fa77017b1a9089a"] +django-celery-beat = ["61c92d4b600a9f24406ee0b8d01a9b192253e15d047e3325e1d81e2cacf7aba6", "659b39232c454ac27022bf679939bce0471fd482f3ee9276f5199716cb4afad9"] +django-celery-email = ["02694114f8a4e4b363cfae48b960473396899cae08351e29b0c5e431d647ef9e", "83ad3d4edfccbcdeb8319314ed8c36cf2d017bbb02cae8b459bf6678a804ea44"] +django-celery-results = ["932277e9382528f74778b30cf90e17941cba577b7d73cee09ed55e4972972c32", "e735dc3e705a0e21afc3b6fa2918ec388258145fcbaad3727c493c5707d25034"] +django-compat = ["3ac9a3bedc56b9365d9eb241bc5157d0c193769bf995f9a78dc1bc24e7c2331b"] +django-cors-headers = ["5762ec9c2d59f38c76828dc1d4308baca4bc0d3e1d6f217683e7a24a1c4611a3", "ee02f4b699e9b6645602a46d0adb430ee940a1bf8df64f77e516f8d7711fee60"] +django-countries = ["1cefad9ec804d6a0318b91c5394b5aef00336755928f44d0a6420507719d65c8", "22e96236101783cfe5222ef5174972242a7e8176336d119a4dc111aedce35897"] +django-crispy-forms = ["0320b303420fec9ce94e045321dfd180ca0e31e0336211c5d30ad0bda5ebbb5d", "22b6634e3a6316623e4eb062527fe5be5f97ea8b83020c65bcd8fac4747807b5"] +django-debug-toolbar = ["17c53cd6bf4e7d69902aedf9a1d26c5d3b7369b54c5718744704f27b5a72f35d", "9a23ada2e43cd989195db3c18710b5d7451134a0d48127ab64c1d2ad81700342"] +django-extensions = ["a9db7c56a556d244184f589f2437b4228de86ee45e5ebb837fb20c6d54e95ea5", "b58320d3fe3d6ae7d1d8e38959713fa92272f4921e662d689058d942a5b444f7"] +django-favicon-plus = ["3394a951d8dc611eb1ea027ad1181d7f650ca234506585b27e93d7ed06b981bf"] +django-guardian = ["8cf4efd67a863eb32beafd4335a38ffb083630f8ab2045212d27f8f9c3abe5a6", "e638c9a23eeac534bb68b133975539ed8782f733ab6f35c0b23b4c39cd06b1bb"] +django-select2 = ["ad12132e764ce8099bc2746e6af2f33a952b49eb63f3b062eb4739cd4304ee2f", "e4beb0e4af27f71e9e2e2f52441aecdb24d401942f18a0375031767cd0e2e5a0"] +django-simple-history = ["7273add61d3f89453c475531627f8c69cbfc41d6fb99d45278dddc3bafe39284", "7f3044439e401fb02b12231b675590865a27a149f6bd99587e429cbe6a9dd6a6"] +django-speedinfo = ["9a4fe0e5709a7017a13371a1f73cce1fe3b3d1425c5887337e938567bd5db0e0", "c515d3eadee2c3249a7abbf51fed7f80bf517c5029a89a0737a508b98030b7c2"] +django-storages = ["87287b7ad2e789cd603373439994e1ac6f94d9dc2e5f8173d2a87aa3ed458bd9", "f3b3def96493d3ccde37b864cea376472baf6e8a596504b209278801c510b807"] +django-summernote = ["7e2a7cfa806dba508aceee872a7a556b0f86ebcc176f9c3951d4ae56871de609"] +django-timezone-field = ["1a7bbcf984ae191c6dfe713994b4ff4062dc21e47a909356c93e76d027c87c8f", "a25af66b86d13709aa8c69a361c1ea68322cda64b5bbf9141fb67b8b44aa4e43"] +django-userena-ce = ["33eb5c5105f06cdf2635d7758b809fe2906981acba476ba08fda9cb2d2708c87", "75486a0a6d9b9a79cceaccd204593391e513814fb1a9d01d762c600455f00293"] +djangorestframework = ["5488aed8f8df5ec1d70f04b2114abc52ae6729748a176c453313834a9ee179c8", "dc81cbf9775c6898a580f6f1f387c4777d12bd87abf0f5406018d32ccae71090"] +djangorestframework-guardian = ["1883756452d9bfcc2a51fb4e039a6837a8f6697c756447aa83af085749b59330", "3bd3dd6ea58e1bceca5048faf6f8b1a93bb5dcff30ba5eb91b9a0e190a48a0c7"] +docker = 
["6e06c5e70ba4fad73e35f00c55a895a448398f3ada7faae072e2bb01348bafc1", "8f93775b8bdae3a2df6bc9a5312cce564cade58d6555f2c2570165a1270cd8a7"] +docutils = ["6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", "9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", "a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99"] +easy-thumbnails = ["23fbe3415c93b2369ece8ebdfb5faa05540943bef8b941b3118ce769ba95e275"] +entrypoints = ["589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", "c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"] +execnet = ["cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50", "d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547"] +factory-boy = ["728df59b372c9588b83153facf26d3d28947fc750e8e3c95cefa9bed0e6394ee", "faf48d608a1735f0d0a3c9cbf536d64f9132b547dae7ba452c4d99a79e84a370"] +faker = ["5902379d8df308a204fc11c4f621590ee83975805a6c7b2228203b9defa45250", "5e8c755c619f332d5ec28b7586389665f136bcf528e165eb925e87c06a63eda7"] +gunicorn = ["aa8e0b40b4157b36a5df5e599f45c9c76d6af43845ba3b3b0efe2c70473c2471", "fa2662097c66f920f53f70621c6c58ca4a3c4d3434205e608e121b5b3b71f4f3"] +html2text = ["55ce85704f244fc18890c5ded89fa22ff7333e41e9f3cad04d51f48d62ad8834", "6f56057c5c2993b5cc5b347cb099bdf6d095828fef1b53ef4e2a2bf2a1be9b4f"] +httplib2 = ["34537dcdd5e0f2386d29e0e2c6d4a1703a3b982d34c198a5102e6e5d6194b107", "409fa5509298f739b34d5a652df762cb0042507dc93f6633e306b11289d6249d"] +idna = ["c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"] +imagecodecs = ["11c3df3a8e83a69222b2c5a43f7e55d18de418ffe59a82722de887111b77f958", "1d8eb81829bf68b8621967f81bf0a268f8dd9d1d0c6f3db3f6ead89b6900fae4", "23283b7d8697a5af707812a1752a8eb6f3a9c3af2a2ffb0da70af4bfd9a4fc2a", "2709035dd4c0b81a0981919396edcbd864a0d6aeb4974fd2b55b5a464aaf8a2c", "2f53f39a68814e631fad9aadb44db09dd41c382523be7379ede5ea1e47823eac", "5a5a6e6ec552e8f0d57a6821ab64c82b05da8724376d9691829524bc58a67dbf", "5bbb6d87bdf8a24298436f0fa343f40f6a0f342d6883fe0b3c772572bcde4a7c", "7cd689549b41dc979fed1a26e668a080e67b276b029d179ae3a9eb27a0187719", "b658b6d40c03e7056e61ecf6127a4fe2316e40601a47b11dc8c62c6d88d027cb", "b9ba45bad9417e0a9e2e0b26c894624baee56110a2a8d3305b0a411946d7732b", "dcef710e9585b8914082c7e3d1de501ac4af5e46e6523638698e4b7095f1f18d", "e3f62a77e0332cb58b5799730ced1f807df14d5a49ec8ed665d17d9a5a2473c1", "e6cfd730f3a07fa5cc72a24df080bdc581f9d5118745db6b60270364cb4b22dd"] +imagesize = ["3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8", "f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5"] +importlib-metadata = ["aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", "d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"] +ipython-genutils = ["72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8", "eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"] +jinja2 = ["74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f", "9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de"] +jmespath = ["3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6", "bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"] +jsonschema = ["2fa0684276b6333ff3c0b1b27081f4b2305f0a36cf702a23db50edb141893c3f", "94c0a13b4a0616458b42529091624e66700a17f847453e52279e35509a5b7631"] +jupyter-core = ["464769f7387d7a62a2403d067f1ddc616655b7f77f5d810c0dd62cb54bfd0fb9", 
"a183e0ec2e8f6adddf62b0a3fc6a2237e3e0056d381e536d3e7c7ecc3067e244"] +kiwisolver = ["05b5b061e09f60f56244adc885c4a7867da25ca387376b02c1efc29cc16bcd0f", "26f4fbd6f5e1dabff70a9ba0d2c4bd30761086454aa30dddc5b52764ee4852b7", "3b2378ad387f49cbb328205bda569b9f87288d6bc1bf4cd683c34523a2341efe", "400599c0fe58d21522cae0e8b22318e09d9729451b17ee61ba8e1e7c0346565c", "47b8cb81a7d18dbaf4fed6a61c3cecdb5adec7b4ac292bddb0d016d57e8507d5", "53eaed412477c836e1b9522c19858a8557d6e595077830146182225613b11a75", "58e626e1f7dfbb620d08d457325a4cdac65d1809680009f46bf41eaf74ad0187", "5a52e1b006bfa5be04fe4debbcdd2688432a9af4b207a3f429c74ad625022641", "5c7ca4e449ac9f99b3b9d4693debb1d6d237d1542dd6a56b3305fe8a9620f883", "682e54f0ce8f45981878756d7203fd01e188cc6c8b2c5e2cf03675390b4534d5", "79bfb2f0bd7cbf9ea256612c9523367e5ec51d7cd616ae20ca2c90f575d839a2", "7f4dd50874177d2bb060d74769210f3bce1af87a8c7cf5b37d032ebf94f0aca3", "8944a16020c07b682df861207b7e0efcd2f46c7488619cb55f65882279119389", "8aa7009437640beb2768bfd06da049bad0df85f47ff18426261acecd1cf00897", "939f36f21a8c571686eb491acfffa9c7f1ac345087281b412d63ea39ca14ec4a", "9733b7f64bd9f807832d673355f79703f81f0b3e52bfce420fc00d8cb28c6a6c", "a02f6c3e229d0b7220bd74600e9351e18bc0c361b05f29adae0d10599ae0e326", "a0c0a9f06872330d0dd31b45607197caab3c22777600e88031bfe66799e70bb0", "acc4df99308111585121db217681f1ce0eecb48d3a828a2f9bbf9773f4937e9e", "b64916959e4ae0ac78af7c3e8cef4becee0c0e9694ad477b4c6b3a536de6a544", "d3fcf0819dc3fea58be1fd1ca390851bdb719a549850e708ed858503ff25d995", "d52e3b1868a4e8fd18b5cb15055c76820df514e26aa84cc02f593d99fef6707f", "db1a5d3cc4ae943d674718d6c47d2d82488ddd94b93b9e12d24aabdbfe48caee", "e3a21a720791712ed721c7b95d433e036134de6f18c77dbe96119eaf7aa08004", "e8bf074363ce2babeb4764d94f8e65efd22e6a7c74860a4f05a6947afc020ff2", "f16814a4a96dc04bf1da7d53ee8d5b1d6decfc1a92a63349bb15d37b6a263dd9", "f2b22153870ca5cf2ab9c940d7bc38e8e9089fa0f7e5856ea195e1cf4ff43d5a", "f790f8b3dff3d53453de6a7b7ddd173d2e020fb160baff578d578065b108a05f"] +kombu = ["31edb84947996fdda065b6560c128d5673bb913ff34aa19e7b84755217a24deb", "c9078124ce2616b29cf6607f0ac3db894c59154252dee6392cdbbe15e5c4b566"] +livereload = ["78d55f2c268a8823ba499305dcac64e28ddeb9a92571e12d543cd304faf5817b", "89254f78d7529d7ea0a3417d224c34287ebfe266b05e67e51facaf82c27f0f66"] +markupsafe = ["00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", "09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", "09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", "24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", "29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", "43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", "500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", "535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", "62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", "6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", "717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", "7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", "88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", "8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", "98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", 
"9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", "9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", "ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", "b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", "b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", "b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", "ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", "c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", "cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"] +matplotlib = ["1febd22afe1489b13c6749ea059d392c03261b2950d1d45c17e3aed812080c93", "31a30d03f39528c79f3a592857be62a08595dec4ac034978ecd0f814fa0eec2d", "4442ce720907f67a79d45de9ada47be81ce17e6c2f448b3c64765af93f6829c9", "796edbd1182cbffa7e1e7a97f1e141f875a8501ba8dd834269ae3cd45a8c976f", "934e6243df7165aad097572abf5b6003c77c9b6c480c3c4de6f2ef1b5fdd4ec0", "bab9d848dbf1517bc58d1f486772e99919b19efef5dd8596d4b26f9f5ee08b6b", "c1fe1e6cdaa53f11f088b7470c2056c0df7d80ee4858dadf6cbe433fcba4323b", "e5b8aeca9276a3a988caebe9f08366ed519fff98f77c6df5b64d7603d0e42e36", "ec6bd0a6a58df3628ff269978f4a4b924a0d371ad8ce1f8e2b635b99e482877a"] +mistune = ["59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e", "88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"] +more-itertools = ["409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", "92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"] +nbconvert = ["427a468ec26e7d68a529b95f578d5cbf018cb4c1f889e897681c2b6d11897695", "48d3c342057a2cf21e8df820d49ff27ab9f25fc72b8f15606bd47967333b2709"] +nbformat = ["b9a0dbdbd45bb034f4f8893cafd6f652ea08c8c1674ba83f2dc55d3955743b0b", "f7494ef0df60766b7cabe0a3651556345a963b74dbc16bc7c18479041170d402"] +numpy = ["0b0dd8f47fb177d00fa6ef2d58783c4f41ad3126b139c91dd2f7c4b3fdf5e9a5", "25ffe71f96878e1da7e014467e19e7db90ae7d4e12affbc73101bcf61785214e", "26efd7f7d755e6ca966a5c0ac5a930a87dbbaab1c51716ac26a38f42ecc9bc4b", "28b1180c758abf34a5c3fea76fcee66a87def1656724c42bb14a6f9717a5bdf7", "2e418f0a59473dac424f888dd57e85f77502a593b207809211c76e5396ae4f5c", "30c84e3a62cfcb9e3066f25226e131451312a044f1fe2040e69ce792cb7de418", "4650d94bb9c947151737ee022b934b7d9a845a7c76e476f3e460f09a0c8c6f39", "4dd830a11e8724c9c9379feed1d1be43113f8bcce55f47ea7186d3946769ce26", "4f2a2b279efde194877aff1f76cf61c68e840db242a5c7169f1ff0fd59a2b1e2", "62d22566b3e3428dfc9ec972014c38ed9a4db4f8969c78f5414012ccd80a149e", "669795516d62f38845c7033679c648903200980d68935baaa17ac5c7ae03ae0c", "75fcd60d682db3e1f8fbe2b8b0c6761937ad56d01c1dc73edf4ef2748d5b6bc4", "9395b0a41e8b7e9a284e3be7060db9d14ad80273841c952c83a5afc241d2bd98", "9e37c35fc4e9410093b04a77d11a34c64bf658565e30df7cbe882056088a91c1", "a0678793096205a4d784bd99f32803ba8100f639cf3b932dc63b21621390ea7e", "b46554ad4dafb2927f88de5a1d207398c5385edbb5c84d30b3ef187c4a3894d8", "c867eeccd934920a800f65c6068acdd6b87e80d45cd8c8beefff783b23cdc462", "dd0667f5be56fb1b570154c2c0516a528e02d50da121bbbb2cbb0b6f87f59bc2", "de2b1c20494bdf47f0160bd88ed05f5e48ae5dc336b8de7cfade71abcc95c0b9", "f1df7b2b7740dd777571c732f98adb5aad5450aee32772f1b39249c8a50386f6", "ffca69e29079f7880c5392bf675eb8b4146479d976ae1924d01cd92b04cccbcc"] +oauth2 = ["15b5c42301f46dd63113f1214b0d81a8b16254f65a86d3c32a1b52297f3266e6", "c006a85e7c60107c7cc6da1b184b5c719f6dd7202098196dfa6e55df669b59bf"] +oauthlib = 
["bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889", "df884cd6cbe20e32633f1db1072e9356f53638e4361bef4e8b03c9127c9328ea"] +packaging = ["28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", "d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"] +pandocfilters = ["b3dd70e169bb5449e6bc6ff96aea89c5eea8c5f6ab5e207fc2f521a2cf4a0da9"] +pathtools = ["7c35c5421a39bb82e58018febd90e3b6e5db34c5443aaaf742b3f33d4655f1c0"] +pillow = ["047d9473cf68af50ac85f8ee5d5f21a60f849bc17d348da7fc85711287a75031", "0f66dc6c8a3cc319561a633b6aa82c44107f12594643efa37210d8c924fc1c71", "12c9169c4e8fe0a7329e8658c7e488001f6b4c8e88740e76292c2b857af2e94c", "248cffc168896982f125f5c13e9317c059f74fffdb4152893339f3be62a01340", "27faf0552bf8c260a5cee21a76e031acaea68babb64daf7e8f2e2540745082aa", "285edafad9bc60d96978ed24d77cdc0b91dace88e5da8c548ba5937c425bca8b", "384b12c9aa8ef95558abdcb50aada56d74bc7cc131dd62d28c2d0e4d3aadd573", "38950b3a707f6cef09cd3cbb142474357ad1a985ceb44d921bdf7b4647b3e13e", "4aad1b88933fd6dc2846552b89ad0c74ddbba2f0884e2c162aa368374bf5abab", "4ac6148008c169603070c092e81f88738f1a0c511e07bd2bb0f9ef542d375da9", "4deb1d2a45861ae6f0b12ea0a786a03d19d29edcc7e05775b85ec2877cb54c5e", "59aa2c124df72cc75ed72c8d6005c442d4685691a30c55321e00ed915ad1a291", "5a47d2123a9ec86660fe0e8d0ebf0aa6bc6a17edc63f338b73ea20ba11713f12", "5cc901c2ab9409b4b7ac7b5bcc3e86ac14548627062463da0af3b6b7c555a871", "6c1db03e8dff7b9f955a0fb9907eb9ca5da75b5ce056c0c93d33100a35050281", "7ce80c0a65a6ea90ef9c1f63c8593fcd2929448613fc8da0adf3e6bfad669d08", "809c19241c14433c5d6135e1b6c72da4e3b56d5c865ad5736ab99af8896b8f41", "83792cb4e0b5af480588601467c0764242b9a483caea71ef12d22a0d0d6bdce2", "846fa202bd7ee0f6215c897a1d33238ef071b50766339186687bd9b7a6d26ac5", "9f5529fc02009f96ba95bea48870173426879dc19eec49ca8e08cd63ecd82ddb", "a423c2ea001c6265ed28700df056f75e26215fd28c001e93ef4380b0f05f9547", "ac4428094b42907aba5879c7c000d01c8278d451a3b7cccd2103e21f6397ea75", "b1ae48d87f10d1384e5beecd169c77502fcc04a2c00a4c02b85f0a94b419e5f9", "bf4e972a88f8841d8fdc6db1a75e0f8d763e66e3754b03006cbc3854d89f1cb1", "c6414f6aad598364aaf81068cabb077894eb88fed99c6a65e6e8217bab62ae7a", "c710fcb7ee32f67baf25aa9ffede4795fd5d93b163ce95fdc724383e38c9df96", "c7be4b8a09852291c3c48d3c25d1b876d2494a0a674980089ac9d5e0d78bd132", "c9e5ffb910b14f090ac9c38599063e354887a5f6d7e6d26795e916b4514f2c1a", "e0697b826da6c2472bb6488db4c0a7fa8af0d52fa08833ceb3681358914b14e5", "e9a3edd5f714229d41057d56ac0f39ad9bdba6767e8c888c951869f0bdd129b0"] +pkgconfig = ["97bfe3d981bab675d5ea3ef259045d7919c93897db7d3b59d4e8593cba8d354f", "cddf2d7ecadb272178a942eb852a9dee46bda2adcc36c3416b0fef47a4ed9f38"] +pluggy = ["0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", "fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"] +port-for = ["b16a84bb29c2954db44c29be38b17c659c9c27e33918dec16b90d375cc596f1c"] +psycopg2 = ["47fc642bf6f427805daf52d6e52619fe0637648fe27017062d898f3bf891419d", "72772181d9bad1fa349792a1e7384dde56742c14af2b9986013eb94a240f005b", "8396be6e5ff844282d4d49b81631772f80dabae5658d432202faf101f5283b7c", "893c11064b347b24ecdd277a094413e1954f8a4e8cdaf7ffbe7ca3db87c103f0", "965c4c93e33e6984d8031f74e51227bd755376a9df6993774fd5b6fb3288b1f4", "9ab75e0b2820880ae24b7136c4d230383e07db014456a476d096591172569c38", "b0845e3bdd4aa18dc2f9b6fb78fbd3d9d371ad167fd6d1b7ad01c0a6cdad4fc6", "dca2d7203f0dfce8ea4b3efd668f8ea65cd2b35112638e488a4c12594015f67b", "ed686e5926929887e2c7ae0a700e32c6129abb798b4ad2b846e933de21508151", 
"ef6df7e14698e79c59c7ee7cf94cd62e5b869db369ed4b1b8f7b729ea825712a", "f898e5cc0a662a9e12bde6f931263a1bbd350cfb18e1d5336a12927851825bb6"] +py = ["64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", "dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"] +pycparser = ["a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"] +pygments = ["71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127", "881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"] +pyjwt = ["5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e", "8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"] +pyparsing = ["6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", "d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"] +pypiwin32 = ["67adf399debc1d5d14dffc1ab5acacb800da569754fafdc576b2a039485aa775", "71be40c1fbd28594214ecaecb58e7aa8b708eabfa0125c8a109ebd51edbd776a"] +pyrsistent = ["34b47fa169d6006b32e99d4b3c4031f155e6e68ebcc107d6454852e8e0ee6533"] +pytest = ["7e4800063ccfc306a53c461442526c5571e1462f61583506ce97e4da6a1d88c8", "ca563435f4941d0cb34767301c27bc65c510cb82e90b9ecf9cb52dc2c63caaa0"] +pytest-cov = ["cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b", "cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626"] +pytest-django = ["497e8d967d2ec82b3388267b2f1f037761ff34c10ebb13c534d8c5804846e4eb", "b6c900461a6a7c450dcf11736cabc289a90f5d6f28ef74c46e32e86ffd16a4bd"] +pytest-forked = ["1805699ed9c9e60cb7a8179b8d4fa2b8898098e82d229b0825d8095f0f261100", "1ae25dba8ee2e56fb47311c9638f9e58552691da87e82d25b0ce0e4bf52b7d87"] +pytest-mock = ["b3514caac35fe3f05555923eabd9546abce11571cc2ddf7d8615959d04f2c89e", "ea502c3891599c26243a3a847ccf0b1d20556678c528f86c98e3cd6d40c5cf11"] +pytest-xdist = ["5d1b1d4461518a6023d56dab62fb63670d6f7537f23e2708459a557329accf48", "a8569b027db70112b290911ce2ed732121876632fb3f40b1d39cd2f72f58b147"] +python-crontab = ["3ac1608ff76032e6fc6e16b5fbf83b51557e0e066bf78e9f88571571e7bd7ae6"] +python-dateutil = ["7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", "c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"] +python-magic = ["f2674dcfad52ae6c49d4803fa027809540b130db1dec928cfbb9240316831375", "f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5"] +python-memcached = ["4dac64916871bd3550263323fc2ce18e1e439080a2d5670c594cf3118d99b594", "a2e28637be13ee0bf1a8b6843e7490f9456fd3f2a4cb60471733c7b5d5557e4f"] +python3-openid = ["0086da6b6ef3161cfe50fb1ee5cceaf2cda1700019fda03c2c5c440ca6abe4fa", "628d365d687e12da12d02c6691170f4451db28d6d68d050007e4a40065868502"] +pytz = ["1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d", "b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"] +pyupgrade = ["a0db809531fafa686d3eb567d8bf7f5119f8d1bfa076d2a2ffa87e20d5aafd4e", "ddf33de4ab2d9b7f7eb9aae540a9cf0e576b2bbb82eb24db5be7eb610bbcbad9"] +pyvips = ["8992acde85331c08bf4cd0b8213d99bc65c523fc67eade93820d600de138ad04"] +pywin32 = ["0443e9bb196e72480f50cbddc2cf98fbb858a77d02e281ba79489ea3287b36e9", "09bbe7cdb29eb40ab2e83f7a232eeeedde864be7a0622b70a90f456aad07a234", "0d8e0f47808798d320c983574c36c49db642678902933a210edd40157d206fd0", "0db7c9f4b93528afd080d35912a60be2f86a1d6c49c0a9cf9cedd106eed81ea3", "749e590875051661ecefbd9dfa957a485016de0f25e43f5e70f888ef1e29587b", "779d3e9d4b934f2445d2920c3941416d99af72eb7f7fd57a63576cc8aa540ad6", "7c89d2c11a31c7aaa16dc4d25054d7e0e99d6f6b24193cf62c83850484658c87", 
"81f7732b662c46274d7d8c411c905d53e71999cba95457a0686467c3ebc745ca", "9db1fb8830bfa99c5bfd335d4482c14db5c6f5028db3b006787ef4200206242b", "bd8d04835db28646d9e07fd0ab7c7b18bd90e89dfdc559e60389179495ef30da", "fc6822a68afd79e97b015985dd455767c72009b81bcd18957068626c43f11e75", "fe6cfc2045931866417740b575231c7e12d69d481643be1493487ad53b089959"] +pyyaml = ["0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", "01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", "5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", "5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", "7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", "7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", "87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", "9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", "a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", "b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", "b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", "bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", "f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8"] +redis = ["3613daad9ce5951e426f460deddd5caf469e08a3af633e9578fc77d362becf62", "8d0fc278d3f5e1249967cba2eb4a5632d19e45ce5c09442b8422d15ee2c22cc2"] +requests = ["11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", "9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"] +requests-file = ["75c175eed739270aec3c5279ffd74e6527dada275c5c0d76b5817e9c86bb7dea", "8f04aa6201bacda0567e7ac7f677f1499b0fc76b22140c54bc06edf1ba92e2fa"] +requests-oauthlib = ["bd6533330e8748e94bf0b214775fed487d309b8b8fe823dc45641ebcd9a32f57", "d3ed0c8f2e3bbc6b344fa63d6f933745ab394469da38db16bdddb461c7e25140", "dd5a0499abfefd087c6dd96693cbd5bfd28aa009719a7f85ab3fabe3956ef19a"] +s3transfer = ["6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d", "b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba"] +sentry-sdk = ["7d8668f082cb1eb9bf1e0d3f8f9bd5796d05d927c1197af226d044ed32b9815f", "ff14935cc3053de0650128f124c36f34a4be120b8cc522c149f5cba342c1fd05"] +simpleitk = ["145f3f3c9444e0bd1b93d0941114744fab856c13c76749cad0f6dcc884b391da", "1fa1a0b05ef66bb4fd2d83bc41950271f62aad906050359303bfdf9efdcedb7a", "21e7418c325f8a2e26bda293323376783bcb77ec67e8c58a523468c08d6cf2ad", "2334392e4aaefbaf7190fd9ce9387133d1a17a716e686b1011d5bd2f2b707c81", "26c9064d2faba621788eebc8369cb7056439b82c1f5ed040b3b46adfacc9965c", "2c2a2cc75342ffd5e87be649b5ea8c0b9aa0e2ef9e875b6cce884409aae32335", "3125ffb75aab69106e6989267319ac9bb4d10d204c89b154c0bf15f8647ef55e", "34bf2a2e95f9dea5e1479ffb829c353987c8abc08c63c1bc8967d492b7e437ca", "495de11bb8430ab3c9ad1e437fc3f41c465daa4bf52ef57656b31b76f46a176e", "580dce0d1405a0d66751c552d64210a276752c6ab852251d8adb098deb5b4484", "5a7835a033a73a26c61837a9a37d8905a28863a9da8c9e9dc1bef114f57e69c9", "662bde72be9011851875ea0ba6cb60803ba133b47ac8e2921a044570ba68f9c4", "755374b6f185ffb580933eacf419d173561bb20c2c5f684352ec907cefac856b", "8658061ef6ba09301bbf9323b5e17fff38ece95546761688403612f0b3ecfb2a", "89d1ff555363585a6049ae495b781f2d41af997417c68289c1e18d8c24796460", "8f357e16c99c3badec21a67cec05de7620873c49c57551e1aedec6afa469b3f6", "97ffaca00d104119cff5f200b27e3f28977d52ae077fcd2b51d09fe34b251a8f", "9ec92ad09c420ab9fee50ca2cff3bdf959aa33244aba9116f9156d64468adc2f", "b87c0d7abbb13b65bd4f65755a964bcdd618ee5c1913526faa0bd7ed1837e9df", 
"ba8435e0371533a60d4db52a2d8233cd29ff54d78bb08140fcef117d7d77210b", "bae18c99e83052ba668ccbcdc9d601d03329d0989bf2316872bd289a4f88aac3", "c9b03918e4cc82bbedebedd310d873ea383b99f1d5e3b8e1ab46ae18cd9d8c65", "cdc7bcc94acb7d1ea9ff760dac6bb4d67ab986dfda13d1c002cb5ac13eefe419", "d397c7585c5ce437aa3383c75f7d816aac2215be92b9c562d53c85d344199ed5", "d6d23b5d97e85d5bae57eb8d6cc252714cd3a785021589fdcd1e3371c6016f14", "f4c5ef5219d04f49e7ba9b00c6f5d1316f9b287c9fec839e2a614a8e43d08992"] +six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] +snowballstemmer = ["209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0", "df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"] +social-auth-app-django = ["6d0dd18c2d9e71ca545097d57b44d26f59e624a12833078e8e52f91baf849778", "9237e3d7b6f6f59494c3b02e0cce6efc69c9d33ad9d1a064e3b2318bcbe89ae3", "f151396e5b16e2eee12cd2e211004257826ece24fc4ae97a147df386c1cd7082"] +social-auth-core = ["47cd2458c8fefd02466b0c514643e02ad8b61d8b4b69f7573e80882e3a97b0f0", "8320666548a532eb158968eda542bbe1863682357c432d8c4e28034a7f1e3b58", "d81ed681e3c0722300b61a0792c5db5d21206793f95ca810f010c1cc931c8d89"] +sorl-thumbnail = ["8dfe5fda91a5047d1d35a0b9effe7b000764a01d648e15ca076f44e9c34b6dbd", "d9e3f018d19293824803e4ffead96b19dfcd44fa7987cea392f50436817bef34"] +soupsieve = ["605f89ad5fdbfefe30cdc293303665eff2d188865d4dbe4eb510bba1edfbfce3", "b91d676b330a0ebd5b21719cb6e9b57c57d433671f65b9c28dd3461d9a1ed0b6"] +sphinx = ["0d586b0f8c2fc3cc6559c5e8fd6124628110514fda0e5d7c82e682d749d2e845", "839a3ed6f6b092bb60f492024489cc9e6991360fb9f52ed6361acd510d261069"] +sphinx-autobuild = ["66388f81884666e3821edbe05dd53a0cfb68093873d17320d0610de8db28c74e", "e60aea0789cab02fa32ee63c7acae5ef41c06f1434d9fd0a74250a61f5994692"] +sphinx-autodoc-typehints = ["0d968ec3ee4f7fe7695ab6facf5cd2d74d3cea67584277458ad9b2788ebbcc3b", "8edca714fd3de8e43467d7e51dd3812fe999f8874408a639f7c38a9e1a5a4eb3"] +sphinx-rtd-theme = ["00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4", "728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a"] +sphinxcontrib-applehelp = ["edaa0ab2b2bc74403149cb0209d6775c96de797dfd5b5e2a71981309efab3897", "fb8dee85af95e5c30c91f10e7eb3c8967308518e0f7488a2828ef7bc191d0d5d"] +sphinxcontrib-devhelp = ["6c64b077937330a9128a4da74586e8c2130262f014689b4b89e2d08ee7294a34", "9512ecb00a2b0821a146736b39f7aeb90759834b07e81e8cc23a9c70bacb9981"] +sphinxcontrib-htmlhelp = ["4670f99f8951bd78cd4ad2ab962f798f5618b17675c35c5ac3b2132a14ea8422", "d4fd39a65a625c9df86d7fa8a2d9f3cd8299a3a4b15db63b50aac9e161d8eff7"] +sphinxcontrib-jsmath = ["2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"] +sphinxcontrib-qthelp = ["513049b93031beb1f57d4daea74068a4feb77aa5630f856fcff2e50de14e9a20", "79465ce11ae5694ff165becda529a600c754f4bc459778778c7017374d4d406f"] +sphinxcontrib-serializinghtml = ["c0efb33f8052c04fd7a26c0a07f1678e8512e0faec19f4aa8f2473a8b81d5227", "db6615af393650bf1151a6cd39120c29abaf93cc60db8c48eb2dddbfdc3a9768"] +sqlparse = ["40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177", "7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873"] +testpath = ["46c89ebb683f473ffe2aab0ed9f12581d4d078308a3cb3765d79c6b2317b0109", "b694b3d9288dbd81685c5d2e7140b81365d46c29f5db4bc659de5aa6b98780f8"] +text-unidecode = ["1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8", 
"bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"] +tifffile = ["645e3a427743b9a892c835bcc363b043e732489621d2b6de12933c82b591398c", "6b875c4342a55cbed8ef4af3aa9d7a06b02219089b9f5d7a457918dc73f61f9d"] +tldextract = ["16b2f7e81d89c2a5a914d25bdbddd3932c31a6b510db886c3ce0764a195c0ee7", "9aa21a1f7827df4209e242ec4fc2293af5940ec730cde46ea80f66ed97bfc808"] +tokenize-rt = ["2f44eee8f620102f8a03c50142795121faf86e020d208896ea7a7047bbe933cf", "53f5c22d36e5c6f8e3fdbc6cb4dd151d1b3d38cea1b85b5fef6268f153733899"] +toml = ["229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", "235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e", "f1db651f9657708513243e61e6cc67d101a39bad662eaa9b5546f789338e07a3"] +tornado = ["349884248c36801afa19e342a77cc4458caca694b0eda633f5878e458a44cb2c", "398e0d35e086ba38a0427c3b37f4337327231942e731edaa6e9fd1865bbd6f60", "4e73ef678b1a859f0cb29e1d895526a20ea64b5ffd510a2307b5998c7df24281", "559bce3d31484b665259f50cd94c5c28b961b09315ccd838f284687245f416e5", "abbe53a39734ef4aba061fca54e30c6b4639d3e1f59653f0da37a0003de148c7", "c845db36ba616912074c5b1ee897f8e0124df269468f25e4fe21fe72f6edd7a9", "c9399267c926a4e7c418baa5cbe91c7d1cf362d505a1ef898fde44a07c9dd8a5"] +traitlets = ["70b4c6a1d9019d7b4f6846832288f86998aa3b9207c6821f3578a6a6a467fe44", "d023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7"] +urllib3 = ["06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b", "cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"] +vine = ["133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87", "ea4947cc56d1fd6f2095c8d543ee25dad966f78692528e68b4fada11ba3f98af"] +watchdog = ["965f658d0732de3188211932aeb0bb457587f04f63ab4c1e33eab878e9de961d"] +wcwidth = ["3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", "f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"] +webencodings = ["a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", "b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"] +websocket-client = ["1151d5fb3a62dc129164292e1227655e4bbc5dd5340a5165dfae61128ec50aa9", "1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a"] +werkzeug = ["7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7", "e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4"] +whitenoise = ["22f79cf8f1f509639330f93886acaece8ec5ac5e9600c3b981d33c34e8a42dfd", "6dfea214b7c12efd689007abf9afa87a426586e9dbc051873ad2c8e535e2a1ac"] +zipp = ["3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", "f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"] diff --git a/pyproject.toml b/pyproject.toml index 7795d0bf61..26c1ad70ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,73 @@ +[build-system] +requires = ["poetry>=0.12"] +build-backend = "poetry.masonry.api" + [tool.black] line-length = 79 target-version = ['py37'] + +[tool.poetry] +name = "grand-challenge.org" +version = "0.1.0" +description = "" +authors = ["James Meakin <[email protected]>"] + +[tool.poetry.dependencies] +python = ">=3.7" +"beautifulsoup4" = "*" +celery = "*" +redis = "*" +django = "<2.3" +django-countries = "*" +django-crispy-forms = "*" +django-userena-ce = "*" +djangorestframework = "*" +docker = "*" +matplotlib = "*" +"oauth2" = "*" +python-magic = "*" +python-memcached = "*" +pytz = "*" +social-auth-app-django = "*" +gunicorn = "*" +django-celery-email = "*" +nbconvert = "*" +simpleitk = "*" +django-celery-beat = "*" +django-favicon-plus = "*" 
+"psycopg2" = "*" +"django-select2" = "*" +django-celery-results = "*" +django-summernote = "*" +bleach = "*" +jsonschema = "*" +tldextract = "*" +tifffile = "==2019.1.4" +sorl-thumbnail = "*" +django-autocomplete-light = "*" +django-storages = "*" +boto3 = "*" +whitenoise = "*" +brotli = "*" +djangorestframework-guardian = "*" +django-extensions = "*" +django-simple-history = "*" +sentry-sdk = "*" +django-cors-headers = "*" +pyvips = "*" +django-speedinfo = "*" + +[tool.poetry.dev-dependencies] +pytest-django = "*" +pytest-cov = "*" +pytest-mock = "*" +factory-boy = "*" +django-debug-toolbar = "*" +black = "==19.3b0" +sphinx-autobuild = "*" +sphinx = "*" +pyupgrade = "*" +pytest-xdist = "*" +sphinx-autodoc-typehints = "*" +werkzeug = "*" +sphinx-rtd-theme = "*"
pre-commit__pre-commit-1254
Running `pre-commit` 1.20.0 on Guix gives server certificate verification failed. CAfile: none CRLfile: none Running `pre-commit` 1.20.0 on Guix gives ``` An unexpected error has occurred: CalledProcessError: Command: ('/home/igankevich/.guix-profile/bin/git', 'fetch', 'origin', '--tags') Return code: 128 Expected return code: 0 Output: (none) Errors: fatal: unable to access 'https://github.com/pre-commit/pre-commit-hooks/': server certificate verification failed. CAfile: none CRLfile: none Traceback (most recent call last): File "/gnu/store/35ppc0zpffbzc6zsw9xnks1hxmr7wh19-python-pre-commit-1.20.0/lib/python3.7/site-packages/pre_commit/store.py", line 168, in clone_strategy self._shallow_clone(ref, _git_cmd) File "/gnu/store/35ppc0zpffbzc6zsw9xnks1hxmr7wh19-python-pre-commit-1.20.0/lib/python3.7/site-packages/pre_commit/store.py", line 150, in _shallow_clone git_cmd('-c', git_config, 'fetch', 'origin', ref, '--depth=1') File "/gnu/store/35ppc0zpffbzc6zsw9xnks1hxmr7wh19-python-pre-commit-1.20.0/lib/python3.7/site-packages/pre_commit/store.py", line 165, in _git_cmd cmd_output_b('git', *args, cwd=directory, env=env) File "/gnu/store/35ppc0zpffbzc6zsw9xnks1hxmr7wh19-python-pre-commit-1.20.0/lib/python3.7/site-packages/pre_commit/util.py", line 147, in cmd_output_b returncode, cmd, retcode, output=(stdout_b, stderr_b), pre_commit.util.CalledProcessError: Command: ('/home/igankevich/.guix-profile/bin/git', '-c', 'protocol.version=2', 'fetch', 'origin', 'v2.4.0', '--depth=1') Return code: 128 Expected return code: 0 Output: (none) Errors: fatal: unable to access 'https://github.com/pre-commit/pre-commit-hooks/': server certificate verification failed. CAfile: none CRLfile: none ``` It looks like pre-commit sanitises GIT_SSL_CAINFO environment variable. Guix uses this variable to specify the path to the certificates. I fixed this bug by _whitelisting_ GIT_SSL_CAINFO in `no_git_env` in `pre_commit/git.py`.
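A minimal sketch of the whitelisting fix described above, assuming `no_git_env` filters `os.environ` with a dict comprehension as in the file below; the added `GIT_SSL_CAINFO` entry is the only change (the exact patch appears in the diff further down):

```python
import os


def no_git_env(_env=None):
    # Strip GIT_* variables that interfere with cloning hook repositories,
    # but keep the ones git itself relies on -- including GIT_SSL_CAINFO,
    # which distributions such as Guix use to locate the CA certificate bundle.
    _env = _env if _env is not None else os.environ
    return {
        k: v for k, v in _env.items()
        if not k.startswith('GIT_') or
        k in {'GIT_EXEC_PATH', 'GIT_SSH', 'GIT_SSH_COMMAND', 'GIT_SSL_CAINFO'}
    }
```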
[ { "content": "from __future__ import unicode_literals\n\nimport logging\nimport os.path\nimport sys\n\nfrom pre_commit.util import cmd_output\nfrom pre_commit.util import cmd_output_b\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef zsplit(s):\n s = s.strip('\\0')\n if s:\n return s.split('\\0')\n else:\n return []\n\n\ndef no_git_env(_env=None):\n # Too many bugs dealing with environment variables and GIT:\n # https://github.com/pre-commit/pre-commit/issues/300\n # In git 2.6.3 (maybe others), git exports GIT_WORK_TREE while running\n # pre-commit hooks\n # In git 1.9.1 (maybe others), git exports GIT_DIR and GIT_INDEX_FILE\n # while running pre-commit hooks in submodules.\n # GIT_DIR: Causes git clone to clone wrong thing\n # GIT_INDEX_FILE: Causes 'error invalid object ...' during commit\n _env = _env if _env is not None else os.environ\n return {\n k: v for k, v in _env.items()\n if not k.startswith('GIT_') or\n k in {'GIT_EXEC_PATH', 'GIT_SSH', 'GIT_SSH_COMMAND'}\n }\n\n\ndef get_root():\n return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()\n\n\ndef get_git_dir(git_root='.'):\n opts = ('--git-common-dir', '--git-dir')\n _, out, _ = cmd_output('git', 'rev-parse', *opts, cwd=git_root)\n for line, opt in zip(out.splitlines(), opts):\n if line != opt: # pragma: no branch (git < 2.5)\n return os.path.normpath(os.path.join(git_root, line))\n else:\n raise AssertionError('unreachable: no git dir')\n\n\ndef get_remote_url(git_root):\n _, out, _ = cmd_output('git', 'config', 'remote.origin.url', cwd=git_root)\n return out.strip()\n\n\ndef is_in_merge_conflict():\n git_dir = get_git_dir('.')\n return (\n os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and\n os.path.exists(os.path.join(git_dir, 'MERGE_HEAD'))\n )\n\n\ndef parse_merge_msg_for_conflicts(merge_msg):\n # Conflicted files start with tabs\n return [\n line.lstrip(b'#').strip().decode('UTF-8')\n for line in merge_msg.splitlines()\n # '#\\t' for git 2.4.1\n if line.startswith((b'\\t', b'#\\t'))\n ]\n\n\ndef get_conflicted_files():\n logger.info('Checking merge-conflict files only.')\n # Need to get the conflicted files from the MERGE_MSG because they could\n # have resolved the conflict by choosing one side or the other\n with open(os.path.join(get_git_dir('.'), 'MERGE_MSG'), 'rb') as f:\n merge_msg = f.read()\n merge_conflict_filenames = parse_merge_msg_for_conflicts(merge_msg)\n\n # This will get the rest of the changes made after the merge.\n # If they resolved the merge conflict by choosing a mesh of both sides\n # this will also include the conflicted files\n tree_hash = cmd_output('git', 'write-tree')[1].strip()\n merge_diff_filenames = zsplit(\n cmd_output(\n 'git', 'diff', '--name-only', '--no-ext-diff', '-z',\n '-m', tree_hash, 'HEAD', 'MERGE_HEAD',\n )[1],\n )\n return set(merge_conflict_filenames) | set(merge_diff_filenames)\n\n\ndef get_staged_files(cwd=None):\n return zsplit(\n cmd_output(\n 'git', 'diff', '--staged', '--name-only', '--no-ext-diff', '-z',\n # Everything except for D\n '--diff-filter=ACMRTUXB',\n cwd=cwd,\n )[1],\n )\n\n\ndef intent_to_add_files():\n _, stdout, _ = cmd_output('git', 'status', '--porcelain', '-z')\n parts = list(reversed(zsplit(stdout)))\n intent_to_add = []\n while parts:\n line = parts.pop()\n status, filename = line[:3], line[3:]\n if status[0] in {'C', 'R'}: # renames / moves have an additional arg\n parts.pop()\n if status[1] == 'A':\n intent_to_add.append(filename)\n return intent_to_add\n\n\ndef get_all_files():\n return zsplit(cmd_output('git', 'ls-files', 
'-z')[1])\n\n\ndef get_changed_files(new, old):\n return zsplit(\n cmd_output(\n 'git', 'diff', '--name-only', '--no-ext-diff', '-z',\n '{}...{}'.format(old, new),\n )[1],\n )\n\n\ndef head_rev(remote):\n _, out, _ = cmd_output('git', 'ls-remote', '--exit-code', remote, 'HEAD')\n return out.split()[0]\n\n\ndef has_diff(*args, **kwargs):\n repo = kwargs.pop('repo', '.')\n assert not kwargs, kwargs\n cmd = ('git', 'diff', '--quiet', '--no-ext-diff') + args\n return cmd_output_b(*cmd, cwd=repo, retcode=None)[0]\n\n\ndef has_core_hookpaths_set():\n _, out, _ = cmd_output_b('git', 'config', 'core.hooksPath', retcode=None)\n return bool(out.strip())\n\n\ndef init_repo(path, remote):\n if os.path.isdir(remote):\n remote = os.path.abspath(remote)\n\n env = no_git_env()\n cmd_output_b('git', 'init', path, env=env)\n cmd_output_b('git', 'remote', 'add', 'origin', remote, cwd=path, env=env)\n\n\ndef commit(repo='.'):\n env = no_git_env()\n name, email = 'pre-commit', '[email protected]'\n env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = name\n env['GIT_AUTHOR_EMAIL'] = env['GIT_COMMITTER_EMAIL'] = email\n cmd = ('git', 'commit', '--no-edit', '--no-gpg-sign', '-n', '-minit')\n cmd_output_b(*cmd, cwd=repo, env=env)\n\n\ndef git_path(name, repo='.'):\n _, out, _ = cmd_output('git', 'rev-parse', '--git-path', name, cwd=repo)\n return os.path.join(repo, out.strip())\n\n\ndef check_for_cygwin_mismatch():\n \"\"\"See https://github.com/pre-commit/pre-commit/issues/354\"\"\"\n if sys.platform in ('cygwin', 'win32'): # pragma: no cover (windows)\n is_cygwin_python = sys.platform == 'cygwin'\n toplevel = cmd_output('git', 'rev-parse', '--show-toplevel')[1]\n is_cygwin_git = toplevel.startswith('/')\n\n if is_cygwin_python ^ is_cygwin_git:\n exe_type = {True: '(cygwin)', False: '(windows)'}\n logger.warn(\n 'pre-commit has detected a mix of cygwin python / git\\n'\n 'This combination is not supported, it is likely you will '\n 'receive an error later in the program.\\n'\n 'Make sure to use cygwin git+python while using cygwin\\n'\n 'These can be installed through the cygwin installer.\\n'\n ' - python {}\\n'\n ' - git {}\\n'.format(\n exe_type[is_cygwin_python], exe_type[is_cygwin_git],\n ),\n )\n", "path": "pre_commit/git.py" } ]
[ { "content": "from __future__ import unicode_literals\n\nimport logging\nimport os.path\nimport sys\n\nfrom pre_commit.util import cmd_output\nfrom pre_commit.util import cmd_output_b\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef zsplit(s):\n s = s.strip('\\0')\n if s:\n return s.split('\\0')\n else:\n return []\n\n\ndef no_git_env(_env=None):\n # Too many bugs dealing with environment variables and GIT:\n # https://github.com/pre-commit/pre-commit/issues/300\n # In git 2.6.3 (maybe others), git exports GIT_WORK_TREE while running\n # pre-commit hooks\n # In git 1.9.1 (maybe others), git exports GIT_DIR and GIT_INDEX_FILE\n # while running pre-commit hooks in submodules.\n # GIT_DIR: Causes git clone to clone wrong thing\n # GIT_INDEX_FILE: Causes 'error invalid object ...' during commit\n _env = _env if _env is not None else os.environ\n return {\n k: v for k, v in _env.items()\n if not k.startswith('GIT_') or\n k in {'GIT_EXEC_PATH', 'GIT_SSH', 'GIT_SSH_COMMAND', 'GIT_SSL_CAINFO'}\n }\n\n\ndef get_root():\n return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()\n\n\ndef get_git_dir(git_root='.'):\n opts = ('--git-common-dir', '--git-dir')\n _, out, _ = cmd_output('git', 'rev-parse', *opts, cwd=git_root)\n for line, opt in zip(out.splitlines(), opts):\n if line != opt: # pragma: no branch (git < 2.5)\n return os.path.normpath(os.path.join(git_root, line))\n else:\n raise AssertionError('unreachable: no git dir')\n\n\ndef get_remote_url(git_root):\n _, out, _ = cmd_output('git', 'config', 'remote.origin.url', cwd=git_root)\n return out.strip()\n\n\ndef is_in_merge_conflict():\n git_dir = get_git_dir('.')\n return (\n os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and\n os.path.exists(os.path.join(git_dir, 'MERGE_HEAD'))\n )\n\n\ndef parse_merge_msg_for_conflicts(merge_msg):\n # Conflicted files start with tabs\n return [\n line.lstrip(b'#').strip().decode('UTF-8')\n for line in merge_msg.splitlines()\n # '#\\t' for git 2.4.1\n if line.startswith((b'\\t', b'#\\t'))\n ]\n\n\ndef get_conflicted_files():\n logger.info('Checking merge-conflict files only.')\n # Need to get the conflicted files from the MERGE_MSG because they could\n # have resolved the conflict by choosing one side or the other\n with open(os.path.join(get_git_dir('.'), 'MERGE_MSG'), 'rb') as f:\n merge_msg = f.read()\n merge_conflict_filenames = parse_merge_msg_for_conflicts(merge_msg)\n\n # This will get the rest of the changes made after the merge.\n # If they resolved the merge conflict by choosing a mesh of both sides\n # this will also include the conflicted files\n tree_hash = cmd_output('git', 'write-tree')[1].strip()\n merge_diff_filenames = zsplit(\n cmd_output(\n 'git', 'diff', '--name-only', '--no-ext-diff', '-z',\n '-m', tree_hash, 'HEAD', 'MERGE_HEAD',\n )[1],\n )\n return set(merge_conflict_filenames) | set(merge_diff_filenames)\n\n\ndef get_staged_files(cwd=None):\n return zsplit(\n cmd_output(\n 'git', 'diff', '--staged', '--name-only', '--no-ext-diff', '-z',\n # Everything except for D\n '--diff-filter=ACMRTUXB',\n cwd=cwd,\n )[1],\n )\n\n\ndef intent_to_add_files():\n _, stdout, _ = cmd_output('git', 'status', '--porcelain', '-z')\n parts = list(reversed(zsplit(stdout)))\n intent_to_add = []\n while parts:\n line = parts.pop()\n status, filename = line[:3], line[3:]\n if status[0] in {'C', 'R'}: # renames / moves have an additional arg\n parts.pop()\n if status[1] == 'A':\n intent_to_add.append(filename)\n return intent_to_add\n\n\ndef get_all_files():\n return zsplit(cmd_output('git', 
'ls-files', '-z')[1])\n\n\ndef get_changed_files(new, old):\n return zsplit(\n cmd_output(\n 'git', 'diff', '--name-only', '--no-ext-diff', '-z',\n '{}...{}'.format(old, new),\n )[1],\n )\n\n\ndef head_rev(remote):\n _, out, _ = cmd_output('git', 'ls-remote', '--exit-code', remote, 'HEAD')\n return out.split()[0]\n\n\ndef has_diff(*args, **kwargs):\n repo = kwargs.pop('repo', '.')\n assert not kwargs, kwargs\n cmd = ('git', 'diff', '--quiet', '--no-ext-diff') + args\n return cmd_output_b(*cmd, cwd=repo, retcode=None)[0]\n\n\ndef has_core_hookpaths_set():\n _, out, _ = cmd_output_b('git', 'config', 'core.hooksPath', retcode=None)\n return bool(out.strip())\n\n\ndef init_repo(path, remote):\n if os.path.isdir(remote):\n remote = os.path.abspath(remote)\n\n env = no_git_env()\n cmd_output_b('git', 'init', path, env=env)\n cmd_output_b('git', 'remote', 'add', 'origin', remote, cwd=path, env=env)\n\n\ndef commit(repo='.'):\n env = no_git_env()\n name, email = 'pre-commit', '[email protected]'\n env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = name\n env['GIT_AUTHOR_EMAIL'] = env['GIT_COMMITTER_EMAIL'] = email\n cmd = ('git', 'commit', '--no-edit', '--no-gpg-sign', '-n', '-minit')\n cmd_output_b(*cmd, cwd=repo, env=env)\n\n\ndef git_path(name, repo='.'):\n _, out, _ = cmd_output('git', 'rev-parse', '--git-path', name, cwd=repo)\n return os.path.join(repo, out.strip())\n\n\ndef check_for_cygwin_mismatch():\n \"\"\"See https://github.com/pre-commit/pre-commit/issues/354\"\"\"\n if sys.platform in ('cygwin', 'win32'): # pragma: no cover (windows)\n is_cygwin_python = sys.platform == 'cygwin'\n toplevel = cmd_output('git', 'rev-parse', '--show-toplevel')[1]\n is_cygwin_git = toplevel.startswith('/')\n\n if is_cygwin_python ^ is_cygwin_git:\n exe_type = {True: '(cygwin)', False: '(windows)'}\n logger.warn(\n 'pre-commit has detected a mix of cygwin python / git\\n'\n 'This combination is not supported, it is likely you will '\n 'receive an error later in the program.\\n'\n 'Make sure to use cygwin git+python while using cygwin\\n'\n 'These can be installed through the cygwin installer.\\n'\n ' - python {}\\n'\n ' - git {}\\n'.format(\n exe_type[is_cygwin_python], exe_type[is_cygwin_git],\n ),\n )\n", "path": "pre_commit/git.py" } ]
diff --git a/pre_commit/git.py b/pre_commit/git.py index 3ee9ca3af..c8faf60f7 100644 --- a/pre_commit/git.py +++ b/pre_commit/git.py @@ -32,7 +32,7 @@ def no_git_env(_env=None): return { k: v for k, v in _env.items() if not k.startswith('GIT_') or - k in {'GIT_EXEC_PATH', 'GIT_SSH', 'GIT_SSH_COMMAND'} + k in {'GIT_EXEC_PATH', 'GIT_SSH', 'GIT_SSH_COMMAND', 'GIT_SSL_CAINFO'} }
Pylons__pyramid-2226
Update to Sphinx 1.3.4 when released There is a [bug in Sphinx 1.3.3 and 1.3.1](https://github.com/sphinx-doc/sphinx/issues/2189) (I haven't tried 1.3.2) where next and previous links in Sphinx documentation are broken when going into children and across sibling directories. When 1.3.4 is released, we need to pin sphinx to 1.3.4, which will include the commit made 8 days after the 1.3.3 release.
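A minimal sketch of the proposed pin, assuming the Sphinx requirement lives in the `docs_extras` list of `setup.py` as in the file shown below (list abbreviated here):

```python
docs_extras = [
    'Sphinx >= 1.3.4',  # 1.3.1-1.3.3 ship broken next/previous links across sibling directories
    'docutils',
    'repoze.sphinx.autointerface',
]
```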
[ { "content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany\n# this distribution. THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL\n# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND\n# FITNESS FOR A PARTICULAR PURPOSE\n#\n##############################################################################\n\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\npy_version = sys.version_info[:2]\n\nPY3 = py_version[0] == 3\n\nif PY3:\n if py_version < (3, 2):\n raise RuntimeError('On Python 3, Pyramid requires Python 3.2 or better')\nelse:\n if py_version < (2, 6):\n raise RuntimeError('On Python 2, Pyramid requires Python 2.6 or better')\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n with open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n with open(os.path.join(here, 'CHANGES.txt')) as f:\n CHANGES = f.read()\nexcept IOError:\n README = CHANGES = ''\n\ninstall_requires=[\n 'setuptools',\n 'WebOb >= 1.3.1', # request.domain and CookieProfile\n 'repoze.lru >= 0.4', # py3 compat\n 'zope.interface >= 3.8.0', # has zope.interface.registry\n 'zope.deprecation >= 3.5.0', # py3 compat\n 'venusian >= 1.0a3', # ``ignore``\n 'translationstring >= 0.4', # py3 compat\n 'PasteDeploy >= 1.5.0', # py3 compat\n ]\n\ntests_require = [\n 'WebTest >= 1.3.1', # py3 compat\n ]\n\nif not PY3:\n tests_require.append('zope.component>=3.11.0')\n\ndocs_extras = [\n 'Sphinx >= 1.3.1',\n 'docutils',\n 'repoze.sphinx.autointerface',\n 'pylons_sphinx_latesturl',\n 'pylons-sphinx-themes',\n 'sphinxcontrib-programoutput',\n ]\n\ntesting_extras = tests_require + [\n 'nose',\n 'coverage',\n 'virtualenv', # for scaffolding tests\n ]\n\nsetup(name='pyramid',\n version='1.5.8',\n description='The Pyramid Web Framework, a Pylons project',\n long_description=README + '\\n\\n' + CHANGES,\n classifiers=[\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.2\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Framework :: Pyramid\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Topic :: Internet :: WWW/HTTP :: WSGI\",\n \"License :: Repoze Public License\",\n ],\n keywords='web wsgi pylons pyramid',\n author=\"Chris McDonough, Agendaless Consulting\",\n author_email=\"[email protected]\",\n url=\"http://docs.pylonsproject.org/en/latest/docs/pyramid.html\",\n license=\"BSD-derived (http://www.repoze.org/LICENSE.txt)\",\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n install_requires = install_requires,\n extras_require = {\n 'testing':testing_extras,\n 'docs':docs_extras,\n },\n tests_require = tests_require,\n test_suite=\"pyramid.tests\",\n entry_points = \"\"\"\\\n [pyramid.scaffold]\n starter=pyramid.scaffolds:StarterProjectTemplate\n 
zodb=pyramid.scaffolds:ZODBProjectTemplate\n alchemy=pyramid.scaffolds:AlchemyProjectTemplate\n [console_scripts]\n pcreate = pyramid.scripts.pcreate:main\n pserve = pyramid.scripts.pserve:main\n pshell = pyramid.scripts.pshell:main\n proutes = pyramid.scripts.proutes:main\n pviews = pyramid.scripts.pviews:main\n ptweens = pyramid.scripts.ptweens:main\n prequest = pyramid.scripts.prequest:main\n pdistreport = pyramid.scripts.pdistreport:main\n [paste.server_runner]\n wsgiref = pyramid.scripts.pserve:wsgiref_server_runner\n cherrypy = pyramid.scripts.pserve:cherrypy_server_runner\n \"\"\"\n )\n\n", "path": "setup.py" } ]
[ { "content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany\n# this distribution. THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL\n# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND\n# FITNESS FOR A PARTICULAR PURPOSE\n#\n##############################################################################\n\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\npy_version = sys.version_info[:2]\n\nPY3 = py_version[0] == 3\n\nif PY3:\n if py_version < (3, 2):\n raise RuntimeError('On Python 3, Pyramid requires Python 3.2 or better')\nelse:\n if py_version < (2, 6):\n raise RuntimeError('On Python 2, Pyramid requires Python 2.6 or better')\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n with open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n with open(os.path.join(here, 'CHANGES.txt')) as f:\n CHANGES = f.read()\nexcept IOError:\n README = CHANGES = ''\n\ninstall_requires=[\n 'setuptools',\n 'WebOb >= 1.3.1', # request.domain and CookieProfile\n 'repoze.lru >= 0.4', # py3 compat\n 'zope.interface >= 3.8.0', # has zope.interface.registry\n 'zope.deprecation >= 3.5.0', # py3 compat\n 'venusian >= 1.0a3', # ``ignore``\n 'translationstring >= 0.4', # py3 compat\n 'PasteDeploy >= 1.5.0', # py3 compat\n ]\n\ntests_require = [\n 'WebTest >= 1.3.1', # py3 compat\n ]\n\nif not PY3:\n tests_require.append('zope.component>=3.11.0')\n\ndocs_extras = [\n 'Sphinx >= 1.3.4',\n 'docutils',\n 'repoze.sphinx.autointerface',\n 'pylons_sphinx_latesturl',\n 'pylons-sphinx-themes',\n 'sphinxcontrib-programoutput',\n ]\n\ntesting_extras = tests_require + [\n 'nose',\n 'coverage',\n 'virtualenv', # for scaffolding tests\n ]\n\nsetup(name='pyramid',\n version='1.5.8',\n description='The Pyramid Web Framework, a Pylons project',\n long_description=README + '\\n\\n' + CHANGES,\n classifiers=[\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.2\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Framework :: Pyramid\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Topic :: Internet :: WWW/HTTP :: WSGI\",\n \"License :: Repoze Public License\",\n ],\n keywords='web wsgi pylons pyramid',\n author=\"Chris McDonough, Agendaless Consulting\",\n author_email=\"[email protected]\",\n url=\"http://docs.pylonsproject.org/en/latest/docs/pyramid.html\",\n license=\"BSD-derived (http://www.repoze.org/LICENSE.txt)\",\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n install_requires = install_requires,\n extras_require = {\n 'testing':testing_extras,\n 'docs':docs_extras,\n },\n tests_require = tests_require,\n test_suite=\"pyramid.tests\",\n entry_points = \"\"\"\\\n [pyramid.scaffold]\n starter=pyramid.scaffolds:StarterProjectTemplate\n 
zodb=pyramid.scaffolds:ZODBProjectTemplate\n alchemy=pyramid.scaffolds:AlchemyProjectTemplate\n [console_scripts]\n pcreate = pyramid.scripts.pcreate:main\n pserve = pyramid.scripts.pserve:main\n pshell = pyramid.scripts.pshell:main\n proutes = pyramid.scripts.proutes:main\n pviews = pyramid.scripts.pviews:main\n ptweens = pyramid.scripts.ptweens:main\n prequest = pyramid.scripts.prequest:main\n pdistreport = pyramid.scripts.pdistreport:main\n [paste.server_runner]\n wsgiref = pyramid.scripts.pserve:wsgiref_server_runner\n cherrypy = pyramid.scripts.pserve:cherrypy_server_runner\n \"\"\"\n )\n\n", "path": "setup.py" } ]
diff --git a/setup.py b/setup.py index 60502548e1..87e8ed0f05 100644 --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ tests_require.append('zope.component>=3.11.0') docs_extras = [ - 'Sphinx >= 1.3.1', + 'Sphinx >= 1.3.4', 'docutils', 'repoze.sphinx.autointerface', 'pylons_sphinx_latesturl',
blaze__blaze-872
Truncate column name is too verbose Do we have to have a unique name for the result of such operations? How about naming it after the unit, e.g. instead of `when_datetimetruncate` we use `when_day` or `when_week`, etc.?
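A minimal sketch of the naming behaviour being discussed, mirroring the test added alongside the fix further down; the printed value assumes that patch is applied:

```python
# Without the patch, the truncate result is named 'when_datetimetruncate';
# with it, DateTimeTruncate._name simply reuses the child column's name.
from blaze import symbol

t = symbol('t', '5 * {name: string, when: datetime}')
expr = t.when.truncate(days=2)
print(expr._name)  # -> 'when' once the fix is applied
```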
[ { "content": "from __future__ import absolute_import, division, print_function\n\nfrom .expressions import Expr, ElemWise\nfrom datashape import dshape, Record, DataShape, Unit, Option, date_, datetime_\nimport datashape\n\n__all__ = ['DateTime', 'Date', 'date', 'Year', 'year', 'Month', 'month', 'Day',\n 'day', 'Hour', 'hour', 'Second', 'second', 'Millisecond',\n 'millisecond', 'Microsecond', 'microsecond', 'Date', 'date', 'Time',\n 'time', 'UTCFromTimestamp', 'DateTimeTruncate']\n\nclass DateTime(ElemWise):\n \"\"\" Superclass for datetime accessors \"\"\"\n __slots__ = '_hash', '_child',\n\n def __str__(self):\n return '%s.%s' % (str(self._child), type(self).__name__.lower())\n\n @property\n def schema(self):\n return dshape(self._dtype)\n\n @property\n def _name(self):\n return '%s_%s' % (self._child._name, self.attr)\n\n @property\n def attr(self):\n return type(self).__name__.lower()\n\n\nclass Date(DateTime):\n _dtype = datashape.date_\n\ndef date(expr):\n return Date(expr)\n\nclass Year(DateTime):\n _dtype = datashape.int32\n\ndef year(expr):\n return Year(expr)\n\nclass Month(DateTime):\n _dtype = datashape.int32\n\ndef month(expr):\n return Month(expr)\n\nclass Day(DateTime):\n _dtype = datashape.int32\n\ndef day(expr):\n return Day(expr)\n\nclass Time(DateTime):\n _dtype = datashape.time_\n\ndef time(expr):\n return Time(Expr)\n\nclass Hour(DateTime):\n _dtype = datashape.int32\n\ndef hour(expr):\n return Hour(expr)\n\nclass Minute(DateTime):\n _dtype = datashape.int32\n\ndef minute(expr):\n return Minute(expr)\n\nclass Second(DateTime):\n _dtype = datashape.int32\n\ndef second(expr):\n return Second(expr)\n\nclass Millisecond(DateTime):\n _dtype = datashape.int64\n\ndef millisecond(expr):\n return Millisecond(expr)\n\nclass Microsecond(DateTime):\n _dtype = datashape.int64\n\ndef microsecond(expr):\n return Microsecond(expr)\n\nclass UTCFromTimestamp(DateTime):\n _dtype = datashape.datetime_\n\ndef utcfromtimestamp(expr):\n return UTCFromTimestamp(expr)\n\nunits = ['year', 'month', 'week', 'day', 'hour', 'minute', 'second',\n'millisecond', 'microsecond', 'nanosecond']\n\n\n_unit_aliases = {'y': 'year', 'w': 'week', 'd': 'day', 'date': 'day',\n 'h': 'hour', 's': 'second', 'ms': 'millisecond', 'us': 'microsecond',\n 'ns': 'nanosecond'}\n\ndef normalize_time_unit(s):\n \"\"\" Normalize time input to one of 'year', 'second', 'millisecond', etc..\n\n Example\n -------\n\n >>> normalize_time_unit('milliseconds')\n 'millisecond'\n >>> normalize_time_unit('ms')\n 'millisecond'\n \"\"\"\n s = s.lower().strip()\n if s in units:\n return s\n if s in _unit_aliases:\n return _unit_aliases[s]\n if s[-1] == 's':\n return normalize_time_unit(s.rstrip('s'))\n\n raise ValueError(\"Do not understand time unit %s\" % s)\n\n\nclass DateTimeTruncate(DateTime):\n __slots__ = '_hash', '_child', 'measure', 'unit'\n\n @property\n def _dtype(self):\n if units.index('day') >= units.index(self.unit):\n return datashape.date_\n else:\n return datashape.datetime_\n\n\ndef truncate(expr, *args, **kwargs):\n \"\"\" Truncate datetime expression\n\n Example\n -------\n\n >>> from blaze import symbol, compute\n >>> from datetime import datetime\n >>> s = symbol('s', 'datetime')\n\n >>> expr = s.truncate(10, 'minutes')\n >>> compute(expr, datetime(2000, 6, 25, 12, 35, 10))\n datetime.datetime(2000, 6, 25, 12, 30)\n\n >>> expr = s.truncate(1, 'week')\n >>> compute(expr, datetime(2000, 6, 25, 12, 35, 10))\n datetime.date(2000, 6, 25)\n\n Alternatively use keyword arguments to specify unit and measure\n\n >>> # expr 
= s.truncate(2, 'weeks')\n >>> expr = s.truncate(weeks=2)\n \"\"\"\n if args:\n assert not kwargs\n measure, unit = args\n if kwargs:\n assert not args\n [(unit, measure)] = kwargs.items()\n return DateTimeTruncate(expr, measure, normalize_time_unit(unit))\n\n\nfrom .expressions import schema_method_list, method_properties\nfrom datashape.predicates import isdatelike, isnumeric\n\nschema_method_list.extend([\n (isdatelike, set([year, month, day, hour, minute, date, time, second,\n millisecond, microsecond, truncate])),\n (isnumeric, set([utcfromtimestamp]))\n ])\n\nmethod_properties |= set([year, month, day, hour, minute, second, millisecond,\n microsecond, date, time, utcfromtimestamp])\n", "path": "blaze/expr/datetime.py" } ]
[ { "content": "from __future__ import absolute_import, division, print_function\n\nfrom .expressions import Expr, ElemWise\nfrom datashape import dshape, Record, DataShape, Unit, Option, date_, datetime_\nimport datashape\n\n__all__ = ['DateTime', 'Date', 'date', 'Year', 'year', 'Month', 'month', 'Day',\n 'day', 'Hour', 'hour', 'Second', 'second', 'Millisecond',\n 'millisecond', 'Microsecond', 'microsecond', 'Date', 'date', 'Time',\n 'time', 'UTCFromTimestamp', 'DateTimeTruncate']\n\nclass DateTime(ElemWise):\n \"\"\" Superclass for datetime accessors \"\"\"\n __slots__ = '_hash', '_child',\n\n def __str__(self):\n return '%s.%s' % (str(self._child), type(self).__name__.lower())\n\n @property\n def schema(self):\n return dshape(self._dtype)\n\n @property\n def _name(self):\n return '%s_%s' % (self._child._name, self.attr)\n\n @property\n def attr(self):\n return type(self).__name__.lower()\n\n\nclass Date(DateTime):\n _dtype = datashape.date_\n\ndef date(expr):\n return Date(expr)\n\nclass Year(DateTime):\n _dtype = datashape.int32\n\ndef year(expr):\n return Year(expr)\n\nclass Month(DateTime):\n _dtype = datashape.int32\n\ndef month(expr):\n return Month(expr)\n\nclass Day(DateTime):\n _dtype = datashape.int32\n\ndef day(expr):\n return Day(expr)\n\nclass Time(DateTime):\n _dtype = datashape.time_\n\ndef time(expr):\n return Time(Expr)\n\nclass Hour(DateTime):\n _dtype = datashape.int32\n\ndef hour(expr):\n return Hour(expr)\n\nclass Minute(DateTime):\n _dtype = datashape.int32\n\ndef minute(expr):\n return Minute(expr)\n\nclass Second(DateTime):\n _dtype = datashape.int32\n\ndef second(expr):\n return Second(expr)\n\nclass Millisecond(DateTime):\n _dtype = datashape.int64\n\ndef millisecond(expr):\n return Millisecond(expr)\n\nclass Microsecond(DateTime):\n _dtype = datashape.int64\n\ndef microsecond(expr):\n return Microsecond(expr)\n\nclass UTCFromTimestamp(DateTime):\n _dtype = datashape.datetime_\n\ndef utcfromtimestamp(expr):\n return UTCFromTimestamp(expr)\n\nunits = ['year', 'month', 'week', 'day', 'hour', 'minute', 'second',\n'millisecond', 'microsecond', 'nanosecond']\n\n\n_unit_aliases = {'y': 'year', 'w': 'week', 'd': 'day', 'date': 'day',\n 'h': 'hour', 's': 'second', 'ms': 'millisecond', 'us': 'microsecond',\n 'ns': 'nanosecond'}\n\ndef normalize_time_unit(s):\n \"\"\" Normalize time input to one of 'year', 'second', 'millisecond', etc..\n\n Example\n -------\n\n >>> normalize_time_unit('milliseconds')\n 'millisecond'\n >>> normalize_time_unit('ms')\n 'millisecond'\n \"\"\"\n s = s.lower().strip()\n if s in units:\n return s\n if s in _unit_aliases:\n return _unit_aliases[s]\n if s[-1] == 's':\n return normalize_time_unit(s.rstrip('s'))\n\n raise ValueError(\"Do not understand time unit %s\" % s)\n\n\nclass DateTimeTruncate(DateTime):\n __slots__ = '_hash', '_child', 'measure', 'unit'\n\n @property\n def _dtype(self):\n if units.index('day') >= units.index(self.unit):\n return datashape.date_\n else:\n return datashape.datetime_\n\n @property\n def _name(self):\n return self._child._name\n\n\ndef truncate(expr, *args, **kwargs):\n \"\"\" Truncate datetime expression\n\n Example\n -------\n\n >>> from blaze import symbol, compute\n >>> from datetime import datetime\n >>> s = symbol('s', 'datetime')\n\n >>> expr = s.truncate(10, 'minutes')\n >>> compute(expr, datetime(2000, 6, 25, 12, 35, 10))\n datetime.datetime(2000, 6, 25, 12, 30)\n\n >>> expr = s.truncate(1, 'week')\n >>> compute(expr, datetime(2000, 6, 25, 12, 35, 10))\n datetime.date(2000, 6, 25)\n\n Alternatively use 
keyword arguments to specify unit and measure\n\n >>> # expr = s.truncate(2, 'weeks')\n >>> expr = s.truncate(weeks=2)\n \"\"\"\n if args:\n assert not kwargs\n measure, unit = args\n if kwargs:\n assert not args\n [(unit, measure)] = kwargs.items()\n return DateTimeTruncate(expr, measure, normalize_time_unit(unit))\n\n\nfrom .expressions import schema_method_list, method_properties\nfrom datashape.predicates import isdatelike, isnumeric\n\nschema_method_list.extend([\n (isdatelike, set([year, month, day, hour, minute, date, time, second,\n millisecond, microsecond, truncate])),\n (isnumeric, set([utcfromtimestamp]))\n ])\n\nmethod_properties |= set([year, month, day, hour, minute, second, millisecond,\n microsecond, date, time, utcfromtimestamp])\n", "path": "blaze/expr/datetime.py" } ]
diff --git a/blaze/expr/datetime.py b/blaze/expr/datetime.py index 7864688bc..0a0399df0 100644 --- a/blaze/expr/datetime.py +++ b/blaze/expr/datetime.py @@ -135,6 +135,10 @@ def _dtype(self): else: return datashape.datetime_ + @property + def _name(self): + return self._child._name + def truncate(expr, *args, **kwargs): """ Truncate datetime expression diff --git a/blaze/expr/tests/test_datetime.py b/blaze/expr/tests/test_datetime.py index d9e9f7e0f..f3a55d281 100644 --- a/blaze/expr/tests/test_datetime.py +++ b/blaze/expr/tests/test_datetime.py @@ -44,3 +44,8 @@ def test_isdatelike(): assert not isdatelike('int32') assert isdatelike('?date') assert not isdatelike('{is_outdated: bool}') + + +def test_truncate_names(): + t = symbol('t', '5 * {name: string, when: datetime}') + assert t.when.truncate(days=2)._name == 'when'
aio-libs__aiohttp-1989
Deprecate app.on_loop_available signal From my understanding, the loop is already present by the time `app.on_startup` runs. Why do we need an additional signal? `app.on_loop_available` is not documented, BTW. @fafhrd91 ?
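The argument in code form: by the time `on_startup` handlers run, `run_app` has already called `_set_loop`, so `app.loop` is usable there and the extra signal adds nothing. A minimal sketch against the aiohttp 2.x API shown below:

```python
# Minimal sketch: anything previously hung off on_loop_available can move to
# on_startup, where app.loop has already been set by run_app/make_handler.
import asyncio
from aiohttp import web

async def start_background_tasks(app):
    # app.loop has already been set by the time on_startup handlers run
    app['ticker'] = app.loop.create_task(asyncio.sleep(3600))  # placeholder task

app = web.Application()
app.on_startup.append(start_background_tasks)
# web.run_app(app) would call app.startup(), invoking the handler above
```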
[ { "content": "import asyncio\nimport os\nimport signal\nimport socket\nimport stat\nimport sys\nimport warnings\nfrom argparse import ArgumentParser\nfrom collections import Iterable, MutableMapping\nfrom importlib import import_module\n\nfrom yarl import URL\n\nfrom . import (hdrs, web_exceptions, web_fileresponse, web_middlewares,\n web_protocol, web_request, web_response, web_server,\n web_urldispatcher, web_ws)\nfrom .abc import AbstractMatchInfo, AbstractRouter\nfrom .frozenlist import FrozenList\nfrom .http import HttpVersion # noqa\nfrom .log import access_logger, web_logger\nfrom .signals import FuncSignal, PostSignal, PreSignal, Signal\nfrom .web_exceptions import * # noqa\nfrom .web_fileresponse import * # noqa\nfrom .web_middlewares import * # noqa\nfrom .web_protocol import * # noqa\nfrom .web_request import * # noqa\nfrom .web_response import * # noqa\nfrom .web_server import Server\nfrom .web_urldispatcher import * # noqa\nfrom .web_urldispatcher import PrefixedSubAppResource\nfrom .web_ws import * # noqa\n\n\n__all__ = (web_protocol.__all__ +\n web_fileresponse.__all__ +\n web_request.__all__ +\n web_response.__all__ +\n web_exceptions.__all__ +\n web_urldispatcher.__all__ +\n web_ws.__all__ +\n web_server.__all__ +\n web_middlewares.__all__ +\n ('Application', 'HttpVersion', 'MsgType'))\n\n\nclass Application(MutableMapping):\n def __init__(self, *,\n logger=web_logger,\n router=None,\n middlewares=(),\n handler_args=None,\n client_max_size=1024**2,\n secure_proxy_ssl_header=None,\n loop=None,\n debug=...):\n if router is None:\n router = web_urldispatcher.UrlDispatcher()\n assert isinstance(router, AbstractRouter), router\n\n if loop is not None:\n warnings.warn(\"loop argument is deprecated\", ResourceWarning)\n\n if secure_proxy_ssl_header is not None:\n warnings.warn(\n \"secure_proxy_ssl_header is deprecated\", DeprecationWarning)\n\n self._debug = debug\n self._router = router\n self._secure_proxy_ssl_header = secure_proxy_ssl_header\n self._loop = loop\n self._handler_args = handler_args\n self.logger = logger\n\n self._middlewares = FrozenList(middlewares)\n self._state = {}\n self._frozen = False\n self._subapps = []\n\n self._on_pre_signal = PreSignal()\n self._on_post_signal = PostSignal()\n self._on_loop_available = FuncSignal(self)\n self._on_response_prepare = Signal(self)\n self._on_startup = Signal(self)\n self._on_shutdown = Signal(self)\n self._on_cleanup = Signal(self)\n self._client_max_size = client_max_size\n\n # MutableMapping API\n\n def __eq__(self, other):\n return self is other\n\n def __getitem__(self, key):\n return self._state[key]\n\n def _check_frozen(self):\n if self._frozen:\n warnings.warn(\"Changing state of started or joined \"\n \"application is deprecated\",\n DeprecationWarning,\n stacklevel=3)\n\n def __setitem__(self, key, value):\n self._check_frozen()\n self._state[key] = value\n\n def __delitem__(self, key):\n self._check_frozen()\n del self._state[key]\n\n def __len__(self):\n return len(self._state)\n\n def __iter__(self):\n return iter(self._state)\n\n ########\n @property\n def loop(self):\n return self._loop\n\n def _set_loop(self, loop):\n if loop is None:\n loop = asyncio.get_event_loop()\n if self._loop is not None and self._loop is not loop:\n raise RuntimeError(\n \"web.Application instance initialized with different loop\")\n\n self._loop = loop\n self._on_loop_available.send(self)\n\n # set loop debug\n if self._debug is ...:\n self._debug = loop.get_debug()\n\n # set loop to sub applications\n for subapp in 
self._subapps:\n subapp._set_loop(loop)\n\n @property\n def frozen(self):\n return self._frozen\n\n def freeze(self):\n if self._frozen:\n return\n\n self._frozen = True\n self._middlewares = tuple(reversed(self._middlewares))\n self._router.freeze()\n self._on_loop_available.freeze()\n self._on_pre_signal.freeze()\n self._on_post_signal.freeze()\n self._on_response_prepare.freeze()\n self._on_startup.freeze()\n self._on_shutdown.freeze()\n self._on_cleanup.freeze()\n\n for subapp in self._subapps:\n subapp.freeze()\n\n @property\n def debug(self):\n return self._debug\n\n def _reg_subapp_signals(self, subapp):\n\n def reg_handler(signame):\n subsig = getattr(subapp, signame)\n\n @asyncio.coroutine\n def handler(app):\n yield from subsig.send(subapp)\n appsig = getattr(self, signame)\n appsig.append(handler)\n\n reg_handler('on_startup')\n reg_handler('on_shutdown')\n reg_handler('on_cleanup')\n\n def add_subapp(self, prefix, subapp):\n if self.frozen:\n raise RuntimeError(\n \"Cannot add sub application to frozen application\")\n if subapp.frozen:\n raise RuntimeError(\"Cannot add frozen application\")\n if prefix.endswith('/'):\n prefix = prefix[:-1]\n if prefix in ('', '/'):\n raise ValueError(\"Prefix cannot be empty\")\n\n resource = PrefixedSubAppResource(prefix, subapp)\n self.router.register_resource(resource)\n self._reg_subapp_signals(subapp)\n self._subapps.append(subapp)\n if self._loop is not None:\n subapp._set_loop(self._loop)\n return resource\n\n @property\n def on_loop_available(self):\n return self._on_loop_available\n\n @property\n def on_response_prepare(self):\n return self._on_response_prepare\n\n @property\n def on_pre_signal(self):\n return self._on_pre_signal\n\n @property\n def on_post_signal(self):\n return self._on_post_signal\n\n @property\n def on_startup(self):\n return self._on_startup\n\n @property\n def on_shutdown(self):\n return self._on_shutdown\n\n @property\n def on_cleanup(self):\n return self._on_cleanup\n\n @property\n def router(self):\n return self._router\n\n @property\n def middlewares(self):\n return self._middlewares\n\n def make_handler(self, *, loop=None,\n secure_proxy_ssl_header=None, **kwargs):\n self._set_loop(loop)\n self.freeze()\n\n kwargs['debug'] = self.debug\n if self._handler_args:\n for k, v in self._handler_args.items():\n kwargs[k] = v\n\n if secure_proxy_ssl_header:\n self._secure_proxy_ssl_header = secure_proxy_ssl_header\n return Server(self._handle, request_factory=self._make_request,\n loop=self.loop, **kwargs)\n\n @asyncio.coroutine\n def startup(self):\n \"\"\"Causes on_startup signal\n\n Should be called in the event loop along with the request handler.\n \"\"\"\n yield from self.on_startup.send(self)\n\n @asyncio.coroutine\n def shutdown(self):\n \"\"\"Causes on_shutdown signal\n\n Should be called before cleanup()\n \"\"\"\n yield from self.on_shutdown.send(self)\n\n @asyncio.coroutine\n def cleanup(self):\n \"\"\"Causes on_cleanup signal\n\n Should be called after shutdown()\n \"\"\"\n yield from self.on_cleanup.send(self)\n\n def _make_request(self, message, payload, protocol, writer, task,\n _cls=web_request.Request):\n return _cls(\n message, payload, protocol, writer, protocol._time_service, task,\n secure_proxy_ssl_header=self._secure_proxy_ssl_header,\n client_max_size=self._client_max_size)\n\n @asyncio.coroutine\n def _handle(self, request):\n match_info = yield from self._router.resolve(request)\n assert isinstance(match_info, AbstractMatchInfo), match_info\n match_info.add_app(self)\n\n if __debug__:\n 
match_info.freeze()\n\n resp = None\n request._match_info = match_info\n expect = request.headers.get(hdrs.EXPECT)\n if expect:\n resp = yield from match_info.expect_handler(request)\n yield from request.writer.drain()\n\n if resp is None:\n handler = match_info.handler\n for app in match_info.apps[::-1]:\n for factory in app._middlewares:\n handler = yield from factory(app, handler)\n\n resp = yield from handler(request)\n\n assert isinstance(resp, web_response.StreamResponse), \\\n (\"Handler {!r} should return response instance, \"\n \"got {!r} [middlewares {!r}]\").format(\n match_info.handler, type(resp),\n [middleware for middleware in app.middlewares\n for app in match_info.apps])\n return resp\n\n def __call__(self):\n \"\"\"gunicorn compatibility\"\"\"\n return self\n\n def __repr__(self):\n return \"<Application 0x{:x}>\".format(id(self))\n\n\nclass GracefulExit(SystemExit):\n code = 1\n\n\ndef raise_graceful_exit():\n raise GracefulExit()\n\n\ndef _make_server_creators(handler, *, loop, ssl_context,\n host, port, path, sock, backlog):\n\n scheme = 'https' if ssl_context else 'http'\n base_url = URL('{}://localhost'.format(scheme)).with_port(port)\n\n if path is None:\n paths = ()\n elif isinstance(path, (str, bytes, bytearray, memoryview))\\\n or not isinstance(path, Iterable):\n paths = (path,)\n else:\n paths = path\n\n if sock is None:\n socks = ()\n elif not isinstance(sock, Iterable):\n socks = (sock,)\n else:\n socks = sock\n\n if host is None:\n if (paths or socks) and not port:\n hosts = ()\n else:\n hosts = (\"0.0.0.0\",)\n elif isinstance(host, (str, bytes, bytearray, memoryview))\\\n or not isinstance(host, Iterable):\n hosts = (host,)\n else:\n hosts = host\n\n if hosts and port is None:\n port = 8443 if ssl_context else 8080\n\n server_creations = []\n uris = [str(base_url.with_host(host)) for host in hosts]\n if hosts:\n # Multiple hosts bound to same server is available in most loop\n # implementations, but only send multiple if we have multiple.\n host_binding = hosts[0] if len(hosts) == 1 else hosts\n server_creations.append(\n loop.create_server(\n handler, host_binding, port, ssl=ssl_context, backlog=backlog\n )\n )\n for path in paths:\n # Most loop implementations don't support multiple paths bound in same\n # server, so create a server for each.\n server_creations.append(\n loop.create_unix_server(\n handler, path, ssl=ssl_context, backlog=backlog\n )\n )\n uris.append('{}://unix:{}:'.format(scheme, path))\n\n # Clean up prior socket path if stale and not abstract.\n # CPython 3.5.3+'s event loop already does this. 
See\n # https://github.com/python/asyncio/issues/425\n if path[0] not in (0, '\\x00'): # pragma: no branch\n try:\n if stat.S_ISSOCK(os.stat(path).st_mode):\n os.remove(path)\n except FileNotFoundError:\n pass\n for sock in socks:\n server_creations.append(\n loop.create_server(\n handler, sock=sock, ssl=ssl_context, backlog=backlog\n )\n )\n\n if hasattr(socket, 'AF_UNIX') and sock.family == socket.AF_UNIX:\n uris.append('{}://unix:{}:'.format(scheme, sock.getsockname()))\n else:\n host, port = sock.getsockname()\n uris.append(str(base_url.with_host(host).with_port(port)))\n return server_creations, uris\n\n\ndef run_app(app, *, host=None, port=None, path=None, sock=None,\n shutdown_timeout=60.0, ssl_context=None,\n print=print, backlog=128, access_log_format=None,\n access_log=access_logger, handle_signals=True, loop=None):\n \"\"\"Run an app locally\"\"\"\n user_supplied_loop = loop is not None\n if loop is None:\n loop = asyncio.get_event_loop()\n\n app._set_loop(loop)\n loop.run_until_complete(app.startup())\n\n try:\n make_handler_kwargs = dict()\n if access_log_format is not None:\n make_handler_kwargs['access_log_format'] = access_log_format\n handler = app.make_handler(loop=loop, access_log=access_log,\n **make_handler_kwargs)\n\n server_creations, uris = _make_server_creators(\n handler,\n loop=loop, ssl_context=ssl_context,\n host=host, port=port, path=path, sock=sock,\n backlog=backlog)\n servers = loop.run_until_complete(\n asyncio.gather(*server_creations, loop=loop)\n )\n\n if handle_signals:\n try:\n loop.add_signal_handler(signal.SIGINT, raise_graceful_exit)\n loop.add_signal_handler(signal.SIGTERM, raise_graceful_exit)\n except NotImplementedError: # pragma: no cover\n # add_signal_handler is not implemented on Windows\n pass\n\n try:\n print(\"======== Running on {} ========\\n\"\n \"(Press CTRL+C to quit)\".format(', '.join(uris)))\n loop.run_forever()\n except (GracefulExit, KeyboardInterrupt): # pragma: no cover\n pass\n finally:\n server_closures = []\n for srv in servers:\n srv.close()\n server_closures.append(srv.wait_closed())\n loop.run_until_complete(\n asyncio.gather(*server_closures, loop=loop))\n loop.run_until_complete(app.shutdown())\n loop.run_until_complete(handler.shutdown(shutdown_timeout))\n finally:\n loop.run_until_complete(app.cleanup())\n if not user_supplied_loop:\n loop.close()\n\n\ndef main(argv):\n arg_parser = ArgumentParser(\n description=\"aiohttp.web Application server\",\n prog=\"aiohttp.web\"\n )\n arg_parser.add_argument(\n \"entry_func\",\n help=(\"Callable returning the `aiohttp.web.Application` instance to \"\n \"run. Should be specified in the 'module:function' syntax.\"),\n metavar=\"entry-func\"\n )\n arg_parser.add_argument(\n \"-H\", \"--hostname\",\n help=\"TCP/IP hostname to serve on (default: %(default)r)\",\n default=\"localhost\"\n )\n arg_parser.add_argument(\n \"-P\", \"--port\",\n help=\"TCP/IP port to serve on (default: %(default)r)\",\n type=int,\n default=\"8080\"\n )\n arg_parser.add_argument(\n \"-U\", \"--path\",\n help=\"Unix file system path to serve on. 
Specifying a path will cause \"\n \"hostname and port arguments to be ignored.\",\n )\n args, extra_argv = arg_parser.parse_known_args(argv)\n\n # Import logic\n mod_str, _, func_str = args.entry_func.partition(\":\")\n if not func_str or not mod_str:\n arg_parser.error(\n \"'entry-func' not in 'module:function' syntax\"\n )\n if mod_str.startswith(\".\"):\n arg_parser.error(\"relative module names not supported\")\n try:\n module = import_module(mod_str)\n except ImportError as ex:\n arg_parser.error(\"unable to import %s: %s\" % (mod_str, ex))\n try:\n func = getattr(module, func_str)\n except AttributeError:\n arg_parser.error(\"module %r has no attribute %r\" % (mod_str, func_str))\n\n # Compatibility logic\n if args.path is not None and not hasattr(socket, 'AF_UNIX'):\n arg_parser.error(\"file system paths not supported by your operating\"\n \" environment\")\n\n app = func(extra_argv)\n run_app(app, host=args.hostname, port=args.port, path=args.path)\n arg_parser.exit(message=\"Stopped\\n\")\n\n\nif __name__ == \"__main__\": # pragma: no branch\n main(sys.argv[1:]) # pragma: no cover\n", "path": "aiohttp/web.py" } ]
[ { "content": "import asyncio\nimport os\nimport signal\nimport socket\nimport stat\nimport sys\nimport warnings\nfrom argparse import ArgumentParser\nfrom collections import Iterable, MutableMapping\nfrom importlib import import_module\n\nfrom yarl import URL\n\nfrom . import (hdrs, web_exceptions, web_fileresponse, web_middlewares,\n web_protocol, web_request, web_response, web_server,\n web_urldispatcher, web_ws)\nfrom .abc import AbstractMatchInfo, AbstractRouter\nfrom .frozenlist import FrozenList\nfrom .http import HttpVersion # noqa\nfrom .log import access_logger, web_logger\nfrom .signals import FuncSignal, PostSignal, PreSignal, Signal\nfrom .web_exceptions import * # noqa\nfrom .web_fileresponse import * # noqa\nfrom .web_middlewares import * # noqa\nfrom .web_protocol import * # noqa\nfrom .web_request import * # noqa\nfrom .web_response import * # noqa\nfrom .web_server import Server\nfrom .web_urldispatcher import * # noqa\nfrom .web_urldispatcher import PrefixedSubAppResource\nfrom .web_ws import * # noqa\n\n\n__all__ = (web_protocol.__all__ +\n web_fileresponse.__all__ +\n web_request.__all__ +\n web_response.__all__ +\n web_exceptions.__all__ +\n web_urldispatcher.__all__ +\n web_ws.__all__ +\n web_server.__all__ +\n web_middlewares.__all__ +\n ('Application', 'HttpVersion', 'MsgType'))\n\n\nclass Application(MutableMapping):\n def __init__(self, *,\n logger=web_logger,\n router=None,\n middlewares=(),\n handler_args=None,\n client_max_size=1024**2,\n secure_proxy_ssl_header=None,\n loop=None,\n debug=...):\n if router is None:\n router = web_urldispatcher.UrlDispatcher()\n assert isinstance(router, AbstractRouter), router\n\n if loop is not None:\n warnings.warn(\"loop argument is deprecated\", ResourceWarning)\n\n if secure_proxy_ssl_header is not None:\n warnings.warn(\n \"secure_proxy_ssl_header is deprecated\", DeprecationWarning)\n\n self._debug = debug\n self._router = router\n self._secure_proxy_ssl_header = secure_proxy_ssl_header\n self._loop = loop\n self._handler_args = handler_args\n self.logger = logger\n\n self._middlewares = FrozenList(middlewares)\n self._state = {}\n self._frozen = False\n self._subapps = []\n\n self._on_pre_signal = PreSignal()\n self._on_post_signal = PostSignal()\n self._on_loop_available = FuncSignal(self)\n self._on_response_prepare = Signal(self)\n self._on_startup = Signal(self)\n self._on_shutdown = Signal(self)\n self._on_cleanup = Signal(self)\n self._client_max_size = client_max_size\n\n # MutableMapping API\n\n def __eq__(self, other):\n return self is other\n\n def __getitem__(self, key):\n return self._state[key]\n\n def _check_frozen(self):\n if self._frozen:\n warnings.warn(\"Changing state of started or joined \"\n \"application is deprecated\",\n DeprecationWarning,\n stacklevel=3)\n\n def __setitem__(self, key, value):\n self._check_frozen()\n self._state[key] = value\n\n def __delitem__(self, key):\n self._check_frozen()\n del self._state[key]\n\n def __len__(self):\n return len(self._state)\n\n def __iter__(self):\n return iter(self._state)\n\n ########\n @property\n def loop(self):\n return self._loop\n\n def _set_loop(self, loop):\n if loop is None:\n loop = asyncio.get_event_loop()\n if self._loop is not None and self._loop is not loop:\n raise RuntimeError(\n \"web.Application instance initialized with different loop\")\n\n self._loop = loop\n self._on_loop_available.send(self)\n\n # set loop debug\n if self._debug is ...:\n self._debug = loop.get_debug()\n\n # set loop to sub applications\n for subapp in 
self._subapps:\n subapp._set_loop(loop)\n\n @property\n def frozen(self):\n return self._frozen\n\n def freeze(self):\n if self._frozen:\n return\n\n self._frozen = True\n self._middlewares = tuple(reversed(self._middlewares))\n self._router.freeze()\n self._on_loop_available.freeze()\n self._on_pre_signal.freeze()\n self._on_post_signal.freeze()\n self._on_response_prepare.freeze()\n self._on_startup.freeze()\n self._on_shutdown.freeze()\n self._on_cleanup.freeze()\n\n for subapp in self._subapps:\n subapp.freeze()\n\n @property\n def debug(self):\n return self._debug\n\n def _reg_subapp_signals(self, subapp):\n\n def reg_handler(signame):\n subsig = getattr(subapp, signame)\n\n @asyncio.coroutine\n def handler(app):\n yield from subsig.send(subapp)\n appsig = getattr(self, signame)\n appsig.append(handler)\n\n reg_handler('on_startup')\n reg_handler('on_shutdown')\n reg_handler('on_cleanup')\n\n def add_subapp(self, prefix, subapp):\n if self.frozen:\n raise RuntimeError(\n \"Cannot add sub application to frozen application\")\n if subapp.frozen:\n raise RuntimeError(\"Cannot add frozen application\")\n if prefix.endswith('/'):\n prefix = prefix[:-1]\n if prefix in ('', '/'):\n raise ValueError(\"Prefix cannot be empty\")\n\n resource = PrefixedSubAppResource(prefix, subapp)\n self.router.register_resource(resource)\n self._reg_subapp_signals(subapp)\n self._subapps.append(subapp)\n if self._loop is not None:\n subapp._set_loop(self._loop)\n return resource\n\n @property\n def on_loop_available(self):\n warnings.warn(\"on_loop_available is deprecated and will be removed\",\n DeprecationWarning, stacklevel=2)\n return self._on_loop_available\n\n @property\n def on_response_prepare(self):\n return self._on_response_prepare\n\n @property\n def on_pre_signal(self):\n return self._on_pre_signal\n\n @property\n def on_post_signal(self):\n return self._on_post_signal\n\n @property\n def on_startup(self):\n return self._on_startup\n\n @property\n def on_shutdown(self):\n return self._on_shutdown\n\n @property\n def on_cleanup(self):\n return self._on_cleanup\n\n @property\n def router(self):\n return self._router\n\n @property\n def middlewares(self):\n return self._middlewares\n\n def make_handler(self, *, loop=None,\n secure_proxy_ssl_header=None, **kwargs):\n self._set_loop(loop)\n self.freeze()\n\n kwargs['debug'] = self.debug\n if self._handler_args:\n for k, v in self._handler_args.items():\n kwargs[k] = v\n\n if secure_proxy_ssl_header:\n self._secure_proxy_ssl_header = secure_proxy_ssl_header\n return Server(self._handle, request_factory=self._make_request,\n loop=self.loop, **kwargs)\n\n @asyncio.coroutine\n def startup(self):\n \"\"\"Causes on_startup signal\n\n Should be called in the event loop along with the request handler.\n \"\"\"\n yield from self.on_startup.send(self)\n\n @asyncio.coroutine\n def shutdown(self):\n \"\"\"Causes on_shutdown signal\n\n Should be called before cleanup()\n \"\"\"\n yield from self.on_shutdown.send(self)\n\n @asyncio.coroutine\n def cleanup(self):\n \"\"\"Causes on_cleanup signal\n\n Should be called after shutdown()\n \"\"\"\n yield from self.on_cleanup.send(self)\n\n def _make_request(self, message, payload, protocol, writer, task,\n _cls=web_request.Request):\n return _cls(\n message, payload, protocol, writer, protocol._time_service, task,\n secure_proxy_ssl_header=self._secure_proxy_ssl_header,\n client_max_size=self._client_max_size)\n\n @asyncio.coroutine\n def _handle(self, request):\n match_info = yield from self._router.resolve(request)\n 
assert isinstance(match_info, AbstractMatchInfo), match_info\n match_info.add_app(self)\n\n if __debug__:\n match_info.freeze()\n\n resp = None\n request._match_info = match_info\n expect = request.headers.get(hdrs.EXPECT)\n if expect:\n resp = yield from match_info.expect_handler(request)\n yield from request.writer.drain()\n\n if resp is None:\n handler = match_info.handler\n for app in match_info.apps[::-1]:\n for factory in app._middlewares:\n handler = yield from factory(app, handler)\n\n resp = yield from handler(request)\n\n assert isinstance(resp, web_response.StreamResponse), \\\n (\"Handler {!r} should return response instance, \"\n \"got {!r} [middlewares {!r}]\").format(\n match_info.handler, type(resp),\n [middleware for middleware in app.middlewares\n for app in match_info.apps])\n return resp\n\n def __call__(self):\n \"\"\"gunicorn compatibility\"\"\"\n return self\n\n def __repr__(self):\n return \"<Application 0x{:x}>\".format(id(self))\n\n\nclass GracefulExit(SystemExit):\n code = 1\n\n\ndef raise_graceful_exit():\n raise GracefulExit()\n\n\ndef _make_server_creators(handler, *, loop, ssl_context,\n host, port, path, sock, backlog):\n\n scheme = 'https' if ssl_context else 'http'\n base_url = URL('{}://localhost'.format(scheme)).with_port(port)\n\n if path is None:\n paths = ()\n elif isinstance(path, (str, bytes, bytearray, memoryview))\\\n or not isinstance(path, Iterable):\n paths = (path,)\n else:\n paths = path\n\n if sock is None:\n socks = ()\n elif not isinstance(sock, Iterable):\n socks = (sock,)\n else:\n socks = sock\n\n if host is None:\n if (paths or socks) and not port:\n hosts = ()\n else:\n hosts = (\"0.0.0.0\",)\n elif isinstance(host, (str, bytes, bytearray, memoryview))\\\n or not isinstance(host, Iterable):\n hosts = (host,)\n else:\n hosts = host\n\n if hosts and port is None:\n port = 8443 if ssl_context else 8080\n\n server_creations = []\n uris = [str(base_url.with_host(host)) for host in hosts]\n if hosts:\n # Multiple hosts bound to same server is available in most loop\n # implementations, but only send multiple if we have multiple.\n host_binding = hosts[0] if len(hosts) == 1 else hosts\n server_creations.append(\n loop.create_server(\n handler, host_binding, port, ssl=ssl_context, backlog=backlog\n )\n )\n for path in paths:\n # Most loop implementations don't support multiple paths bound in same\n # server, so create a server for each.\n server_creations.append(\n loop.create_unix_server(\n handler, path, ssl=ssl_context, backlog=backlog\n )\n )\n uris.append('{}://unix:{}:'.format(scheme, path))\n\n # Clean up prior socket path if stale and not abstract.\n # CPython 3.5.3+'s event loop already does this. 
See\n # https://github.com/python/asyncio/issues/425\n if path[0] not in (0, '\\x00'): # pragma: no branch\n try:\n if stat.S_ISSOCK(os.stat(path).st_mode):\n os.remove(path)\n except FileNotFoundError:\n pass\n for sock in socks:\n server_creations.append(\n loop.create_server(\n handler, sock=sock, ssl=ssl_context, backlog=backlog\n )\n )\n\n if hasattr(socket, 'AF_UNIX') and sock.family == socket.AF_UNIX:\n uris.append('{}://unix:{}:'.format(scheme, sock.getsockname()))\n else:\n host, port = sock.getsockname()\n uris.append(str(base_url.with_host(host).with_port(port)))\n return server_creations, uris\n\n\ndef run_app(app, *, host=None, port=None, path=None, sock=None,\n shutdown_timeout=60.0, ssl_context=None,\n print=print, backlog=128, access_log_format=None,\n access_log=access_logger, handle_signals=True, loop=None):\n \"\"\"Run an app locally\"\"\"\n user_supplied_loop = loop is not None\n if loop is None:\n loop = asyncio.get_event_loop()\n\n app._set_loop(loop)\n loop.run_until_complete(app.startup())\n\n try:\n make_handler_kwargs = dict()\n if access_log_format is not None:\n make_handler_kwargs['access_log_format'] = access_log_format\n handler = app.make_handler(loop=loop, access_log=access_log,\n **make_handler_kwargs)\n\n server_creations, uris = _make_server_creators(\n handler,\n loop=loop, ssl_context=ssl_context,\n host=host, port=port, path=path, sock=sock,\n backlog=backlog)\n servers = loop.run_until_complete(\n asyncio.gather(*server_creations, loop=loop)\n )\n\n if handle_signals:\n try:\n loop.add_signal_handler(signal.SIGINT, raise_graceful_exit)\n loop.add_signal_handler(signal.SIGTERM, raise_graceful_exit)\n except NotImplementedError: # pragma: no cover\n # add_signal_handler is not implemented on Windows\n pass\n\n try:\n print(\"======== Running on {} ========\\n\"\n \"(Press CTRL+C to quit)\".format(', '.join(uris)))\n loop.run_forever()\n except (GracefulExit, KeyboardInterrupt): # pragma: no cover\n pass\n finally:\n server_closures = []\n for srv in servers:\n srv.close()\n server_closures.append(srv.wait_closed())\n loop.run_until_complete(\n asyncio.gather(*server_closures, loop=loop))\n loop.run_until_complete(app.shutdown())\n loop.run_until_complete(handler.shutdown(shutdown_timeout))\n finally:\n loop.run_until_complete(app.cleanup())\n if not user_supplied_loop:\n loop.close()\n\n\ndef main(argv):\n arg_parser = ArgumentParser(\n description=\"aiohttp.web Application server\",\n prog=\"aiohttp.web\"\n )\n arg_parser.add_argument(\n \"entry_func\",\n help=(\"Callable returning the `aiohttp.web.Application` instance to \"\n \"run. Should be specified in the 'module:function' syntax.\"),\n metavar=\"entry-func\"\n )\n arg_parser.add_argument(\n \"-H\", \"--hostname\",\n help=\"TCP/IP hostname to serve on (default: %(default)r)\",\n default=\"localhost\"\n )\n arg_parser.add_argument(\n \"-P\", \"--port\",\n help=\"TCP/IP port to serve on (default: %(default)r)\",\n type=int,\n default=\"8080\"\n )\n arg_parser.add_argument(\n \"-U\", \"--path\",\n help=\"Unix file system path to serve on. 
Specifying a path will cause \"\n \"hostname and port arguments to be ignored.\",\n )\n args, extra_argv = arg_parser.parse_known_args(argv)\n\n # Import logic\n mod_str, _, func_str = args.entry_func.partition(\":\")\n if not func_str or not mod_str:\n arg_parser.error(\n \"'entry-func' not in 'module:function' syntax\"\n )\n if mod_str.startswith(\".\"):\n arg_parser.error(\"relative module names not supported\")\n try:\n module = import_module(mod_str)\n except ImportError as ex:\n arg_parser.error(\"unable to import %s: %s\" % (mod_str, ex))\n try:\n func = getattr(module, func_str)\n except AttributeError:\n arg_parser.error(\"module %r has no attribute %r\" % (mod_str, func_str))\n\n # Compatibility logic\n if args.path is not None and not hasattr(socket, 'AF_UNIX'):\n arg_parser.error(\"file system paths not supported by your operating\"\n \" environment\")\n\n app = func(extra_argv)\n run_app(app, host=args.hostname, port=args.port, path=args.path)\n arg_parser.exit(message=\"Stopped\\n\")\n\n\nif __name__ == \"__main__\": # pragma: no branch\n main(sys.argv[1:]) # pragma: no cover\n", "path": "aiohttp/web.py" } ]
diff --git a/CHANGES.rst b/CHANGES.rst index 71c8722c22c..2225241cb55 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -36,6 +36,14 @@ Changes - Fix BadStatusLine caused by extra `CRLF` after `POST` data #1792 +- + +- Deprecate undocumented app.on_loop_available signal #1978 + +- + +- + 2.1.0 (2017-05-26) ------------------ diff --git a/aiohttp/web.py b/aiohttp/web.py index 1dac37d7cb4..c4afe4620c8 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -199,6 +199,8 @@ def add_subapp(self, prefix, subapp): @property def on_loop_available(self): + warnings.warn("on_loop_available is deprecated and will be removed", + DeprecationWarning, stacklevel=2) return self._on_loop_available @property diff --git a/tests/test_web_application.py b/tests/test_web_application.py index 1633bfe1b50..daa19ef5df4 100644 --- a/tests/test_web_application.py +++ b/tests/test_web_application.py @@ -50,7 +50,8 @@ def test_on_loop_available(loop): app = web.Application() cb = mock.Mock() - app.on_loop_available.append(cb) + with pytest.warns(DeprecationWarning): + app.on_loop_available.append(cb) app._set_loop(loop) cb.assert_called_with(app)
saulpw__visidata-1310
[v2.9dev] Disable adding new row in DirSheet **Small description** Unless it is actually meant to be used, `add-row` should probably be disabled on DirSheet, as it currently raises an error. **Expected result** A warning shown to the user that a new row/file cannot be created. **Actual result with screenshot** ![image](https://user-images.githubusercontent.com/11225502/155373843-aeef6d8e-cc96-4559-b3e6-dc9690aab25f.png) **Steps to reproduce with sample data and a .vd** Open the vd directory with vd (`vd .`) and press `a`
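The change that resolves this (shown in full in the diff below) is a one-method override on `DirSheet`; `vd.fail` aborts the command and surfaces the message in the status line instead of the traceback in the screenshot:

```python
from visidata import vd, Sheet

class DirSheet(Sheet):          # excerpt; the real class defines much more
    def newRow(self):
        vd.fail('new file not supported')
```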
[ { "content": "import os\nimport shutil\nimport stat\nimport subprocess\nimport contextlib\ntry:\n import pwd\n import grp\nexcept ImportError:\n pass # pwd,grp modules not available on Windows\n\nfrom visidata import Column, Sheet, LazyComputeRow, asynccache, BaseSheet, vd\nfrom visidata import Path, ENTER, date, asyncthread, FileExistsError, VisiData\nfrom visidata import modtime, filesize, vstat, Progress, TextSheet\n\n\nvd.option('dir_recurse', False, 'walk source path recursively on DirSheet')\nvd.option('dir_hidden', False, 'load hidden files on DirSheet')\n\n\[email protected]_property\ndef currentDirSheet(p):\n 'Support opening the current DirSheet from the vdmenu'\n return DirSheet('.', source=Path('.'))\n\n@asyncthread\ndef exec_shell(*args):\n p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n out, err = p.communicate()\n if err or out:\n lines = err.decode('utf8').splitlines() + out.decode('utf8').splitlines()\n vd.push(TextSheet(' '.join(args), source=lines))\n\n\[email protected]\ndef open_dir(vd, p):\n return DirSheet(p.name, source=p)\n\[email protected]\ndef open_fdir(vd, p):\n return FileListSheet(p.name, source=p)\n\[email protected]\ndef addShellColumns(vd, cmd, sheet):\n shellcol = ColumnShell(cmd, source=sheet, width=0)\n sheet.addColumnAtCursor(\n shellcol,\n Column(cmd+'_stdout', srccol=shellcol, getter=lambda col,row: col.srccol.getValue(row)[0]),\n Column(cmd+'_stderr', srccol=shellcol, getter=lambda col,row: col.srccol.getValue(row)[1]))\n\n\nclass ColumnShell(Column):\n def __init__(self, name, cmd=None, **kwargs):\n super().__init__(name, **kwargs)\n self.expr = cmd or name\n\n @asynccache(lambda col,row: (col, col.sheet.rowid(row)))\n def calcValue(self, row):\n try:\n import shlex\n args = []\n context = LazyComputeRow(self.source, row)\n for arg in shlex.split(self.expr):\n if arg.startswith('$'):\n args.append(shlex.quote(str(context[arg[1:]])))\n else:\n args.append(arg)\n\n p = subprocess.Popen([os.getenv('SHELL', 'bash'), '-c', ' '.join(args)],\n stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n return p.communicate()\n except Exception as e:\n vd.exceptionCaught(e)\n\n\nclass DirSheet(Sheet):\n 'Sheet displaying directory, using ENTER to open a particular file. Edited fields are applied to the filesystem.'\n rowtype = 'files' # rowdef: Path\n defer = True\n columns = [\n Column('directory',\n getter=lambda col,row: str(row.parent) if str(row.parent) == '.' 
else str(row.parent) + '/',\n setter=lambda col,row,val: col.sheet.moveFile(row, val)),\n Column('filename',\n getter=lambda col,row: row.name + row.suffix,\n setter=lambda col,row,val: col.sheet.renameFile(row, val)),\n Column('abspath', width=0, type=str,\n getter=lambda col,row: row,\n setter=lambda col,row,val: os.rename(row, val)),\n Column('ext', getter=lambda col,row: row.is_dir() and '/' or row.ext),\n Column('size', type=int,\n getter=lambda col,row: filesize(row),\n setter=lambda col,row,val: os.truncate(row, int(val))),\n Column('modtime', type=date,\n getter=lambda col,row: modtime(row),\n setter=lambda col,row,val: os.utime(row, times=((row.stat().st_atime, float(val))))),\n Column('owner', width=0,\n getter=lambda col,row: pwd.getpwuid(row.stat().st_uid).pw_name,\n setter=lambda col,row,val: os.chown(row, pwd.getpwnam(val).pw_uid, -1)),\n Column('group', width=0,\n getter=lambda col,row: grp.getgrgid(row.stat().st_gid).gr_name,\n setter=lambda col,row,val: os.chown(row, -1, grp.getgrnam(val).pw_gid)),\n Column('mode', width=0,\n getter=lambda col,row: '{:o}'.format(row.stat().st_mode),\n setter=lambda col,row,val: os.chmod(row, int(val, 8))),\n Column('filetype', width=0, cache='async', getter=lambda col,row: subprocess.Popen(['file', '--brief', row], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0].strip()),\n ]\n nKeys = 2\n _ordering = [('modtime', True)] # sort by reverse modtime initially\n\n @staticmethod\n def colorOwner(sheet, col, row, val):\n ret = ''\n if col.name == 'group':\n mode = row.stat().st_mode\n if mode & stat.S_IXGRP: ret = 'bold '\n if mode & stat.S_IWGRP: return ret + 'green'\n if mode & stat.S_IRGRP: return ret + 'yellow'\n elif col.name == 'owner':\n mode = row.stat().st_mode\n if mode & stat.S_IXUSR: ret = 'bold '\n if mode & stat.S_IWUSR: return ret + 'green'\n if mode & stat.S_IRUSR: return ret + 'yellow'\n\n def moveFile(self, row, newparent):\n parent = Path(newparent)\n newpath = Path(parent/(row.name + row.suffix))\n if parent.exists():\n if not parent.is_dir():\n vd.error('destination %s not a directory' % parent)\n else:\n with contextlib.suppress(FileExistsError):\n os.makedirs(parent)\n\n row.rename(newpath)\n row.given = newpath # modify visidata.Path\n self.restat()\n\n def renameFile(self, row, val):\n newpath = row.with_name(val)\n row.rename(newpath)\n row.given = newpath\n self.restat()\n\n def removeFile(self, path):\n if path.is_dir():\n os.rmdir(path)\n else:\n path.unlink()\n\n def deleteSourceRow(self, r):\n self.removeFile(r)\n\n def iterload(self):\n hidden_files = self.options.dir_hidden\n\n def _walkfiles(p):\n basepath = str(p)\n for folder, subdirs, files in os.walk(basepath):\n subfolder = folder[len(basepath)+1:]\n if not hidden_files and subfolder.startswith('.'): continue\n if subfolder in ['.', '..']: continue\n\n fpath = Path(folder)\n yield fpath\n\n for fn in files:\n yield fpath/fn\n\n def _listfiles(p):\n basepath = str(p)\n for fn in os.listdir(basepath):\n yield p/fn\n\n\n basepath = str(self.source)\n\n folders = set()\n f = _walkfiles if self.options.dir_recurse else _listfiles\n\n for p in f(self.source):\n if not hidden_files and p.name.startswith('.'):\n continue\n\n yield p\n\n def preloadHook(self):\n super().preloadHook()\n Path.stat.cache_clear()\n\n def restat(self):\n vstat.cache_clear()\n\n @asyncthread\n def putChanges(self):\n self.commitAdds()\n self.commitMods()\n self.commitDeletes()\n\n self._deferredDels.clear()\n self.reload()\n\n\nclass FileListSheet(DirSheet):\n _ordering = 
[]\n def iterload(self):\n for fn in self.source.open_text():\n yield Path(fn.rstrip())\n\n\[email protected]\ndef inputShell(vd):\n cmd = vd.input(\"sh$ \", type=\"sh\")\n if '$' not in cmd:\n vd.warning('no $column in command')\n return cmd\n\nBaseSheet.addCommand('', 'open-dir-current', 'vd.push(vd.currentDirSheet)', 'open Directory Sheet: browse properties of files in current directory')\n\nSheet.addCommand('z;', 'addcol-sh', 'cmd=inputShell(); addShellColumns(cmd, sheet)', 'create new column from bash expression, with $columnNames as variables')\n\nDirSheet.addCommand(ENTER, 'open-row', 'vd.push(openSource(cursorRow or fail(\"no row\"), filetype=\"dir\" if cursorRow.is_dir() else LazyComputeRow(sheet, cursorRow).ext))', 'open current file as a new sheet')\nDirSheet.addCommand('g'+ENTER, 'open-rows', 'for r in selectedRows: vd.push(openSource(r))', 'open selected files as new sheets')\nDirSheet.addCommand('^O', 'sysopen-row', 'launchEditor(cursorRow)', 'open current file in external $EDITOR')\nDirSheet.addCommand('g^O', 'sysopen-rows', 'launchEditor(*selectedRows)', 'open selected files in external $EDITOR')\n\nDirSheet.addCommand('y', 'copy-row', 'copy_files([cursorRow], inputPath(\"copy to dest: \"))', 'copy file to given directory')\nDirSheet.addCommand('gy', 'copy-selected', 'copy_files(selectedRows, inputPath(\"copy to dest: \", value=cursorRow.given))', 'copy selected files to given directory')\n\[email protected]\n@asyncthread\ndef copy_files(sheet, paths, dest):\n destdir = Path(dest)\n destdir.is_dir() or vd.fail('target must be directory')\n vd.status('copying %s %s to %s' % (len(paths), sheet.rowtype, destdir))\n os.makedirs(destdir, exist_ok=True)\n for srcpath in Progress(paths, gerund='copying'):\n try:\n destpath = destdir/str(srcpath._path.name)\n if srcpath.is_dir():\n shutil.copy_tree(srcpath, destpath)\n else:\n shutil.copyfile(srcpath, destpath)\n except Exception as e:\n vd.exceptionCaught(e)\n\n\nvd.addGlobals({\n 'DirSheet': DirSheet\n})\n", "path": "visidata/shell.py" } ]
[ { "content": "import os\nimport shutil\nimport stat\nimport subprocess\nimport contextlib\ntry:\n import pwd\n import grp\nexcept ImportError:\n pass # pwd,grp modules not available on Windows\n\nfrom visidata import Column, Sheet, LazyComputeRow, asynccache, BaseSheet, vd\nfrom visidata import Path, ENTER, date, asyncthread, FileExistsError, VisiData\nfrom visidata import modtime, filesize, vstat, Progress, TextSheet\n\n\nvd.option('dir_recurse', False, 'walk source path recursively on DirSheet')\nvd.option('dir_hidden', False, 'load hidden files on DirSheet')\n\n\[email protected]_property\ndef currentDirSheet(p):\n 'Support opening the current DirSheet from the vdmenu'\n return DirSheet('.', source=Path('.'))\n\n@asyncthread\ndef exec_shell(*args):\n p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n out, err = p.communicate()\n if err or out:\n lines = err.decode('utf8').splitlines() + out.decode('utf8').splitlines()\n vd.push(TextSheet(' '.join(args), source=lines))\n\n\[email protected]\ndef open_dir(vd, p):\n return DirSheet(p.name, source=p)\n\[email protected]\ndef open_fdir(vd, p):\n return FileListSheet(p.name, source=p)\n\[email protected]\ndef addShellColumns(vd, cmd, sheet):\n shellcol = ColumnShell(cmd, source=sheet, width=0)\n sheet.addColumnAtCursor(\n shellcol,\n Column(cmd+'_stdout', srccol=shellcol, getter=lambda col,row: col.srccol.getValue(row)[0]),\n Column(cmd+'_stderr', srccol=shellcol, getter=lambda col,row: col.srccol.getValue(row)[1]))\n\n\nclass ColumnShell(Column):\n def __init__(self, name, cmd=None, **kwargs):\n super().__init__(name, **kwargs)\n self.expr = cmd or name\n\n @asynccache(lambda col,row: (col, col.sheet.rowid(row)))\n def calcValue(self, row):\n try:\n import shlex\n args = []\n context = LazyComputeRow(self.source, row)\n for arg in shlex.split(self.expr):\n if arg.startswith('$'):\n args.append(shlex.quote(str(context[arg[1:]])))\n else:\n args.append(arg)\n\n p = subprocess.Popen([os.getenv('SHELL', 'bash'), '-c', ' '.join(args)],\n stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n return p.communicate()\n except Exception as e:\n vd.exceptionCaught(e)\n\n\nclass DirSheet(Sheet):\n 'Sheet displaying directory, using ENTER to open a particular file. Edited fields are applied to the filesystem.'\n rowtype = 'files' # rowdef: Path\n defer = True\n columns = [\n Column('directory',\n getter=lambda col,row: str(row.parent) if str(row.parent) == '.' 
else str(row.parent) + '/',\n setter=lambda col,row,val: col.sheet.moveFile(row, val)),\n Column('filename',\n getter=lambda col,row: row.name + row.suffix,\n setter=lambda col,row,val: col.sheet.renameFile(row, val)),\n Column('abspath', width=0, type=str,\n getter=lambda col,row: row,\n setter=lambda col,row,val: os.rename(row, val)),\n Column('ext', getter=lambda col,row: row.is_dir() and '/' or row.ext),\n Column('size', type=int,\n getter=lambda col,row: filesize(row),\n setter=lambda col,row,val: os.truncate(row, int(val))),\n Column('modtime', type=date,\n getter=lambda col,row: modtime(row),\n setter=lambda col,row,val: os.utime(row, times=((row.stat().st_atime, float(val))))),\n Column('owner', width=0,\n getter=lambda col,row: pwd.getpwuid(row.stat().st_uid).pw_name,\n setter=lambda col,row,val: os.chown(row, pwd.getpwnam(val).pw_uid, -1)),\n Column('group', width=0,\n getter=lambda col,row: grp.getgrgid(row.stat().st_gid).gr_name,\n setter=lambda col,row,val: os.chown(row, -1, grp.getgrnam(val).pw_gid)),\n Column('mode', width=0,\n getter=lambda col,row: '{:o}'.format(row.stat().st_mode),\n setter=lambda col,row,val: os.chmod(row, int(val, 8))),\n Column('filetype', width=0, cache='async', getter=lambda col,row: subprocess.Popen(['file', '--brief', row], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0].strip()),\n ]\n nKeys = 2\n _ordering = [('modtime', True)] # sort by reverse modtime initially\n\n @staticmethod\n def colorOwner(sheet, col, row, val):\n ret = ''\n if col.name == 'group':\n mode = row.stat().st_mode\n if mode & stat.S_IXGRP: ret = 'bold '\n if mode & stat.S_IWGRP: return ret + 'green'\n if mode & stat.S_IRGRP: return ret + 'yellow'\n elif col.name == 'owner':\n mode = row.stat().st_mode\n if mode & stat.S_IXUSR: ret = 'bold '\n if mode & stat.S_IWUSR: return ret + 'green'\n if mode & stat.S_IRUSR: return ret + 'yellow'\n\n def moveFile(self, row, newparent):\n parent = Path(newparent)\n newpath = Path(parent/(row.name + row.suffix))\n if parent.exists():\n if not parent.is_dir():\n vd.error('destination %s not a directory' % parent)\n else:\n with contextlib.suppress(FileExistsError):\n os.makedirs(parent)\n\n row.rename(newpath)\n row.given = newpath # modify visidata.Path\n self.restat()\n\n def renameFile(self, row, val):\n newpath = row.with_name(val)\n row.rename(newpath)\n row.given = newpath\n self.restat()\n\n def removeFile(self, path):\n if path.is_dir():\n os.rmdir(path)\n else:\n path.unlink()\n\n def deleteSourceRow(self, r):\n self.removeFile(r)\n\n def newRow(self):\n vd.fail('new file not supported')\n\n def iterload(self):\n hidden_files = self.options.dir_hidden\n\n def _walkfiles(p):\n basepath = str(p)\n for folder, subdirs, files in os.walk(basepath):\n subfolder = folder[len(basepath)+1:]\n if not hidden_files and subfolder.startswith('.'): continue\n if subfolder in ['.', '..']: continue\n\n fpath = Path(folder)\n yield fpath\n\n for fn in files:\n yield fpath/fn\n\n def _listfiles(p):\n basepath = str(p)\n for fn in os.listdir(basepath):\n yield p/fn\n\n\n basepath = str(self.source)\n\n folders = set()\n f = _walkfiles if self.options.dir_recurse else _listfiles\n\n for p in f(self.source):\n if not hidden_files and p.name.startswith('.'):\n continue\n\n yield p\n\n def preloadHook(self):\n super().preloadHook()\n Path.stat.cache_clear()\n\n def restat(self):\n vstat.cache_clear()\n\n @asyncthread\n def putChanges(self):\n self.commitAdds()\n self.commitMods()\n self.commitDeletes()\n\n self._deferredDels.clear()\n 
self.reload()\n\n\nclass FileListSheet(DirSheet):\n _ordering = []\n def iterload(self):\n for fn in self.source.open_text():\n yield Path(fn.rstrip())\n\n\[email protected]\ndef inputShell(vd):\n cmd = vd.input(\"sh$ \", type=\"sh\")\n if '$' not in cmd:\n vd.warning('no $column in command')\n return cmd\n\nBaseSheet.addCommand('', 'open-dir-current', 'vd.push(vd.currentDirSheet)', 'open Directory Sheet: browse properties of files in current directory')\n\nSheet.addCommand('z;', 'addcol-sh', 'cmd=inputShell(); addShellColumns(cmd, sheet)', 'create new column from bash expression, with $columnNames as variables')\n\nDirSheet.addCommand(ENTER, 'open-row', 'vd.push(openSource(cursorRow or fail(\"no row\"), filetype=\"dir\" if cursorRow.is_dir() else LazyComputeRow(sheet, cursorRow).ext))', 'open current file as a new sheet')\nDirSheet.addCommand('g'+ENTER, 'open-rows', 'for r in selectedRows: vd.push(openSource(r))', 'open selected files as new sheets')\nDirSheet.addCommand('^O', 'sysopen-row', 'launchEditor(cursorRow)', 'open current file in external $EDITOR')\nDirSheet.addCommand('g^O', 'sysopen-rows', 'launchEditor(*selectedRows)', 'open selected files in external $EDITOR')\n\nDirSheet.addCommand('y', 'copy-row', 'copy_files([cursorRow], inputPath(\"copy to dest: \"))', 'copy file to given directory')\nDirSheet.addCommand('gy', 'copy-selected', 'copy_files(selectedRows, inputPath(\"copy to dest: \", value=cursorRow.given))', 'copy selected files to given directory')\n\[email protected]\n@asyncthread\ndef copy_files(sheet, paths, dest):\n destdir = Path(dest)\n destdir.is_dir() or vd.fail('target must be directory')\n vd.status('copying %s %s to %s' % (len(paths), sheet.rowtype, destdir))\n os.makedirs(destdir, exist_ok=True)\n for srcpath in Progress(paths, gerund='copying'):\n try:\n destpath = destdir/str(srcpath._path.name)\n if srcpath.is_dir():\n shutil.copy_tree(srcpath, destpath)\n else:\n shutil.copyfile(srcpath, destpath)\n except Exception as e:\n vd.exceptionCaught(e)\n\n\nvd.addGlobals({\n 'DirSheet': DirSheet\n})\n", "path": "visidata/shell.py" } ]
diff --git a/visidata/shell.py b/visidata/shell.py index b60be51d1..ce43a6d55 100644 --- a/visidata/shell.py +++ b/visidata/shell.py @@ -151,6 +151,9 @@ def removeFile(self, path): def deleteSourceRow(self, r): self.removeFile(r) + def newRow(self): + vd.fail('new file not supported') + def iterload(self): hidden_files = self.options.dir_hidden
holoviz__panel-1775
Pyvista tests breaking Looks like latest pyvista 0.27.2 changed some internal APIs: ```python def pyvista_render_window(): """ Allow to download and create a more complex example easily """ from pyvista import examples sphere = pv.Sphere() #test actor globe = examples.load_globe() #test texture head = examples.download_head() #test volume uniform = examples.load_uniform() #test structured grid scalars=sphere.points[:, 2] > sphere._add_point_array(scalars, 'test', set_active=True) #allow to test scalars E AttributeError: 'PolyData' object has no attribute '_add_point_array' ``` Cc @xavArtley
[ { "content": "#!/usr/bin/env python\n\nimport os\nimport shutil\nimport sys\nimport json\n\nfrom setuptools import setup, find_packages\nfrom setuptools.command.develop import develop\nfrom setuptools.command.install import install\nfrom setuptools.command.sdist import sdist\n\nimport pyct.build\n\n\ndef get_setup_version(reponame):\n \"\"\"\n Helper to get the current version from either git describe or the\n .version file (if available).\n \"\"\"\n basepath = os.path.split(__file__)[0]\n version_file_path = os.path.join(basepath, reponame, '.version')\n try:\n from param import version\n except Exception:\n version = None\n if version is not None:\n return version.Version.setup_version(basepath, reponame, archive_commit=\"$Format:%h$\")\n else:\n print(\"WARNING: param>=1.6.0 unavailable. If you are installing a package, \"\n \"this warning can safely be ignored. If you are creating a package or \"\n \"otherwise operating in a git repository, you should install param>=1.6.0.\")\n return json.load(open(version_file_path, 'r'))['version_string']\n\n\ndef _build_paneljs():\n from bokeh.ext import build\n from panel.compiler import bundle_resources\n print(\"Building custom models:\")\n panel_dir = os.path.join(os.path.dirname(__file__), \"panel\")\n build(panel_dir)\n print(\"Bundling custom model resources:\")\n bundle_resources()\n\n\nclass CustomDevelopCommand(develop):\n \"\"\"Custom installation for development mode.\"\"\"\n\n def run(self):\n _build_paneljs()\n develop.run(self)\n\n\nclass CustomInstallCommand(install):\n \"\"\"Custom installation for install mode.\"\"\"\n\n def run(self):\n _build_paneljs()\n install.run(self)\n\n\nclass CustomSdistCommand(sdist):\n \"\"\"Custom installation for sdist mode.\"\"\"\n\n def run(self):\n _build_paneljs()\n sdist.run(self)\n\n\n_COMMANDS = {\n 'develop': CustomDevelopCommand,\n 'install': CustomInstallCommand,\n 'sdist': CustomSdistCommand,\n}\n\ntry:\n from wheel.bdist_wheel import bdist_wheel\n\n class CustomBdistWheelCommand(bdist_wheel):\n \"\"\"Custom bdist_wheel command to force cancelling qiskit-terra wheel\n creation.\"\"\"\n\n def run(self):\n \"\"\"Do nothing so the command intentionally fails.\"\"\"\n _build_paneljs()\n bdist_wheel.run(self)\n\n _COMMANDS['bdist_wheel'] = CustomBdistWheelCommand\nexcept Exception:\n pass\n\n########## dependencies ##########\n\ninstall_requires = [\n 'bokeh >=2.2.2',\n 'param >=1.9.3',\n 'pyviz_comms >=0.7.4',\n 'markdown',\n 'requests',\n 'tqdm',\n 'pyct >=0.4.4'\n]\n\n_recommended = [\n 'notebook >=5.4',\n 'holoviews >=1.13.2',\n 'matplotlib',\n 'pillow',\n 'plotly'\n]\n\n_tests = [\n 'flake8',\n 'parameterized',\n 'pytest',\n 'scipy',\n 'nbsmoke >=0.2.0',\n 'pytest-cov',\n 'codecov',\n 'folium',\n 'ipympl',\n 'twine',\n 'pandas',\n 'ipython >=7.0'\n]\n\nextras_require = {\n 'examples': [\n 'hvplot',\n 'plotly',\n 'altair',\n 'streamz',\n 'vega_datasets',\n 'vtk',\n 'scikit-learn',\n 'datashader',\n 'jupyter_bokeh',\n 'django',\n 'channels',\n 'pyvista <0.27', # temporary fix for tests \n 'ipywidgets',\n 'ipywidgets_bokeh',\n 'ipyvolume',\n 'ipyleaflet'\n ],\n 'tests': _tests,\n 'recommended': _recommended,\n 'doc': _recommended + [\n 'nbsite >=0.6.1',\n 'nbconvert <6.0',\n 'sphinx_holoviz_theme',\n 'selenium',\n 'phantomjs',\n 'graphviz',\n 'lxml',\n ]\n}\n\nextras_require['all'] = sorted(set(sum(extras_require.values(), [])))\n\n# Superset of what's in pyproject.toml (includes non-python\n# dependencies). Also, pyproject.toml isn't supported by all tools\n# anyway (e.g. 
older versions of pip, or conda - which also supports\n# non-python dependencies). Note that setup_requires isn't used\n# because it doesn't work well with pip.\nextras_require['build'] = [\n 'param >=1.9.2',\n 'pyct >=0.4.4',\n 'setuptools >=30.3.0',\n 'bokeh >=2.0.0',\n 'pyviz_comms >=0.6.0',\n # non-python dependency\n 'nodejs >=10.13.0',\n]\n\nsetup_args = dict(\n name='panel',\n version=get_setup_version(\"panel\"),\n description='A high level app and dashboarding solution for Python.',\n long_description=open('README.md').read() if os.path.isfile('README.md') else 'Consult README.md',\n long_description_content_type=\"text/markdown\",\n author=\"HoloViz\",\n author_email=\"[email protected]\",\n maintainer=\"HoloViz\",\n maintainer_email=\"[email protected]\",\n platforms=['Windows', 'Mac OS X', 'Linux'],\n license='BSD',\n url='http://panel.holoviz.org',\n cmdclass=_COMMANDS,\n packages=find_packages(),\n include_package_data=True,\n classifiers=[\n \"License :: OSI Approved :: BSD License\",\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Operating System :: OS Independent\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Science/Research\",\n \"Intended Audience :: Financial and Insurance Industry\",\n \"Intended Audience :: Healthcare Industry\",\n \"Intended Audience :: Information Technology\",\n \"Intended Audience :: Legal Industry\",\n \"Intended Audience :: Other Audience\",\n \"Intended Audience :: Science/Research\",\n \"Natural Language :: English\",\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Scientific/Engineering :: Visualization\",\n \"Topic :: Scientific/Engineering :: Information Analysis\",\n \"Topic :: Office/Business\",\n \"Topic :: Office/Business :: Financial\",\n \"Topic :: Software Development :: Libraries\"],\n python_requires=\">=3.6\",\n entry_points={\n 'console_scripts': [\n 'panel = panel.command:main'\n ]\n },\n install_requires=install_requires,\n extras_require=extras_require,\n tests_require=extras_require['tests']\n)\n\nif __name__ == \"__main__\":\n example_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),\n 'panel', 'examples')\n\n if 'develop' not in sys.argv and 'egg_info' not in sys.argv:\n pyct.build.examples(example_path, __file__, force=True)\n\n version = setup_args['version']\n if 'post' not in version:\n with open('./panel/package.json') as f:\n package_json = json.load(f)\n js_version = package_json['version']\n if version != 'None' and version.split('+')[0] != js_version.replace('-', ''):\n raise ValueError(\"panel.js version (%s) does not match \"\n \"panel version (%s). Cannot build release.\"\n % (js_version, version))\n\n setup(**setup_args)\n\n if os.path.isdir(example_path):\n shutil.rmtree(example_path)\n", "path": "setup.py" } ]
[ { "content": "#!/usr/bin/env python\n\nimport os\nimport shutil\nimport sys\nimport json\n\nfrom setuptools import setup, find_packages\nfrom setuptools.command.develop import develop\nfrom setuptools.command.install import install\nfrom setuptools.command.sdist import sdist\n\nimport pyct.build\n\n\ndef get_setup_version(reponame):\n \"\"\"\n Helper to get the current version from either git describe or the\n .version file (if available).\n \"\"\"\n basepath = os.path.split(__file__)[0]\n version_file_path = os.path.join(basepath, reponame, '.version')\n try:\n from param import version\n except Exception:\n version = None\n if version is not None:\n return version.Version.setup_version(basepath, reponame, archive_commit=\"$Format:%h$\")\n else:\n print(\"WARNING: param>=1.6.0 unavailable. If you are installing a package, \"\n \"this warning can safely be ignored. If you are creating a package or \"\n \"otherwise operating in a git repository, you should install param>=1.6.0.\")\n return json.load(open(version_file_path, 'r'))['version_string']\n\n\ndef _build_paneljs():\n from bokeh.ext import build\n from panel.compiler import bundle_resources\n print(\"Building custom models:\")\n panel_dir = os.path.join(os.path.dirname(__file__), \"panel\")\n build(panel_dir)\n print(\"Bundling custom model resources:\")\n bundle_resources()\n\n\nclass CustomDevelopCommand(develop):\n \"\"\"Custom installation for development mode.\"\"\"\n\n def run(self):\n _build_paneljs()\n develop.run(self)\n\n\nclass CustomInstallCommand(install):\n \"\"\"Custom installation for install mode.\"\"\"\n\n def run(self):\n _build_paneljs()\n install.run(self)\n\n\nclass CustomSdistCommand(sdist):\n \"\"\"Custom installation for sdist mode.\"\"\"\n\n def run(self):\n _build_paneljs()\n sdist.run(self)\n\n\n_COMMANDS = {\n 'develop': CustomDevelopCommand,\n 'install': CustomInstallCommand,\n 'sdist': CustomSdistCommand,\n}\n\ntry:\n from wheel.bdist_wheel import bdist_wheel\n\n class CustomBdistWheelCommand(bdist_wheel):\n \"\"\"Custom bdist_wheel command to force cancelling qiskit-terra wheel\n creation.\"\"\"\n\n def run(self):\n \"\"\"Do nothing so the command intentionally fails.\"\"\"\n _build_paneljs()\n bdist_wheel.run(self)\n\n _COMMANDS['bdist_wheel'] = CustomBdistWheelCommand\nexcept Exception:\n pass\n\n########## dependencies ##########\n\ninstall_requires = [\n 'bokeh >=2.2.2',\n 'param >=1.9.3',\n 'pyviz_comms >=0.7.4',\n 'markdown',\n 'requests',\n 'tqdm',\n 'pyct >=0.4.4'\n]\n\n_recommended = [\n 'notebook >=5.4',\n 'holoviews >=1.13.2',\n 'matplotlib',\n 'pillow',\n 'plotly'\n]\n\n_tests = [\n 'flake8',\n 'parameterized',\n 'pytest',\n 'scipy',\n 'nbsmoke >=0.2.0',\n 'pytest-cov',\n 'codecov',\n 'folium',\n 'ipympl',\n 'twine',\n 'pandas',\n 'ipython >=7.0'\n]\n\nextras_require = {\n 'examples': [\n 'hvplot',\n 'plotly',\n 'altair',\n 'streamz',\n 'vega_datasets',\n 'vtk',\n 'scikit-learn',\n 'datashader',\n 'jupyter_bokeh',\n 'django',\n 'channels',\n 'pyvista',\n 'ipywidgets',\n 'ipywidgets_bokeh',\n 'ipyvolume',\n 'ipyleaflet'\n ],\n 'tests': _tests,\n 'recommended': _recommended,\n 'doc': _recommended + [\n 'nbsite >=0.6.1',\n 'nbconvert <6.0',\n 'sphinx_holoviz_theme',\n 'selenium',\n 'phantomjs',\n 'graphviz',\n 'lxml',\n ]\n}\n\nextras_require['all'] = sorted(set(sum(extras_require.values(), [])))\n\n# Superset of what's in pyproject.toml (includes non-python\n# dependencies). Also, pyproject.toml isn't supported by all tools\n# anyway (e.g. 
older versions of pip, or conda - which also supports\n# non-python dependencies). Note that setup_requires isn't used\n# because it doesn't work well with pip.\nextras_require['build'] = [\n 'param >=1.9.2',\n 'pyct >=0.4.4',\n 'setuptools >=30.3.0',\n 'bokeh >=2.0.0',\n 'pyviz_comms >=0.6.0',\n # non-python dependency\n 'nodejs >=10.13.0',\n]\n\nsetup_args = dict(\n name='panel',\n version=get_setup_version(\"panel\"),\n description='A high level app and dashboarding solution for Python.',\n long_description=open('README.md').read() if os.path.isfile('README.md') else 'Consult README.md',\n long_description_content_type=\"text/markdown\",\n author=\"HoloViz\",\n author_email=\"[email protected]\",\n maintainer=\"HoloViz\",\n maintainer_email=\"[email protected]\",\n platforms=['Windows', 'Mac OS X', 'Linux'],\n license='BSD',\n url='http://panel.holoviz.org',\n cmdclass=_COMMANDS,\n packages=find_packages(),\n include_package_data=True,\n classifiers=[\n \"License :: OSI Approved :: BSD License\",\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Operating System :: OS Independent\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: Science/Research\",\n \"Intended Audience :: Financial and Insurance Industry\",\n \"Intended Audience :: Healthcare Industry\",\n \"Intended Audience :: Information Technology\",\n \"Intended Audience :: Legal Industry\",\n \"Intended Audience :: Other Audience\",\n \"Intended Audience :: Science/Research\",\n \"Natural Language :: English\",\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Scientific/Engineering :: Visualization\",\n \"Topic :: Scientific/Engineering :: Information Analysis\",\n \"Topic :: Office/Business\",\n \"Topic :: Office/Business :: Financial\",\n \"Topic :: Software Development :: Libraries\"],\n python_requires=\">=3.6\",\n entry_points={\n 'console_scripts': [\n 'panel = panel.command:main'\n ]\n },\n install_requires=install_requires,\n extras_require=extras_require,\n tests_require=extras_require['tests']\n)\n\nif __name__ == \"__main__\":\n example_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),\n 'panel', 'examples')\n\n if 'develop' not in sys.argv and 'egg_info' not in sys.argv:\n pyct.build.examples(example_path, __file__, force=True)\n\n version = setup_args['version']\n if 'post' not in version:\n with open('./panel/package.json') as f:\n package_json = json.load(f)\n js_version = package_json['version']\n if version != 'None' and version.split('+')[0] != js_version.replace('-', ''):\n raise ValueError(\"panel.js version (%s) does not match \"\n \"panel version (%s). Cannot build release.\"\n % (js_version, version))\n\n setup(**setup_args)\n\n if os.path.isdir(example_path):\n shutil.rmtree(example_path)\n", "path": "setup.py" } ]
diff --git a/panel/tests/pane/test_vtk.py b/panel/tests/pane/test_vtk.py index 6b65da9fdf..0fffbfd495 100644 --- a/panel/tests/pane/test_vtk.py +++ b/panel/tests/pane/test_vtk.py @@ -68,7 +68,8 @@ def pyvista_render_window(): uniform = examples.load_uniform() #test structured grid scalars=sphere.points[:, 2] - sphere._add_point_array(scalars, 'test', set_active=True) #allow to test scalars + sphere.point_arrays['test'] = scalars #allow to test scalars + sphere.set_active_scalars('test') uniform.set_active_scalars("Spatial Cell Data") diff --git a/setup.py b/setup.py index d2710b496d..cef4cf8f46 100644 --- a/setup.py +++ b/setup.py @@ -137,7 +137,7 @@ def run(self): 'jupyter_bokeh', 'django', 'channels', - 'pyvista <0.27', # temporary fix for tests + 'pyvista', 'ipywidgets', 'ipywidgets_bokeh', 'ipyvolume',
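A minimal sketch of the PyVista migration applied in the patch above, for readers who want to try the replacement outside the test suite. It assumes a PyVista release in the 0.27 line, where the private `_add_point_array` helper is gone and the `point_arrays` mapping and `set_active_scalars` shown in the diff are available; the array name `'test'` is just the label the test uses.

```python
# Sketch only: mirrors the replacement made in panel/tests/pane/test_vtk.py.
# Assumes pyvista >= 0.27, where the private _add_point_array helper is gone.
import pyvista as pv

sphere = pv.Sphere()
scalars = sphere.points[:, 2]

# Old, now-removed private API:
#   sphere._add_point_array(scalars, 'test', set_active=True)

# Public replacement used by the patch: assign through the point_arrays
# mapping, then mark the array as the active scalars.
sphere.point_arrays['test'] = scalars
sphere.set_active_scalars('test')

print(list(sphere.point_arrays.keys()))  # 'test' now appears among the arrays
```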
cloud-custodian__cloud-custodian-5544
aws - add usgs additional partitions
iso and isob are currently missing; it's unclear if boto3 has support for them out of the box, but the golang and nodejs sdks do.
[ { "content": "# Copyright 2015-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport copy\nimport csv\nfrom datetime import datetime, timedelta\nimport json\nimport itertools\nimport ipaddress\nimport logging\nimport os\nimport random\nimport re\nimport sys\nimport threading\nimport time\n\nimport six\nfrom six.moves.urllib import parse as urlparse\nfrom six.moves.urllib.request import getproxies\n\nfrom c7n import config\nfrom c7n.exceptions import ClientError, PolicyValidationError\n\n# Try to play nice in a serverless environment, where we don't require yaml\n\ntry:\n import yaml\nexcept ImportError: # pragma: no cover\n SafeLoader = BaseSafeDumper = yaml = None\nelse:\n try:\n from yaml import CSafeLoader as SafeLoader, CSafeDumper as BaseSafeDumper\n except ImportError: # pragma: no cover\n from yaml import SafeLoader, SafeDumper as BaseSafeDumper\n\n\nclass SafeDumper(BaseSafeDumper or object):\n def ignore_aliases(self, data):\n return True\n\n\nlog = logging.getLogger('custodian.utils')\n\n\nclass UnicodeWriter:\n \"\"\"utf8 encoding csv writer.\"\"\"\n\n def __init__(self, f, dialect=csv.excel, **kwds):\n self.writer = csv.writer(f, dialect=dialect, **kwds)\n if sys.version_info.major == 3:\n self.writerows = self.writer.writerows\n self.writerow = self.writer.writerow\n\n def writerow(self, row):\n self.writer.writerow([s.encode(\"utf-8\") for s in row])\n\n def writerows(self, rows):\n for row in rows:\n self.writerow(row)\n\n\nclass VarsSubstitutionError(Exception):\n pass\n\n\ndef load_file(path, format=None, vars=None):\n if format is None:\n format = 'yaml'\n _, ext = os.path.splitext(path)\n if ext[1:] == 'json':\n format = 'json'\n\n with open(path) as fh:\n contents = fh.read()\n\n if vars:\n try:\n contents = contents.format(**vars)\n except IndexError:\n msg = 'Failed to substitute variable by positional argument.'\n raise VarsSubstitutionError(msg)\n except KeyError as e:\n msg = 'Failed to substitute variables. 
KeyError on {}'.format(str(e))\n raise VarsSubstitutionError(msg)\n\n if format == 'yaml':\n return yaml_load(contents)\n elif format == 'json':\n return loads(contents)\n\n\ndef yaml_load(value):\n if yaml is None:\n raise RuntimeError(\"Yaml not available\")\n return yaml.load(value, Loader=SafeLoader)\n\n\ndef yaml_dump(value):\n if yaml is None:\n raise RuntimeError(\"Yaml not available\")\n return yaml.dump(value, default_flow_style=False, Dumper=SafeDumper)\n\n\ndef loads(body):\n return json.loads(body)\n\n\ndef dumps(data, fh=None, indent=0):\n if fh:\n return json.dump(data, fh, cls=DateTimeEncoder, indent=indent)\n else:\n return json.dumps(data, cls=DateTimeEncoder, indent=indent)\n\n\ndef format_event(evt):\n return json.dumps(evt, indent=2)\n\n\ndef filter_empty(d):\n for k, v in list(d.items()):\n if not v:\n del d[k]\n return d\n\n\ndef type_schema(\n type_name, inherits=None, rinherit=None,\n aliases=None, required=None, **props):\n \"\"\"jsonschema generation helper\n\n params:\n - type_name: name of the type\n - inherits: list of document fragments that are required via anyOf[$ref]\n - rinherit: use another schema as a base for this, basically work around\n inherits issues with additionalProperties and type enums.\n - aliases: additional names this type maybe called\n - required: list of required properties, by default 'type' is required\n - props: additional key value properties\n \"\"\"\n if aliases:\n type_names = [type_name]\n type_names.extend(aliases)\n else:\n type_names = [type_name]\n\n if rinherit:\n s = copy.deepcopy(rinherit)\n s['properties']['type'] = {'enum': type_names}\n else:\n s = {\n 'type': 'object',\n 'properties': {\n 'type': {'enum': type_names}}}\n\n # Ref based inheritance and additional properties don't mix well.\n # https://stackoverflow.com/questions/22689900/json-schema-allof-with-additionalproperties\n if not inherits:\n s['additionalProperties'] = False\n\n s['properties'].update(props)\n if not required:\n required = []\n if isinstance(required, list):\n required.append('type')\n s['required'] = required\n if inherits:\n extended = s\n s = {'allOf': [{'$ref': i} for i in inherits]}\n s['allOf'].append(extended)\n return s\n\n\nclass DateTimeEncoder(json.JSONEncoder):\n\n def default(self, obj):\n if isinstance(obj, datetime):\n return obj.isoformat()\n return json.JSONEncoder.default(self, obj)\n\n\ndef group_by(resources, key):\n \"\"\"Return a mapping of key value to resources with the corresponding value.\n\n Key may be specified as dotted form for nested dictionary lookup\n \"\"\"\n resource_map = {}\n parts = key.split('.')\n for r in resources:\n v = r\n for k in parts:\n v = v.get(k)\n if not isinstance(v, dict):\n break\n resource_map.setdefault(v, []).append(r)\n return resource_map\n\n\ndef chunks(iterable, size=50):\n \"\"\"Break an iterable into lists of size\"\"\"\n batch = []\n for n in iterable:\n batch.append(n)\n if len(batch) % size == 0:\n yield batch\n batch = []\n if batch:\n yield batch\n\n\ndef camelResource(obj):\n \"\"\"Some sources from apis return lowerCased where as describe calls\n\n always return TitleCase, this function turns the former to the later\n \"\"\"\n if not isinstance(obj, dict):\n return obj\n for k in list(obj.keys()):\n v = obj.pop(k)\n obj[\"%s%s\" % (k[0].upper(), k[1:])] = v\n if isinstance(v, dict):\n camelResource(v)\n elif isinstance(v, list):\n list(map(camelResource, v))\n return obj\n\n\ndef get_account_id_from_sts(session):\n response = session.client('sts').get_caller_identity()\n 
return response.get('Account')\n\n\ndef get_account_alias_from_sts(session):\n response = session.client('iam').list_account_aliases()\n aliases = response.get('AccountAliases', ())\n return aliases and aliases[0] or ''\n\n\ndef query_instances(session, client=None, **query):\n \"\"\"Return a list of ec2 instances for the query.\n \"\"\"\n if client is None:\n client = session.client('ec2')\n p = client.get_paginator('describe_instances')\n results = p.paginate(**query)\n return list(itertools.chain(\n *[r[\"Instances\"] for r in itertools.chain(\n *[pp['Reservations'] for pp in results])]))\n\n\nCONN_CACHE = threading.local()\n\n\ndef local_session(factory):\n \"\"\"Cache a session thread local for up to 45m\"\"\"\n factory_region = getattr(factory, 'region', 'global')\n s = getattr(CONN_CACHE, factory_region, {}).get('session')\n t = getattr(CONN_CACHE, factory_region, {}).get('time')\n\n n = time.time()\n if s is not None and t + (60 * 45) > n:\n return s\n s = factory()\n\n setattr(CONN_CACHE, factory_region, {'session': s, 'time': n})\n return s\n\n\ndef reset_session_cache():\n for k in [k for k in dir(CONN_CACHE) if not k.startswith('_')]:\n setattr(CONN_CACHE, k, {})\n\n\ndef annotation(i, k):\n return i.get(k, ())\n\n\ndef set_annotation(i, k, v):\n \"\"\"\n >>> x = {}\n >>> set_annotation(x, 'marker', 'a')\n >>> annotation(x, 'marker')\n ['a']\n \"\"\"\n if not isinstance(i, dict):\n raise ValueError(\"Can only annotate dictionaries\")\n\n if not isinstance(v, list):\n v = [v]\n\n if k in i:\n ev = i.get(k)\n if isinstance(ev, list):\n ev.extend(v)\n else:\n i[k] = v\n\n\ndef parse_s3(s3_path):\n if not s3_path.startswith('s3://'):\n raise ValueError(\"invalid s3 path\")\n ridx = s3_path.find('/', 5)\n if ridx == -1:\n ridx = None\n bucket = s3_path[5:ridx]\n s3_path = s3_path.rstrip('/')\n if ridx is None:\n key_prefix = \"\"\n else:\n key_prefix = s3_path[s3_path.find('/', 5):]\n return s3_path, bucket, key_prefix\n\n\nREGION_PARTITION_MAP = {\n 'us-gov-east-1': 'aws-us-gov',\n 'us-gov-west-1': 'aws-us-gov',\n 'cn-north-1': 'aws-cn',\n 'cn-northwest-1': 'aws-cn'\n}\n\n\ndef get_partition(region):\n return REGION_PARTITION_MAP.get(region, 'aws')\n\n\ndef generate_arn(\n service, resource, partition='aws',\n region=None, account_id=None, resource_type=None, separator='/'):\n \"\"\"Generate an Amazon Resource Name.\n See http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html.\n \"\"\"\n if region and region in REGION_PARTITION_MAP:\n partition = REGION_PARTITION_MAP[region]\n if service == 's3':\n region = ''\n arn = 'arn:%s:%s:%s:%s:' % (\n partition, service, region if region else '', account_id if account_id else '')\n if resource_type:\n if resource.startswith(separator):\n separator = ''\n arn = arn + '%s%s%s' % (resource_type, separator, resource)\n else:\n arn = arn + resource\n return arn\n\n\ndef snapshot_identifier(prefix, db_identifier):\n \"\"\"Return an identifier for a snapshot of a database or cluster.\n \"\"\"\n now = datetime.now()\n return '%s-%s-%s' % (prefix, db_identifier, now.strftime('%Y-%m-%d-%H-%M'))\n\n\nretry_log = logging.getLogger('c7n.retry')\n\n\ndef get_retry(codes=(), max_attempts=8, min_delay=1, log_retries=False):\n \"\"\"Decorator for retry boto3 api call on transient errors.\n\n https://www.awsarchitectureblog.com/2015/03/backoff.html\n https://en.wikipedia.org/wiki/Exponential_backoff\n\n :param codes: A sequence of retryable error codes.\n :param max_attempts: The max number of retries, by default the delay\n time is 
proportional to the max number of attempts.\n :param log_retries: Whether we should log retries, if specified\n specifies the level at which the retry should be logged.\n :param _max_delay: The maximum delay for any retry interval *note*\n this parameter is only exposed for unit testing, as its\n derived from the number of attempts.\n\n Returns a function for invoking aws client calls that\n retries on retryable error codes.\n \"\"\"\n max_delay = max(min_delay, 2) ** max_attempts\n\n def _retry(func, *args, **kw):\n for idx, delay in enumerate(\n backoff_delays(min_delay, max_delay, jitter=True)):\n try:\n return func(*args, **kw)\n except ClientError as e:\n if e.response['Error']['Code'] not in codes:\n raise\n elif idx == max_attempts - 1:\n raise\n if log_retries:\n retry_log.log(\n log_retries,\n \"retrying %s on error:%s attempt:%d last delay:%0.2f\",\n func, e.response['Error']['Code'], idx, delay)\n time.sleep(delay)\n return _retry\n\n\ndef backoff_delays(start, stop, factor=2.0, jitter=False):\n \"\"\"Geometric backoff sequence w/ jitter\n \"\"\"\n cur = start\n while cur <= stop:\n if jitter:\n yield cur - (cur * random.random())\n else:\n yield cur\n cur = cur * factor\n\n\ndef parse_cidr(value):\n \"\"\"Process cidr ranges.\"\"\"\n klass = IPv4Network\n if '/' not in value:\n klass = ipaddress.ip_address\n try:\n v = klass(six.text_type(value))\n except (ipaddress.AddressValueError, ValueError):\n v = None\n return v\n\n\nclass IPv4Network(ipaddress.IPv4Network):\n\n # Override for net 2 net containment comparison\n def __contains__(self, other):\n if other is None:\n return False\n if isinstance(other, ipaddress._BaseNetwork):\n return self.supernet_of(other)\n return super(IPv4Network, self).__contains__(other)\n\n if (sys.version_info.major == 3 and sys.version_info.minor <= 6): # pragma: no cover\n @staticmethod\n def _is_subnet_of(a, b):\n try:\n # Always false if one is v4 and the other is v6.\n if a._version != b._version:\n raise TypeError(f\"{a} and {b} are not of the same version\")\n return (b.network_address <= a.network_address and\n b.broadcast_address >= a.broadcast_address)\n except AttributeError:\n raise TypeError(f\"Unable to test subnet containment \"\n f\"between {a} and {b}\")\n\n def supernet_of(self, other):\n \"\"\"Return True if this network is a supernet of other.\"\"\"\n return self._is_subnet_of(other, self)\n\n\ndef reformat_schema(model):\n \"\"\" Reformat schema to be in a more displayable format. \"\"\"\n if not hasattr(model, 'schema'):\n return \"Model '{}' does not have a schema\".format(model)\n\n if 'properties' not in model.schema:\n return \"Schema in unexpected format.\"\n\n ret = copy.deepcopy(model.schema['properties'])\n\n if 'type' in ret:\n del(ret['type'])\n\n for key in model.schema.get('required', []):\n if key in ret:\n ret[key]['required'] = True\n\n return ret\n\n\n# from botocore.utils avoiding runtime dependency for botocore for other providers.\n# license apache 2.0\ndef set_value_from_jmespath(source, expression, value, is_first=True):\n # This takes a (limited) jmespath-like expression & can set a value based\n # on it.\n # Limitations:\n # * Only handles dotted lookups\n # * No offsets/wildcards/slices/etc.\n bits = expression.split('.', 1)\n current_key, remainder = bits[0], bits[1] if len(bits) > 1 else ''\n\n if not current_key:\n raise ValueError(expression)\n\n if remainder:\n if current_key not in source:\n # We've got something in the expression that's not present in the\n # source (new key). 
If there's any more bits, we'll set the key\n # with an empty dictionary.\n source[current_key] = {}\n\n return set_value_from_jmespath(\n source[current_key],\n remainder,\n value,\n is_first=False\n )\n\n # If we're down to a single key, set it.\n source[current_key] = value\n\n\ndef format_string_values(obj, err_fallback=(IndexError, KeyError), *args, **kwargs):\n \"\"\"\n Format all string values in an object.\n Return the updated object\n \"\"\"\n if isinstance(obj, dict):\n new = {}\n for key in obj.keys():\n new[key] = format_string_values(obj[key], *args, **kwargs)\n return new\n elif isinstance(obj, list):\n new = []\n for item in obj:\n new.append(format_string_values(item, *args, **kwargs))\n return new\n elif isinstance(obj, six.string_types):\n try:\n return obj.format(*args, **kwargs)\n except err_fallback:\n return obj\n else:\n return obj\n\n\ndef parse_url_config(url):\n if url and '://' not in url:\n url += \"://\"\n conf = config.Bag()\n parsed = urlparse.urlparse(url)\n for k in ('scheme', 'netloc', 'path'):\n conf[k] = getattr(parsed, k)\n for k, v in urlparse.parse_qs(parsed.query).items():\n conf[k] = v[0]\n conf['url'] = url\n return conf\n\n\ndef get_proxy_url(url):\n proxies = getproxies()\n url_parts = parse_url_config(url)\n\n proxy_keys = [\n url_parts['scheme'] + '://' + url_parts['netloc'],\n url_parts['scheme'],\n 'all://' + url_parts['netloc'],\n 'all'\n ]\n\n for key in proxy_keys:\n if key in proxies:\n return proxies[key]\n\n return None\n\n\nclass FormatDate(object):\n \"\"\"a datetime wrapper with extended pyformat syntax\"\"\"\n\n date_increment = re.compile(r'\\+[0-9]+[Mdh]')\n\n def __init__(self, d=None):\n self._d = d\n\n @property\n def datetime(self):\n return self._d\n\n @classmethod\n def utcnow(cls):\n return cls(datetime.utcnow())\n\n def __getattr__(self, k):\n return getattr(self._d, k)\n\n def __format__(self, fmt=None):\n d = self._d\n increments = self.date_increment.findall(fmt)\n for i in increments:\n p = {}\n if i[-1] == 'M':\n p['minutes'] = float(i[1:-1])\n if i[-1] == 'h':\n p['hours'] = float(i[1:-1])\n if i[-1] == 'd':\n p['days'] = float(i[1:-1])\n d = d + timedelta(**p)\n if increments:\n fmt = self.date_increment.sub(\"\", fmt)\n return d.__format__(fmt)\n\n\nclass QueryParser(object):\n\n QuerySchema = {}\n type_name = ''\n multi_value = True\n value_key = 'Values'\n\n @classmethod\n def parse(cls, data):\n filters = []\n if not isinstance(data, (tuple, list)):\n raise PolicyValidationError(\n \"%s Query invalid format, must be array of dicts %s\" % (\n cls.type_name,\n data))\n for d in data:\n if not isinstance(d, dict):\n raise PolicyValidationError(\n \"%s Query Filter Invalid %s\" % (cls.type_name, data))\n if \"Name\" not in d or cls.value_key not in d:\n raise PolicyValidationError(\n \"%s Query Filter Invalid: Missing Key or Values in %s\" % (\n cls.type_name, data))\n\n key = d['Name']\n values = d[cls.value_key]\n\n if not cls.multi_value and isinstance(values, list):\n raise PolicyValidationError(\n \"%s QUery Filter Invalid Key: Value:%s Must be single valued\" % (\n cls.type_name, key, values))\n elif not cls.multi_value:\n values = [values]\n\n if key not in cls.QuerySchema and not key.startswith('tag:'):\n raise PolicyValidationError(\n \"%s Query Filter Invalid Key:%s Valid: %s\" % (\n cls.type_name, key, \", \".join(cls.QuerySchema.keys())))\n\n vtype = cls.QuerySchema.get(key)\n if vtype is None and key.startswith('tag'):\n vtype = six.string_types\n\n if not isinstance(values, list):\n raise 
PolicyValidationError(\n \"%s Query Filter Invalid Values, must be array %s\" % (\n cls.type_name, data,))\n\n for v in values:\n if isinstance(vtype, tuple) and vtype != six.string_types:\n if v not in vtype:\n raise PolicyValidationError(\n \"%s Query Filter Invalid Value: %s Valid: %s\" % (\n cls.type_name, v, \", \".join(vtype)))\n elif not isinstance(v, vtype):\n raise PolicyValidationError(\n \"%s Query Filter Invalid Value Type %s\" % (\n cls.type_name, data,))\n\n filters.append(d)\n\n return filters\n\n\ndef get_annotation_prefix(s):\n return 'c7n:{}'.format(s)\n\n\ndef merge_dict(a, b):\n \"\"\"Perform a merge of dictionaries a and b\n\n Any subdictionaries will be recursively merged.\n Any leaf elements in the form of a list or scalar will use the value from a\n \"\"\"\n d = {}\n for k, v in a.items():\n if k not in b:\n d[k] = v\n elif isinstance(v, dict) and isinstance(b[k], dict):\n d[k] = merge_dict(v, b[k])\n for k, v in b.items():\n if k not in d:\n d[k] = v\n return d\n", "path": "c7n/utils.py" } ]
[ { "content": "# Copyright 2015-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport copy\nimport csv\nfrom datetime import datetime, timedelta\nimport json\nimport itertools\nimport ipaddress\nimport logging\nimport os\nimport random\nimport re\nimport sys\nimport threading\nimport time\n\nimport six\nfrom six.moves.urllib import parse as urlparse\nfrom six.moves.urllib.request import getproxies\n\nfrom c7n import config\nfrom c7n.exceptions import ClientError, PolicyValidationError\n\n# Try to play nice in a serverless environment, where we don't require yaml\n\ntry:\n import yaml\nexcept ImportError: # pragma: no cover\n SafeLoader = BaseSafeDumper = yaml = None\nelse:\n try:\n from yaml import CSafeLoader as SafeLoader, CSafeDumper as BaseSafeDumper\n except ImportError: # pragma: no cover\n from yaml import SafeLoader, SafeDumper as BaseSafeDumper\n\n\nclass SafeDumper(BaseSafeDumper or object):\n def ignore_aliases(self, data):\n return True\n\n\nlog = logging.getLogger('custodian.utils')\n\n\nclass UnicodeWriter:\n \"\"\"utf8 encoding csv writer.\"\"\"\n\n def __init__(self, f, dialect=csv.excel, **kwds):\n self.writer = csv.writer(f, dialect=dialect, **kwds)\n if sys.version_info.major == 3:\n self.writerows = self.writer.writerows\n self.writerow = self.writer.writerow\n\n def writerow(self, row):\n self.writer.writerow([s.encode(\"utf-8\") for s in row])\n\n def writerows(self, rows):\n for row in rows:\n self.writerow(row)\n\n\nclass VarsSubstitutionError(Exception):\n pass\n\n\ndef load_file(path, format=None, vars=None):\n if format is None:\n format = 'yaml'\n _, ext = os.path.splitext(path)\n if ext[1:] == 'json':\n format = 'json'\n\n with open(path) as fh:\n contents = fh.read()\n\n if vars:\n try:\n contents = contents.format(**vars)\n except IndexError:\n msg = 'Failed to substitute variable by positional argument.'\n raise VarsSubstitutionError(msg)\n except KeyError as e:\n msg = 'Failed to substitute variables. 
KeyError on {}'.format(str(e))\n raise VarsSubstitutionError(msg)\n\n if format == 'yaml':\n return yaml_load(contents)\n elif format == 'json':\n return loads(contents)\n\n\ndef yaml_load(value):\n if yaml is None:\n raise RuntimeError(\"Yaml not available\")\n return yaml.load(value, Loader=SafeLoader)\n\n\ndef yaml_dump(value):\n if yaml is None:\n raise RuntimeError(\"Yaml not available\")\n return yaml.dump(value, default_flow_style=False, Dumper=SafeDumper)\n\n\ndef loads(body):\n return json.loads(body)\n\n\ndef dumps(data, fh=None, indent=0):\n if fh:\n return json.dump(data, fh, cls=DateTimeEncoder, indent=indent)\n else:\n return json.dumps(data, cls=DateTimeEncoder, indent=indent)\n\n\ndef format_event(evt):\n return json.dumps(evt, indent=2)\n\n\ndef filter_empty(d):\n for k, v in list(d.items()):\n if not v:\n del d[k]\n return d\n\n\ndef type_schema(\n type_name, inherits=None, rinherit=None,\n aliases=None, required=None, **props):\n \"\"\"jsonschema generation helper\n\n params:\n - type_name: name of the type\n - inherits: list of document fragments that are required via anyOf[$ref]\n - rinherit: use another schema as a base for this, basically work around\n inherits issues with additionalProperties and type enums.\n - aliases: additional names this type maybe called\n - required: list of required properties, by default 'type' is required\n - props: additional key value properties\n \"\"\"\n if aliases:\n type_names = [type_name]\n type_names.extend(aliases)\n else:\n type_names = [type_name]\n\n if rinherit:\n s = copy.deepcopy(rinherit)\n s['properties']['type'] = {'enum': type_names}\n else:\n s = {\n 'type': 'object',\n 'properties': {\n 'type': {'enum': type_names}}}\n\n # Ref based inheritance and additional properties don't mix well.\n # https://stackoverflow.com/questions/22689900/json-schema-allof-with-additionalproperties\n if not inherits:\n s['additionalProperties'] = False\n\n s['properties'].update(props)\n if not required:\n required = []\n if isinstance(required, list):\n required.append('type')\n s['required'] = required\n if inherits:\n extended = s\n s = {'allOf': [{'$ref': i} for i in inherits]}\n s['allOf'].append(extended)\n return s\n\n\nclass DateTimeEncoder(json.JSONEncoder):\n\n def default(self, obj):\n if isinstance(obj, datetime):\n return obj.isoformat()\n return json.JSONEncoder.default(self, obj)\n\n\ndef group_by(resources, key):\n \"\"\"Return a mapping of key value to resources with the corresponding value.\n\n Key may be specified as dotted form for nested dictionary lookup\n \"\"\"\n resource_map = {}\n parts = key.split('.')\n for r in resources:\n v = r\n for k in parts:\n v = v.get(k)\n if not isinstance(v, dict):\n break\n resource_map.setdefault(v, []).append(r)\n return resource_map\n\n\ndef chunks(iterable, size=50):\n \"\"\"Break an iterable into lists of size\"\"\"\n batch = []\n for n in iterable:\n batch.append(n)\n if len(batch) % size == 0:\n yield batch\n batch = []\n if batch:\n yield batch\n\n\ndef camelResource(obj):\n \"\"\"Some sources from apis return lowerCased where as describe calls\n\n always return TitleCase, this function turns the former to the later\n \"\"\"\n if not isinstance(obj, dict):\n return obj\n for k in list(obj.keys()):\n v = obj.pop(k)\n obj[\"%s%s\" % (k[0].upper(), k[1:])] = v\n if isinstance(v, dict):\n camelResource(v)\n elif isinstance(v, list):\n list(map(camelResource, v))\n return obj\n\n\ndef get_account_id_from_sts(session):\n response = session.client('sts').get_caller_identity()\n 
return response.get('Account')\n\n\ndef get_account_alias_from_sts(session):\n response = session.client('iam').list_account_aliases()\n aliases = response.get('AccountAliases', ())\n return aliases and aliases[0] or ''\n\n\ndef query_instances(session, client=None, **query):\n \"\"\"Return a list of ec2 instances for the query.\n \"\"\"\n if client is None:\n client = session.client('ec2')\n p = client.get_paginator('describe_instances')\n results = p.paginate(**query)\n return list(itertools.chain(\n *[r[\"Instances\"] for r in itertools.chain(\n *[pp['Reservations'] for pp in results])]))\n\n\nCONN_CACHE = threading.local()\n\n\ndef local_session(factory):\n \"\"\"Cache a session thread local for up to 45m\"\"\"\n factory_region = getattr(factory, 'region', 'global')\n s = getattr(CONN_CACHE, factory_region, {}).get('session')\n t = getattr(CONN_CACHE, factory_region, {}).get('time')\n\n n = time.time()\n if s is not None and t + (60 * 45) > n:\n return s\n s = factory()\n\n setattr(CONN_CACHE, factory_region, {'session': s, 'time': n})\n return s\n\n\ndef reset_session_cache():\n for k in [k for k in dir(CONN_CACHE) if not k.startswith('_')]:\n setattr(CONN_CACHE, k, {})\n\n\ndef annotation(i, k):\n return i.get(k, ())\n\n\ndef set_annotation(i, k, v):\n \"\"\"\n >>> x = {}\n >>> set_annotation(x, 'marker', 'a')\n >>> annotation(x, 'marker')\n ['a']\n \"\"\"\n if not isinstance(i, dict):\n raise ValueError(\"Can only annotate dictionaries\")\n\n if not isinstance(v, list):\n v = [v]\n\n if k in i:\n ev = i.get(k)\n if isinstance(ev, list):\n ev.extend(v)\n else:\n i[k] = v\n\n\ndef parse_s3(s3_path):\n if not s3_path.startswith('s3://'):\n raise ValueError(\"invalid s3 path\")\n ridx = s3_path.find('/', 5)\n if ridx == -1:\n ridx = None\n bucket = s3_path[5:ridx]\n s3_path = s3_path.rstrip('/')\n if ridx is None:\n key_prefix = \"\"\n else:\n key_prefix = s3_path[s3_path.find('/', 5):]\n return s3_path, bucket, key_prefix\n\n\nREGION_PARTITION_MAP = {\n 'us-gov-east-1': 'aws-us-gov',\n 'us-gov-west-1': 'aws-us-gov',\n 'cn-north-1': 'aws-cn',\n 'cn-northwest-1': 'aws-cn',\n 'us-isob-east-1': 'aws-iso-b',\n 'us-iso-east-1': 'aws-iso'\n}\n\n\ndef get_partition(region):\n return REGION_PARTITION_MAP.get(region, 'aws')\n\n\ndef generate_arn(\n service, resource, partition='aws',\n region=None, account_id=None, resource_type=None, separator='/'):\n \"\"\"Generate an Amazon Resource Name.\n See http://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html.\n \"\"\"\n if region and region in REGION_PARTITION_MAP:\n partition = REGION_PARTITION_MAP[region]\n if service == 's3':\n region = ''\n arn = 'arn:%s:%s:%s:%s:' % (\n partition, service, region if region else '', account_id if account_id else '')\n if resource_type:\n if resource.startswith(separator):\n separator = ''\n arn = arn + '%s%s%s' % (resource_type, separator, resource)\n else:\n arn = arn + resource\n return arn\n\n\ndef snapshot_identifier(prefix, db_identifier):\n \"\"\"Return an identifier for a snapshot of a database or cluster.\n \"\"\"\n now = datetime.now()\n return '%s-%s-%s' % (prefix, db_identifier, now.strftime('%Y-%m-%d-%H-%M'))\n\n\nretry_log = logging.getLogger('c7n.retry')\n\n\ndef get_retry(codes=(), max_attempts=8, min_delay=1, log_retries=False):\n \"\"\"Decorator for retry boto3 api call on transient errors.\n\n https://www.awsarchitectureblog.com/2015/03/backoff.html\n https://en.wikipedia.org/wiki/Exponential_backoff\n\n :param codes: A sequence of retryable error codes.\n :param max_attempts: 
The max number of retries, by default the delay\n time is proportional to the max number of attempts.\n :param log_retries: Whether we should log retries, if specified\n specifies the level at which the retry should be logged.\n :param _max_delay: The maximum delay for any retry interval *note*\n this parameter is only exposed for unit testing, as its\n derived from the number of attempts.\n\n Returns a function for invoking aws client calls that\n retries on retryable error codes.\n \"\"\"\n max_delay = max(min_delay, 2) ** max_attempts\n\n def _retry(func, *args, **kw):\n for idx, delay in enumerate(\n backoff_delays(min_delay, max_delay, jitter=True)):\n try:\n return func(*args, **kw)\n except ClientError as e:\n if e.response['Error']['Code'] not in codes:\n raise\n elif idx == max_attempts - 1:\n raise\n if log_retries:\n retry_log.log(\n log_retries,\n \"retrying %s on error:%s attempt:%d last delay:%0.2f\",\n func, e.response['Error']['Code'], idx, delay)\n time.sleep(delay)\n return _retry\n\n\ndef backoff_delays(start, stop, factor=2.0, jitter=False):\n \"\"\"Geometric backoff sequence w/ jitter\n \"\"\"\n cur = start\n while cur <= stop:\n if jitter:\n yield cur - (cur * random.random())\n else:\n yield cur\n cur = cur * factor\n\n\ndef parse_cidr(value):\n \"\"\"Process cidr ranges.\"\"\"\n klass = IPv4Network\n if '/' not in value:\n klass = ipaddress.ip_address\n try:\n v = klass(six.text_type(value))\n except (ipaddress.AddressValueError, ValueError):\n v = None\n return v\n\n\nclass IPv4Network(ipaddress.IPv4Network):\n\n # Override for net 2 net containment comparison\n def __contains__(self, other):\n if other is None:\n return False\n if isinstance(other, ipaddress._BaseNetwork):\n return self.supernet_of(other)\n return super(IPv4Network, self).__contains__(other)\n\n if (sys.version_info.major == 3 and sys.version_info.minor <= 6): # pragma: no cover\n @staticmethod\n def _is_subnet_of(a, b):\n try:\n # Always false if one is v4 and the other is v6.\n if a._version != b._version:\n raise TypeError(f\"{a} and {b} are not of the same version\")\n return (b.network_address <= a.network_address and\n b.broadcast_address >= a.broadcast_address)\n except AttributeError:\n raise TypeError(f\"Unable to test subnet containment \"\n f\"between {a} and {b}\")\n\n def supernet_of(self, other):\n \"\"\"Return True if this network is a supernet of other.\"\"\"\n return self._is_subnet_of(other, self)\n\n\ndef reformat_schema(model):\n \"\"\" Reformat schema to be in a more displayable format. 
\"\"\"\n if not hasattr(model, 'schema'):\n return \"Model '{}' does not have a schema\".format(model)\n\n if 'properties' not in model.schema:\n return \"Schema in unexpected format.\"\n\n ret = copy.deepcopy(model.schema['properties'])\n\n if 'type' in ret:\n del(ret['type'])\n\n for key in model.schema.get('required', []):\n if key in ret:\n ret[key]['required'] = True\n\n return ret\n\n\n# from botocore.utils avoiding runtime dependency for botocore for other providers.\n# license apache 2.0\ndef set_value_from_jmespath(source, expression, value, is_first=True):\n # This takes a (limited) jmespath-like expression & can set a value based\n # on it.\n # Limitations:\n # * Only handles dotted lookups\n # * No offsets/wildcards/slices/etc.\n bits = expression.split('.', 1)\n current_key, remainder = bits[0], bits[1] if len(bits) > 1 else ''\n\n if not current_key:\n raise ValueError(expression)\n\n if remainder:\n if current_key not in source:\n # We've got something in the expression that's not present in the\n # source (new key). If there's any more bits, we'll set the key\n # with an empty dictionary.\n source[current_key] = {}\n\n return set_value_from_jmespath(\n source[current_key],\n remainder,\n value,\n is_first=False\n )\n\n # If we're down to a single key, set it.\n source[current_key] = value\n\n\ndef format_string_values(obj, err_fallback=(IndexError, KeyError), *args, **kwargs):\n \"\"\"\n Format all string values in an object.\n Return the updated object\n \"\"\"\n if isinstance(obj, dict):\n new = {}\n for key in obj.keys():\n new[key] = format_string_values(obj[key], *args, **kwargs)\n return new\n elif isinstance(obj, list):\n new = []\n for item in obj:\n new.append(format_string_values(item, *args, **kwargs))\n return new\n elif isinstance(obj, six.string_types):\n try:\n return obj.format(*args, **kwargs)\n except err_fallback:\n return obj\n else:\n return obj\n\n\ndef parse_url_config(url):\n if url and '://' not in url:\n url += \"://\"\n conf = config.Bag()\n parsed = urlparse.urlparse(url)\n for k in ('scheme', 'netloc', 'path'):\n conf[k] = getattr(parsed, k)\n for k, v in urlparse.parse_qs(parsed.query).items():\n conf[k] = v[0]\n conf['url'] = url\n return conf\n\n\ndef get_proxy_url(url):\n proxies = getproxies()\n url_parts = parse_url_config(url)\n\n proxy_keys = [\n url_parts['scheme'] + '://' + url_parts['netloc'],\n url_parts['scheme'],\n 'all://' + url_parts['netloc'],\n 'all'\n ]\n\n for key in proxy_keys:\n if key in proxies:\n return proxies[key]\n\n return None\n\n\nclass FormatDate(object):\n \"\"\"a datetime wrapper with extended pyformat syntax\"\"\"\n\n date_increment = re.compile(r'\\+[0-9]+[Mdh]')\n\n def __init__(self, d=None):\n self._d = d\n\n @property\n def datetime(self):\n return self._d\n\n @classmethod\n def utcnow(cls):\n return cls(datetime.utcnow())\n\n def __getattr__(self, k):\n return getattr(self._d, k)\n\n def __format__(self, fmt=None):\n d = self._d\n increments = self.date_increment.findall(fmt)\n for i in increments:\n p = {}\n if i[-1] == 'M':\n p['minutes'] = float(i[1:-1])\n if i[-1] == 'h':\n p['hours'] = float(i[1:-1])\n if i[-1] == 'd':\n p['days'] = float(i[1:-1])\n d = d + timedelta(**p)\n if increments:\n fmt = self.date_increment.sub(\"\", fmt)\n return d.__format__(fmt)\n\n\nclass QueryParser(object):\n\n QuerySchema = {}\n type_name = ''\n multi_value = True\n value_key = 'Values'\n\n @classmethod\n def parse(cls, data):\n filters = []\n if not isinstance(data, (tuple, list)):\n raise PolicyValidationError(\n 
\"%s Query invalid format, must be array of dicts %s\" % (\n cls.type_name,\n data))\n for d in data:\n if not isinstance(d, dict):\n raise PolicyValidationError(\n \"%s Query Filter Invalid %s\" % (cls.type_name, data))\n if \"Name\" not in d or cls.value_key not in d:\n raise PolicyValidationError(\n \"%s Query Filter Invalid: Missing Key or Values in %s\" % (\n cls.type_name, data))\n\n key = d['Name']\n values = d[cls.value_key]\n\n if not cls.multi_value and isinstance(values, list):\n raise PolicyValidationError(\n \"%s QUery Filter Invalid Key: Value:%s Must be single valued\" % (\n cls.type_name, key, values))\n elif not cls.multi_value:\n values = [values]\n\n if key not in cls.QuerySchema and not key.startswith('tag:'):\n raise PolicyValidationError(\n \"%s Query Filter Invalid Key:%s Valid: %s\" % (\n cls.type_name, key, \", \".join(cls.QuerySchema.keys())))\n\n vtype = cls.QuerySchema.get(key)\n if vtype is None and key.startswith('tag'):\n vtype = six.string_types\n\n if not isinstance(values, list):\n raise PolicyValidationError(\n \"%s Query Filter Invalid Values, must be array %s\" % (\n cls.type_name, data,))\n\n for v in values:\n if isinstance(vtype, tuple) and vtype != six.string_types:\n if v not in vtype:\n raise PolicyValidationError(\n \"%s Query Filter Invalid Value: %s Valid: %s\" % (\n cls.type_name, v, \", \".join(vtype)))\n elif not isinstance(v, vtype):\n raise PolicyValidationError(\n \"%s Query Filter Invalid Value Type %s\" % (\n cls.type_name, data,))\n\n filters.append(d)\n\n return filters\n\n\ndef get_annotation_prefix(s):\n return 'c7n:{}'.format(s)\n\n\ndef merge_dict(a, b):\n \"\"\"Perform a merge of dictionaries a and b\n\n Any subdictionaries will be recursively merged.\n Any leaf elements in the form of a list or scalar will use the value from a\n \"\"\"\n d = {}\n for k, v in a.items():\n if k not in b:\n d[k] = v\n elif isinstance(v, dict) and isinstance(b[k], dict):\n d[k] = merge_dict(v, b[k])\n for k, v in b.items():\n if k not in d:\n d[k] = v\n return d\n", "path": "c7n/utils.py" } ]
diff --git a/c7n/utils.py b/c7n/utils.py index f52edc195db..c2a324d87d2 100644 --- a/c7n/utils.py +++ b/c7n/utils.py @@ -327,7 +327,9 @@ def parse_s3(s3_path): 'us-gov-east-1': 'aws-us-gov', 'us-gov-west-1': 'aws-us-gov', 'cn-north-1': 'aws-cn', - 'cn-northwest-1': 'aws-cn' + 'cn-northwest-1': 'aws-cn', + 'us-isob-east-1': 'aws-iso-b', + 'us-iso-east-1': 'aws-iso' }
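A small usage sketch of the two helpers the patch above touches, showing how the new iso/isob entries flow through `get_partition` and into `generate_arn`. The service name, resource name, and account id are made up for illustration; the expected output follows from the `generate_arn` implementation shown in the record.

```python
# Illustration only: exercises c7n.utils with the expanded partition map.
from c7n.utils import generate_arn, get_partition

assert get_partition('us-iso-east-1') == 'aws-iso'
assert get_partition('us-isob-east-1') == 'aws-iso-b'
assert get_partition('eu-west-1') == 'aws'   # unmapped regions still default to 'aws'

# generate_arn overrides the partition whenever the region is in the map.
arn = generate_arn('sqs', 'my-queue', region='us-iso-east-1',
                   account_id='123456789012')
print(arn)  # arn:aws-iso:sqs:us-iso-east-1:123456789012:my-queue
```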
flairNLP__flair-435
Cannot install allennlp due to matplotlib dependency conflict
Hello, thanks for the great package. I want to play with ELMoEmbeddings, which requires the allennlp package, not installed by default with Flair. However, installing the latest allennlp fails because it requires matplotlib==2.2.3, while Flair requires >=3.0.0. When I install allennlp==0.7.0 (without the matplotlib requirement), it still fails because of PyTorch conflicts. This makes me wonder which versions are actually compatible.
[ { "content": "from setuptools import setup, find_packages\n\nsetup(\n name='flair',\n version='0.4.0',\n description='A very simple framework for state-of-the-art NLP',\n long_description=open(\"README.md\", encoding='utf-8').read(),\n long_description_content_type=\"text/markdown\",\n author='Alan Akbik',\n author_email='[email protected]',\n url='https://github.com/zalandoresearch/flair',\n packages=find_packages(exclude='test'), # same as name\n license='MIT',\n install_requires=[\n 'torch>=1.0.0',\n 'gensim>=3.4.0',\n 'tqdm>=4.26.0',\n 'segtok>=1.5.7',\n 'matplotlib>=3.0.0',\n 'mpld3>=0.3',\n 'sklearn',\n 'sqlitedict>=1.6.0',\n 'deprecated>=1.2.4',\n 'hyperopt>=0.1.1',\n 'pytorch-pretrained-bert>=0.3.0'\n ],\n include_package_data=True,\n python_requires='>=3.6',\n)\n", "path": "setup.py" } ]
[ { "content": "from setuptools import setup, find_packages\n\nsetup(\n name='flair',\n version='0.4.0',\n description='A very simple framework for state-of-the-art NLP',\n long_description=open(\"README.md\", encoding='utf-8').read(),\n long_description_content_type=\"text/markdown\",\n author='Alan Akbik',\n author_email='[email protected]',\n url='https://github.com/zalandoresearch/flair',\n packages=find_packages(exclude='test'), # same as name\n license='MIT',\n install_requires=[\n 'torch>=1.0.0',\n 'gensim>=3.4.0',\n 'tqdm>=4.26.0',\n 'segtok>=1.5.7',\n 'matplotlib>=2.2.3',\n 'mpld3>=0.3',\n 'sklearn',\n 'sqlitedict>=1.6.0',\n 'deprecated>=1.2.4',\n 'hyperopt>=0.1.1',\n 'pytorch-pretrained-bert>=0.3.0'\n ],\n include_package_data=True,\n python_requires='>=3.6',\n)\n", "path": "setup.py" } ]
diff --git a/requirements.txt b/requirements.txt index 2329957afb..9cee3472c7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ gensim>=3.4.0 pytest>=3.6.4 tqdm>=4.26.0 segtok>=1.5.7 -matplotlib>=3.0.0 +matplotlib>=2.2.3 mpld3==0.3 sklearn sqlitedict>=1.6.0 diff --git a/setup.py b/setup.py index 055627dbd8..98a4e9b483 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ 'gensim>=3.4.0', 'tqdm>=4.26.0', 'segtok>=1.5.7', - 'matplotlib>=3.0.0', + 'matplotlib>=2.2.3', 'mpld3>=0.3', 'sklearn', 'sqlitedict>=1.6.0',
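To make the effect of the relaxed pin concrete, here is a small check (an illustration, not part of either project) comparing the old and new flair constraints against the exact matplotlib==2.2.3 pin that allennlp carried, using `pkg_resources` from setuptools.

```python
# Sketch: why 'matplotlib>=3.0.0' could never be co-installed with allennlp's
# exact 'matplotlib==2.2.3' pin, while the relaxed '>=2.2.3' can.
from pkg_resources import Requirement

old_flair_pin = Requirement.parse('matplotlib>=3.0.0')
new_flair_pin = Requirement.parse('matplotlib>=2.2.3')
allennlp_version = '2.2.3'   # allennlp pins matplotlib==2.2.3

print(allennlp_version in old_flair_pin)   # False -> unsolvable together
print(allennlp_version in new_flair_pin)   # True  -> both constraints satisfiable
```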
dask__distributed-3910
Variables leak virtual clients **What happened**: ```python def _get_number_of_clients(dask_scheduler: Optional[Scheduler] = None) -> Optional[int]: if dask_scheduler is None: return None else: return len(dask_scheduler.clients) n_clients1 = client.run_on_scheduler(_get_number_of_clients) assert n_clients1 is not None for i in range(100): future = create_some_dummy_future() var = Variable(f"var-{i}", client) var.set(future) future.cancel(force=True) var.delete() n_clients2 = client.run_on_scheduler(_get_number_of_clients) assert n_clients2 is not None assert n_client1 == n_clients2 ``` This fails, because here: https://github.com/dask/distributed/blob/1d7640b0172febf9ceef37c2c31241c66ac165eb/distributed/scheduler.py#L2333-L2347 a new virtual client for each and every variable is created but here: https://github.com/dask/distributed/blob/1d7640b0172febf9ceef37c2c31241c66ac165eb/distributed/scheduler.py#L2349-L2361 we never clean up / prune these clients again. **What you expected to happen**: Scheduler-side state is not leaky. **Environment**: - Dask version: 2.17.2 - Distributed version: 2.17.0 - Python version: 3.6 - Operating System: Linux - Install method (conda, pip, source): pip
[ { "content": "import asyncio\nfrom collections import defaultdict\nfrom contextlib import suppress\nimport logging\nimport uuid\n\nfrom tlz import merge\n\nfrom .client import Future, Client\nfrom .utils import tokey, log_errors, TimeoutError, parse_timedelta\nfrom .worker import get_client\n\nlogger = logging.getLogger(__name__)\n\n\nclass VariableExtension:\n \"\"\" An extension for the scheduler to manage queues\n\n This adds the following routes to the scheduler\n\n * variable-set\n * variable-get\n * variable-delete\n \"\"\"\n\n def __init__(self, scheduler):\n self.scheduler = scheduler\n self.variables = dict()\n self.waiting = defaultdict(set)\n self.waiting_conditions = defaultdict(asyncio.Condition)\n self.started = asyncio.Condition()\n\n self.scheduler.handlers.update(\n {\"variable_set\": self.set, \"variable_get\": self.get}\n )\n\n self.scheduler.stream_handlers[\"variable-future-release\"] = self.future_release\n self.scheduler.stream_handlers[\"variable_delete\"] = self.delete\n\n self.scheduler.extensions[\"variables\"] = self\n\n async def set(self, comm=None, name=None, key=None, data=None, client=None):\n if key is not None:\n record = {\"type\": \"Future\", \"value\": key}\n self.scheduler.client_desires_keys(keys=[key], client=\"variable-%s\" % name)\n else:\n record = {\"type\": \"msgpack\", \"value\": data}\n try:\n old = self.variables[name]\n except KeyError:\n pass\n else:\n if old[\"type\"] == \"Future\" and old[\"value\"] != key:\n asyncio.ensure_future(self.release(old[\"value\"], name))\n if name not in self.variables:\n async with self.started:\n self.started.notify_all()\n self.variables[name] = record\n\n async def release(self, key, name):\n while self.waiting[key, name]:\n async with self.waiting_conditions[name]:\n await self.waiting_conditions[name].wait()\n\n self.scheduler.client_releases_keys(keys=[key], client=\"variable-%s\" % name)\n del self.waiting[key, name]\n\n async def future_release(self, name=None, key=None, token=None, client=None):\n self.waiting[key, name].remove(token)\n if not self.waiting[key, name]:\n async with self.waiting_conditions[name]:\n self.waiting_conditions[name].notify_all()\n\n async def get(self, comm=None, name=None, client=None, timeout=None):\n start = self.scheduler.loop.time()\n while name not in self.variables:\n if timeout is not None:\n left = timeout - (self.scheduler.loop.time() - start)\n else:\n left = None\n if left and left < 0:\n raise TimeoutError()\n try:\n\n async def _(): # Python 3.6 is odd and requires special help here\n await self.started.acquire()\n await self.started.wait()\n\n await asyncio.wait_for(_(), timeout=left)\n finally:\n with suppress(RuntimeError): # Python 3.6 loses lock on finally clause\n self.started.release()\n\n record = self.variables[name]\n if record[\"type\"] == \"Future\":\n key = record[\"value\"]\n token = uuid.uuid4().hex\n ts = self.scheduler.tasks.get(key)\n state = ts.state if ts is not None else \"lost\"\n msg = {\"token\": token, \"state\": state}\n if state == \"erred\":\n msg[\"exception\"] = ts.exception_blame.exception\n msg[\"traceback\"] = ts.exception_blame.traceback\n record = merge(record, msg)\n self.waiting[key, name].add(token)\n return record\n\n async def delete(self, comm=None, name=None, client=None):\n with log_errors():\n try:\n old = self.variables[name]\n except KeyError:\n pass\n else:\n if old[\"type\"] == \"Future\":\n await self.release(old[\"value\"], name)\n with suppress(KeyError):\n del self.waiting_conditions[name]\n with 
suppress(KeyError):\n del self.variables[name]\n\n\nclass Variable:\n \"\"\" Distributed Global Variable\n\n This allows multiple clients to share futures and data between each other\n with a single mutable variable. All metadata is sequentialized through the\n scheduler. Race conditions can occur.\n\n Values must be either Futures or msgpack-encodable data (ints, lists,\n strings, etc..) All data will be kept and sent through the scheduler, so\n it is wise not to send too much. If you want to share a large amount of\n data then ``scatter`` it and share the future instead.\n\n .. warning::\n\n This object is experimental and has known issues in Python 2\n\n Parameters\n ----------\n name: string (optional)\n Name used by other clients and the scheduler to identify the variable.\n If not given, a random name will be generated.\n client: Client (optional)\n Client used for communication with the scheduler. Defaults to the\n value of ``Client.current()``.\n\n Examples\n --------\n >>> from dask.distributed import Client, Variable # doctest: +SKIP\n >>> client = Client() # doctest: +SKIP\n >>> x = Variable('x') # doctest: +SKIP\n >>> x.set(123) # docttest: +SKIP\n >>> x.get() # docttest: +SKIP\n 123\n >>> future = client.submit(f, x) # doctest: +SKIP\n >>> x.set(future) # doctest: +SKIP\n\n See Also\n --------\n Queue: shared multi-producer/multi-consumer queue between clients\n \"\"\"\n\n def __init__(self, name=None, client=None, maxsize=0):\n self.client = client or Client.current()\n self.name = name or \"variable-\" + uuid.uuid4().hex\n\n async def _set(self, value):\n if isinstance(value, Future):\n await self.client.scheduler.variable_set(\n key=tokey(value.key), name=self.name\n )\n else:\n await self.client.scheduler.variable_set(data=value, name=self.name)\n\n def set(self, value, **kwargs):\n \"\"\" Set the value of this variable\n\n Parameters\n ----------\n value: Future or object\n Must be either a Future or a msgpack-encodable value\n \"\"\"\n return self.client.sync(self._set, value, **kwargs)\n\n async def _get(self, timeout=None):\n d = await self.client.scheduler.variable_get(\n timeout=timeout, name=self.name, client=self.client.id\n )\n if d[\"type\"] == \"Future\":\n value = Future(d[\"value\"], self.client, inform=True, state=d[\"state\"])\n if d[\"state\"] == \"erred\":\n value._state.set_error(d[\"exception\"], d[\"traceback\"])\n self.client._send_to_scheduler(\n {\n \"op\": \"variable-future-release\",\n \"name\": self.name,\n \"key\": d[\"value\"],\n \"token\": d[\"token\"],\n }\n )\n else:\n value = d[\"value\"]\n return value\n\n def get(self, timeout=None, **kwargs):\n \"\"\" Get the value of this variable\n\n Parameters\n ----------\n timeout: number or string or timedelta, optional\n Time in seconds to wait before timing out.\n Instead of number of seconds, it is also possible to specify\n a timedelta in string format, e.g. 
\"200ms\".\n \"\"\"\n timeout = parse_timedelta(timeout)\n return self.client.sync(self._get, timeout=timeout, **kwargs)\n\n def delete(self):\n \"\"\" Delete this variable\n\n Caution, this affects all clients currently pointing to this variable.\n \"\"\"\n if self.client.status == \"running\": # TODO: can leave zombie futures\n self.client._send_to_scheduler({\"op\": \"variable_delete\", \"name\": self.name})\n\n def __getstate__(self):\n return (self.name, self.client.scheduler.address)\n\n def __setstate__(self, state):\n name, address = state\n try:\n client = get_client(address)\n assert client.scheduler.address == address\n except (AttributeError, AssertionError):\n client = Client(address, set_as_default=False)\n self.__init__(name=name, client=client)\n", "path": "distributed/variable.py" } ]
[ { "content": "import asyncio\nfrom collections import defaultdict\nfrom contextlib import suppress\nimport logging\nimport uuid\n\nfrom tlz import merge\n\nfrom .client import Future, Client\nfrom .utils import tokey, log_errors, TimeoutError, parse_timedelta\nfrom .worker import get_client\n\nlogger = logging.getLogger(__name__)\n\n\nclass VariableExtension:\n \"\"\" An extension for the scheduler to manage queues\n\n This adds the following routes to the scheduler\n\n * variable-set\n * variable-get\n * variable-delete\n \"\"\"\n\n def __init__(self, scheduler):\n self.scheduler = scheduler\n self.variables = dict()\n self.waiting = defaultdict(set)\n self.waiting_conditions = defaultdict(asyncio.Condition)\n self.started = asyncio.Condition()\n\n self.scheduler.handlers.update(\n {\"variable_set\": self.set, \"variable_get\": self.get}\n )\n\n self.scheduler.stream_handlers[\"variable-future-release\"] = self.future_release\n self.scheduler.stream_handlers[\"variable_delete\"] = self.delete\n\n self.scheduler.extensions[\"variables\"] = self\n\n async def set(self, comm=None, name=None, key=None, data=None, client=None):\n if key is not None:\n record = {\"type\": \"Future\", \"value\": key}\n self.scheduler.client_desires_keys(keys=[key], client=\"variable-%s\" % name)\n else:\n record = {\"type\": \"msgpack\", \"value\": data}\n try:\n old = self.variables[name]\n except KeyError:\n pass\n else:\n if old[\"type\"] == \"Future\" and old[\"value\"] != key:\n asyncio.ensure_future(self.release(old[\"value\"], name))\n if name not in self.variables:\n async with self.started:\n self.started.notify_all()\n self.variables[name] = record\n\n async def release(self, key, name):\n while self.waiting[key, name]:\n async with self.waiting_conditions[name]:\n await self.waiting_conditions[name].wait()\n\n self.scheduler.client_releases_keys(keys=[key], client=\"variable-%s\" % name)\n del self.waiting[key, name]\n\n async def future_release(self, name=None, key=None, token=None, client=None):\n self.waiting[key, name].remove(token)\n if not self.waiting[key, name]:\n async with self.waiting_conditions[name]:\n self.waiting_conditions[name].notify_all()\n\n async def get(self, comm=None, name=None, client=None, timeout=None):\n start = self.scheduler.loop.time()\n while name not in self.variables:\n if timeout is not None:\n left = timeout - (self.scheduler.loop.time() - start)\n else:\n left = None\n if left and left < 0:\n raise TimeoutError()\n try:\n\n async def _(): # Python 3.6 is odd and requires special help here\n await self.started.acquire()\n await self.started.wait()\n\n await asyncio.wait_for(_(), timeout=left)\n finally:\n with suppress(RuntimeError): # Python 3.6 loses lock on finally clause\n self.started.release()\n\n record = self.variables[name]\n if record[\"type\"] == \"Future\":\n key = record[\"value\"]\n token = uuid.uuid4().hex\n ts = self.scheduler.tasks.get(key)\n state = ts.state if ts is not None else \"lost\"\n msg = {\"token\": token, \"state\": state}\n if state == \"erred\":\n msg[\"exception\"] = ts.exception_blame.exception\n msg[\"traceback\"] = ts.exception_blame.traceback\n record = merge(record, msg)\n self.waiting[key, name].add(token)\n return record\n\n async def delete(self, comm=None, name=None, client=None):\n with log_errors():\n try:\n old = self.variables[name]\n except KeyError:\n pass\n else:\n if old[\"type\"] == \"Future\":\n await self.release(old[\"value\"], name)\n with suppress(KeyError):\n del self.waiting_conditions[name]\n with 
suppress(KeyError):\n del self.variables[name]\n\n self.scheduler.remove_client(\"variable-%s\" % name)\n\n\nclass Variable:\n \"\"\" Distributed Global Variable\n\n This allows multiple clients to share futures and data between each other\n with a single mutable variable. All metadata is sequentialized through the\n scheduler. Race conditions can occur.\n\n Values must be either Futures or msgpack-encodable data (ints, lists,\n strings, etc..) All data will be kept and sent through the scheduler, so\n it is wise not to send too much. If you want to share a large amount of\n data then ``scatter`` it and share the future instead.\n\n .. warning::\n\n This object is experimental and has known issues in Python 2\n\n Parameters\n ----------\n name: string (optional)\n Name used by other clients and the scheduler to identify the variable.\n If not given, a random name will be generated.\n client: Client (optional)\n Client used for communication with the scheduler. Defaults to the\n value of ``Client.current()``.\n\n Examples\n --------\n >>> from dask.distributed import Client, Variable # doctest: +SKIP\n >>> client = Client() # doctest: +SKIP\n >>> x = Variable('x') # doctest: +SKIP\n >>> x.set(123) # docttest: +SKIP\n >>> x.get() # docttest: +SKIP\n 123\n >>> future = client.submit(f, x) # doctest: +SKIP\n >>> x.set(future) # doctest: +SKIP\n\n See Also\n --------\n Queue: shared multi-producer/multi-consumer queue between clients\n \"\"\"\n\n def __init__(self, name=None, client=None, maxsize=0):\n self.client = client or Client.current()\n self.name = name or \"variable-\" + uuid.uuid4().hex\n\n async def _set(self, value):\n if isinstance(value, Future):\n await self.client.scheduler.variable_set(\n key=tokey(value.key), name=self.name\n )\n else:\n await self.client.scheduler.variable_set(data=value, name=self.name)\n\n def set(self, value, **kwargs):\n \"\"\" Set the value of this variable\n\n Parameters\n ----------\n value: Future or object\n Must be either a Future or a msgpack-encodable value\n \"\"\"\n return self.client.sync(self._set, value, **kwargs)\n\n async def _get(self, timeout=None):\n d = await self.client.scheduler.variable_get(\n timeout=timeout, name=self.name, client=self.client.id\n )\n if d[\"type\"] == \"Future\":\n value = Future(d[\"value\"], self.client, inform=True, state=d[\"state\"])\n if d[\"state\"] == \"erred\":\n value._state.set_error(d[\"exception\"], d[\"traceback\"])\n self.client._send_to_scheduler(\n {\n \"op\": \"variable-future-release\",\n \"name\": self.name,\n \"key\": d[\"value\"],\n \"token\": d[\"token\"],\n }\n )\n else:\n value = d[\"value\"]\n return value\n\n def get(self, timeout=None, **kwargs):\n \"\"\" Get the value of this variable\n\n Parameters\n ----------\n timeout: number or string or timedelta, optional\n Time in seconds to wait before timing out.\n Instead of number of seconds, it is also possible to specify\n a timedelta in string format, e.g. 
\"200ms\".\n \"\"\"\n timeout = parse_timedelta(timeout)\n return self.client.sync(self._get, timeout=timeout, **kwargs)\n\n def delete(self):\n \"\"\" Delete this variable\n\n Caution, this affects all clients currently pointing to this variable.\n \"\"\"\n if self.client.status == \"running\": # TODO: can leave zombie futures\n self.client._send_to_scheduler({\"op\": \"variable_delete\", \"name\": self.name})\n\n def __getstate__(self):\n return (self.name, self.client.scheduler.address)\n\n def __setstate__(self, state):\n name, address = state\n try:\n client = get_client(address)\n assert client.scheduler.address == address\n except (AttributeError, AssertionError):\n client = Client(address, set_as_default=False)\n self.__init__(name=name, client=client)\n", "path": "distributed/variable.py" } ]
diff --git a/distributed/tests/test_variable.py b/distributed/tests/test_variable.py index 1e707626235..5d9ece6ee54 100644 --- a/distributed/tests/test_variable.py +++ b/distributed/tests/test_variable.py @@ -263,3 +263,22 @@ def test_future_erred_sync(client): with pytest.raises(ZeroDivisionError): future2.result() + + +@gen_cluster(client=True) +async def test_variables_do_not_leak_client(c, s, a, b): + # https://github.com/dask/distributed/issues/3899 + clients_pre = set(s.clients) + + # setup variable with future + x = Variable("x") + future = c.submit(inc, 1) + await x.set(future) + + # complete teardown + x.delete() + + start = time() + while set(s.clients) != clients_pre: + await asyncio.sleep(0.01) + assert time() < start + 5 diff --git a/distributed/variable.py b/distributed/variable.py index 82c407a494f..b20273031ab 100644 --- a/distributed/variable.py +++ b/distributed/variable.py @@ -119,6 +119,8 @@ async def delete(self, comm=None, name=None, client=None): with suppress(KeyError): del self.variables[name] + self.scheduler.remove_client("variable-%s" % name) + class Variable: """ Distributed Global Variable
weni-ai__bothub-engine-226
Training with no sentences

Reported by @johncordeiro in https://github.com/Ilhasoft/bothub/issues/36
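The report is terse, but the underlying problem is that a repository update with zero example sentences can still be considered ready for training. A minimal, hypothetical stand-in for that guard (the real change, visible in the updated `ready_for_train` in the after-files below, adds an equivalent `self.examples.count() == 0` check):

```python
# Simplified, hypothetical sketch: refuse to train when there are no examples at all.
def ready_for_train(example_count: int, other_requirements_met: bool) -> bool:
    if example_count == 0:
        return False  # nothing to train on
    return other_requirements_met

print(ready_for_train(0, True))   # False -> training with no sentences is blocked
print(ready_for_train(25, True))  # True
```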
[ { "content": "import uuid\nimport base64\nimport requests\n\nfrom functools import reduce\nfrom django.db import models\nfrom django.utils.translation import gettext as _\nfrom django.utils import timezone\nfrom django.conf import settings\nfrom django.core.validators import RegexValidator, _lazy_re_compile\nfrom django.core.mail import send_mail\nfrom django.template.loader import render_to_string\nfrom django.dispatch import receiver\nfrom django.core.exceptions import ValidationError\n\nfrom bothub.authentication.models import User\n\nfrom . import languages\nfrom .exceptions import RepositoryUpdateAlreadyStartedTraining\nfrom .exceptions import RepositoryUpdateAlreadyTrained\nfrom .exceptions import TrainingNotAllowed\nfrom .exceptions import DoesNotHaveTranslation\n\n\nitem_key_regex = _lazy_re_compile(r'^[-a-z0-9_]+\\Z')\nvalidate_item_key = RegexValidator(\n item_key_regex,\n _('Enter a valid value consisting of lowercase letters, numbers, ' +\n 'underscores or hyphens.'),\n 'invalid'\n)\n\n\ndef can_t_be_other(value):\n if value == 'other':\n raise ValidationError(_('The label can\\'t be named as \"other\"'))\n\n\nclass RepositoryCategory(models.Model):\n class Meta:\n verbose_name = _('repository category')\n verbose_name_plural = _('repository categories')\n\n name = models.CharField(\n _('name'),\n max_length=32)\n\n def __str__(self):\n return self.name # pragma: no cover\n\n\nclass RepositoryQuerySet(models.QuerySet):\n def publics(self):\n return self.filter(is_private=False)\n\n def order_by_relevance(self):\n return self \\\n .annotate(votes_summ=models.Sum('votes__vote')) \\\n .annotate(examples_sum=models.Sum('updates__added')) \\\n .order_by('-votes_summ', '-examples_sum', '-created_at')\n\n\nclass RepositoryManager(models.Manager):\n def get_queryset(self):\n return RepositoryQuerySet(self.model, using=self._db)\n\n\nclass Repository(models.Model):\n class Meta:\n verbose_name = _('repository')\n verbose_name_plural = _('repositories')\n unique_together = ['owner', 'slug']\n\n CATEGORIES_HELP_TEXT = _('Categories for approaching repositories with ' +\n 'the same purpose')\n DESCRIPTION_HELP_TEXT = _('Tell what your bot do!')\n\n uuid = models.UUIDField(\n _('UUID'),\n primary_key=True,\n default=uuid.uuid4,\n editable=False)\n owner = models.ForeignKey(\n User,\n models.CASCADE,\n related_name='repositories')\n name = models.CharField(\n _('name'),\n max_length=64,\n help_text=_('Repository display name'))\n slug = models.SlugField(\n _('slug'),\n max_length=32,\n help_text=_('Easy way to found and share repositories'))\n language = models.CharField(\n _('language'),\n max_length=5,\n help_text=_('Repository\\'s examples language. The examples can be ' +\n 'translated to other languages.'),\n validators=[\n languages.validate_language,\n ])\n use_language_model_featurizer = models.BooleanField(\n _('Use language model featurizer'),\n help_text=_('You can use language featurizer to get words ' +\n 'similarity. 
You need less examples to create a great ' +\n 'bot.'),\n default=True)\n use_competing_intents = models.BooleanField(\n _('Use competing intents'),\n help_text=_('When using competing intents the confidence of the ' +\n 'prediction is distributed in all the intents.'),\n default=False)\n categories = models.ManyToManyField(\n RepositoryCategory,\n help_text=CATEGORIES_HELP_TEXT)\n description = models.TextField(\n _('description'),\n blank=True,\n help_text=DESCRIPTION_HELP_TEXT)\n is_private = models.BooleanField(\n _('private'),\n default=False,\n help_text=_('Your repository can be private, only you can see and' +\n ' use, or can be public and all community can see and ' +\n 'use.'))\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = RepositoryManager()\n\n nlp_train_url = '{}train/'.format(settings.BOTHUB_NLP_BASE_URL)\n nlp_analyze_url = '{}parse/'.format(settings.BOTHUB_NLP_BASE_URL)\n\n @classmethod\n def request_nlp_train(cls, user_authorization):\n r = requests.post( # pragma: no cover\n cls.nlp_train_url,\n data={},\n headers={'Authorization': 'Bearer {}'.format(\n user_authorization.uuid)})\n return r # pragma: no cover\n\n @classmethod\n def request_nlp_analyze(cls, user_authorization, data):\n r = requests.post( # pragma: no cover\n cls.nlp_analyze_url,\n data={\n 'text': data.get('text'),\n 'language': data.get('language'),\n },\n headers={'Authorization': 'Bearer {}'.format(\n user_authorization.uuid)})\n return r # pragma: no cover\n\n @property\n def available_languages(self):\n examples = self.examples()\n examples_languages = examples.values_list(\n 'repository_update__language',\n flat=True)\n translations_languages = examples.annotate(\n translations_count=models.Count('translations')).filter(\n translations_count__gt=0).values_list(\n 'translations__language',\n flat=True)\n return list(set(\n [self.language] +\n list(examples_languages) +\n list(translations_languages)))\n\n @property\n def languages_status(self):\n return dict(\n map(\n lambda language: (\n language,\n self.language_status(language)),\n settings.SUPPORTED_LANGUAGES.keys(),\n ))\n\n @property\n def current_updates(self):\n return map(\n lambda lang: self.current_update(lang),\n self.available_languages)\n\n @property\n def requirements_to_train(self):\n return dict(filter(\n lambda l: l[1],\n map(\n lambda u: (u.language, u.requirements_to_train,),\n self.current_updates)))\n\n @property\n def languages_ready_for_train(self):\n return dict(map(\n lambda u: (u.language, u.ready_for_train,),\n self.current_updates))\n\n @property\n def ready_for_train(self):\n return reduce(\n lambda current, u: u.ready_for_train or current,\n self.current_updates,\n False)\n\n @property\n def languages_warnings(self):\n return dict(filter(\n lambda w: len(w[1]) > 0,\n map(\n lambda u: (u.language, u.warnings,),\n self.current_updates)))\n\n @property\n def votes_sum(self):\n return self.votes.aggregate(\n votes_sum=models.Sum('vote')).get('votes_sum')\n\n @property\n def intents(self):\n return list(set(self.examples(\n exclude_deleted=True).exclude(\n intent='').values_list(\n 'intent',\n flat=True)))\n\n @property\n def current_entities(self):\n return self.entities.filter(value__in=self.examples(\n exclude_deleted=True).exclude(\n entities__entity__value__isnull=True).values_list(\n 'entities__entity__value',\n flat=True).distinct())\n\n @property\n def entities_list(self):\n return self.current_entities.values_list(\n 'value',\n flat=True).distinct()\n\n @property\n def 
current_labels(self):\n return self.labels.filter(\n entities__value__in=self.entities_list).distinct()\n\n @property\n def labels_list(self):\n return self.current_labels.values_list(\n 'value',\n flat=True).distinct()\n\n @property\n def other_entities(self):\n return self.current_entities.filter(label__isnull=True)\n\n @property\n def admins(self):\n admins = [self.owner] + [\n authorization.user for authorization in\n self.authorizations.filter(role=RepositoryAuthorization.ROLE_ADMIN)\n ]\n return list(set(admins))\n\n def __str__(self):\n return 'Repository {} - {}/{}'.format(\n self.name,\n self.owner.nickname,\n self.slug,\n )\n\n def examples(self, language=None, exclude_deleted=True, queryset=None):\n if queryset is None:\n queryset = RepositoryExample.objects\n query = queryset.filter(\n repository_update__repository=self)\n if language:\n query = query.filter(\n repository_update__language=language)\n if exclude_deleted:\n return query.exclude(deleted_in__isnull=False)\n return query\n\n def language_status(self, language):\n is_base_language = self.language == language\n examples = self.examples(language)\n base_examples = self.examples(self.language)\n base_translations = RepositoryTranslatedExample.objects.filter(\n original_example__in=base_examples,\n language=language)\n\n examples_count = examples.count()\n base_examples_count = base_examples.count()\n base_translations_count = base_translations.count()\n base_translations_percentage = (\n base_translations_count / (\n base_examples_count if base_examples_count > 0 else 1)) * 100\n\n return {\n 'is_base_language': is_base_language,\n 'examples': {\n 'count': examples_count,\n 'entities': list(\n set(\n filter(\n lambda x: x,\n examples.values_list(\n 'entities__entity',\n flat=True).distinct()))),\n },\n 'base_translations': {\n 'count': base_translations_count,\n 'percentage': base_translations_percentage,\n },\n }\n\n def current_update(self, language=None):\n language = language or self.language\n repository_update, created = self.updates.get_or_create(\n language=language,\n training_started_at=None)\n return repository_update\n\n def last_trained_update(self, language=None):\n language = language or self.language\n return self.updates.filter(\n language=language,\n by__isnull=False,\n trained_at__isnull=False).first()\n\n def get_user_authorization(self, user):\n if user.is_anonymous:\n return RepositoryAuthorization(repository=self)\n get, created = RepositoryAuthorization.objects.get_or_create(\n user=user,\n repository=self)\n return get\n\n def get_absolute_url(self):\n return '{}{}/{}/'.format(\n settings.BOTHUB_WEBAPP_BASE_URL,\n self.owner.nickname,\n self.slug)\n\n\nclass RepositoryUpdate(models.Model):\n class Meta:\n verbose_name = _('repository update')\n verbose_name_plural = _('repository updates')\n ordering = ['-created_at']\n\n MIN_EXAMPLES_PER_INTENT = 2\n MIN_EXAMPLES_PER_ENTITY = 2\n RECOMMENDED_INTENTS = 2\n\n repository = models.ForeignKey(\n Repository,\n models.CASCADE,\n related_name='updates')\n language = models.CharField(\n _('language'),\n max_length=5,\n validators=[\n languages.validate_language,\n ])\n use_language_model_featurizer = models.BooleanField(default=True)\n use_competing_intents = models.BooleanField(default=False)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n bot_data = models.TextField(\n _('bot data'),\n blank=True,\n editable=False)\n by = models.ForeignKey(\n User,\n models.CASCADE,\n blank=True,\n null=True)\n training_started_at = 
models.DateTimeField(\n _('training started at'),\n blank=True,\n null=True)\n trained_at = models.DateTimeField(\n _('trained at'),\n blank=True,\n null=True)\n failed_at = models.DateTimeField(\n _('failed at'),\n blank=True,\n null=True)\n training_log = models.TextField(\n _('training log'),\n blank=True,\n editable=False)\n\n @property\n def examples(self):\n examples = self.repository.examples(exclude_deleted=False).filter(\n models.Q(repository_update__language=self.language) |\n models.Q(translations__language=self.language))\n if self.training_started_at:\n t_started_at = self.training_started_at\n examples = examples.exclude(\n models.Q(repository_update__created_at__gt=t_started_at) |\n models.Q(deleted_in=self) |\n models.Q(deleted_in__training_started_at__lt=t_started_at))\n else:\n examples = examples.exclude(deleted_in__isnull=False)\n return examples\n\n @property\n def requirements_to_train(self):\n try:\n self.validate_init_train()\n except RepositoryUpdateAlreadyTrained as e:\n return [_('This bot version has already been trained.')]\n except RepositoryUpdateAlreadyStartedTraining as e:\n return [_('This bot version is being trained.')]\n\n r = []\n\n intents = self.examples.values_list('intent', flat=True)\n\n if '' in intents:\n r.append(_('All examples need have a intent.'))\n\n weak_intents = self.examples.values('intent').annotate(\n intent_count=models.Count('id')).order_by().exclude(\n intent_count__gte=self.MIN_EXAMPLES_PER_INTENT)\n if weak_intents.exists():\n for i in weak_intents:\n r.append(_('Intent \"{}\" has only {} examples. ' +\n 'Minimum is {}.').format(\n i.get('intent'),\n i.get('intent_count'),\n self.MIN_EXAMPLES_PER_INTENT))\n\n weak_entities = self.examples.annotate(\n es_count=models.Count('entities')).filter(\n es_count__gte=1).values(\n 'entities__entity__value').annotate(\n entities_count=models.Count('id')).order_by().exclude(\n entities_count__gte=self.MIN_EXAMPLES_PER_ENTITY)\n if weak_entities.exists():\n for e in weak_entities:\n r.append(_('Entity \"{}\" has only {} examples. 
' +\n 'Minimum is {}.').format(\n e.get('entities__entity__value'),\n e.get('entities_count'),\n self.MIN_EXAMPLES_PER_ENTITY))\n\n return r\n\n @property\n def ready_for_train(self):\n if self.training_started_at:\n return False\n\n previous_update = self.repository.updates.filter(\n language=self.language,\n by__isnull=False,\n training_started_at__isnull=False,\n created_at__lt=self.created_at).first()\n\n if previous_update:\n if previous_update.use_language_model_featurizer is not \\\n self.repository.use_language_model_featurizer:\n return True\n if previous_update.use_competing_intents is not \\\n self.repository.use_competing_intents:\n return True\n if previous_update.failed_at:\n return True\n\n if not self.added.exists() and \\\n not self.translated_added.exists() and \\\n not self.deleted.exists():\n return False\n\n return len(self.requirements_to_train) is 0\n\n @property\n def intents(self):\n return list(set(self.examples.values_list('intent', flat=True)))\n\n @property\n def warnings(self):\n w = []\n if 0 < len(self.intents) < self.RECOMMENDED_INTENTS:\n w.append(_('You need to have at least {} intents for the ' +\n 'algorithm to identify intents.').format(\n self.RECOMMENDED_INTENTS))\n return w\n\n def __str__(self):\n return 'Repository Update #{}'.format(self.id)\n\n def validate_init_train(self, by=None):\n if self.trained_at:\n raise RepositoryUpdateAlreadyTrained()\n if self.training_started_at:\n raise RepositoryUpdateAlreadyStartedTraining()\n if by:\n authorization = self.repository.get_user_authorization(by)\n if not authorization.can_write:\n raise TrainingNotAllowed()\n\n def start_training(self, by):\n self.validate_init_train(by)\n self.by = by\n self.training_started_at = timezone.now()\n self.use_language_model_featurizer = self.repository \\\n .use_language_model_featurizer\n self.use_competing_intents = self.repository.use_competing_intents\n self.save(\n update_fields=[\n 'by',\n 'training_started_at',\n 'use_language_model_featurizer',\n 'use_competing_intents',\n ])\n\n def save_training(self, bot_data):\n if self.trained_at:\n raise RepositoryUpdateAlreadyTrained()\n\n self.trained_at = timezone.now()\n self.bot_data = base64.b64encode(bot_data).decode('utf8')\n self.save(\n update_fields=[\n 'trained_at',\n 'bot_data',\n ])\n\n def get_bot_data(self):\n return base64.b64decode(self.bot_data)\n\n def train_fail(self):\n self.failed_at = timezone.now()\n self.save(\n update_fields=[\n 'failed_at',\n ])\n\n\nclass RepositoryExample(models.Model):\n class Meta:\n verbose_name = _('repository example')\n verbose_name_plural = _('repository examples')\n ordering = ['-created_at']\n\n repository_update = models.ForeignKey(\n RepositoryUpdate,\n models.CASCADE,\n related_name='added',\n editable=False)\n deleted_in = models.ForeignKey(\n RepositoryUpdate,\n models.CASCADE,\n related_name='deleted',\n blank=True,\n null=True)\n text = models.TextField(\n _('text'),\n help_text=_('Example text'))\n intent = models.CharField(\n _('intent'),\n max_length=64,\n default='no_intent',\n help_text=_('Example intent reference'),\n validators=[validate_item_key])\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n @property\n def language(self):\n return self.repository_update.language\n\n def has_valid_entities(self, language=None):\n if not language or language == self.repository_update.language:\n return True\n return self.get_translation(language).has_valid_entities\n\n def get_translation(self, language):\n try:\n return 
self.translations.get(language=language)\n except RepositoryTranslatedExample.DoesNotExist:\n raise DoesNotHaveTranslation()\n\n def get_text(self, language=None):\n if not language or language == self.repository_update.language:\n return self.text\n return self.get_translation(language).text\n\n def get_entities(self, language):\n if not language or language == self.repository_update.language:\n return self.entities.all()\n return self.get_translation(language).entities.all()\n\n def delete(self):\n self.deleted_in = self.repository_update.repository.current_update(\n self.repository_update.language)\n self.save(update_fields=['deleted_in'])\n\n\nclass RepositoryTranslatedExampleManager(models.Manager):\n def create(self, *args, original_example=None, language=None, **kwargs):\n repository = original_example.repository_update.repository\n return super().create(\n *args,\n repository_update=repository.current_update(language),\n original_example=original_example,\n language=language,\n **kwargs)\n\n\nclass RepositoryTranslatedExample(models.Model):\n class Meta:\n verbose_name = _('repository translated example')\n verbose_name_plural = _('repository translated examples')\n unique_together = ['original_example', 'language']\n ordering = ['-created_at']\n\n repository_update = models.ForeignKey(\n RepositoryUpdate,\n models.CASCADE,\n related_name='translated_added',\n editable=False)\n original_example = models.ForeignKey(\n RepositoryExample,\n models.CASCADE,\n related_name='translations',\n editable=False,\n help_text=_('Example object'))\n language = models.CharField(\n _('language'),\n max_length=5,\n help_text=_('Translation language'),\n validators=[\n languages.validate_language,\n ])\n text = models.TextField(\n _('text'),\n help_text=_('Translation text'))\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = RepositoryTranslatedExampleManager()\n\n def entities_list_lambda_sort(item):\n return item.get('entity')\n\n @classmethod\n def same_entities_validator(cls, a, b):\n a_len = len(a)\n if a_len != len(b):\n return False\n a_sorted = sorted(\n a,\n key=cls.entities_list_lambda_sort)\n b_sorted = sorted(\n b,\n key=cls.entities_list_lambda_sort)\n for i in range(a_len):\n if a_sorted[i].get('entity') != b_sorted[i].get('entity'):\n return False\n return True\n\n @classmethod\n def count_entities(cls, entities_list, to_str=False):\n r = {}\n for e in entities_list:\n r.update({e.get('entity'): r.get('entity', 0) + 1})\n if to_str:\n r = ', '.join(map(\n lambda x: '{} {}'.format(x[1], x[0]),\n r.items())) if entities_list else 'no entities'\n return r\n\n @property\n def has_valid_entities(self):\n original_entities = self.original_example.entities.all()\n my_entities = self.entities.all()\n return RepositoryTranslatedExample.same_entities_validator(\n list(map(lambda x: x.to_dict, original_entities)),\n list(map(lambda x: x.to_dict, my_entities)))\n\n\nclass RepositoryEntityLabelQueryset(models.QuerySet):\n def get(self, repository, value):\n try:\n return super().get(\n repository=repository,\n value=value)\n except self.model.DoesNotExist as e:\n return super().create(\n repository=repository,\n value=value)\n\n\nclass RepositoryEntityLabelManager(models.Manager):\n def get_queryset(self):\n return RepositoryEntityLabelQueryset(self.model, using=self._db)\n\n\nclass RepositoryEntityLabel(models.Model):\n class Meta:\n unique_together = ['repository', 'value']\n\n repository = models.ForeignKey(\n Repository,\n on_delete=models.CASCADE,\n 
related_name='labels')\n value = models.CharField(\n _('label'),\n max_length=64,\n validators=[\n validate_item_key,\n can_t_be_other,\n ],\n blank=True)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = RepositoryEntityLabelManager()\n\n def examples(self, exclude_deleted=True):\n return self.repository.examples(\n exclude_deleted=exclude_deleted).filter(\n entities__entity__label=self)\n\n\nclass RepositoryEntityQueryset(models.QuerySet):\n def get(self, repository, value):\n try:\n return super().get(\n repository=repository,\n value=value)\n except self.model.DoesNotExist as e:\n return super().create(\n repository=repository,\n value=value)\n\n\nclass RepositoryEntityManager(models.Manager):\n def get_queryset(self):\n return RepositoryEntityQueryset(self.model, using=self._db)\n\n\nclass RepositoryEntity(models.Model):\n class Meta:\n unique_together = ['repository', 'value']\n\n repository = models.ForeignKey(\n Repository,\n on_delete=models.CASCADE,\n related_name='entities')\n value = models.CharField(\n _('entity'),\n max_length=64,\n help_text=_('Entity name'),\n validators=[validate_item_key])\n label = models.ForeignKey(\n RepositoryEntityLabel,\n on_delete=models.CASCADE,\n related_name='entities',\n null=True,\n blank=True)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = RepositoryEntityManager()\n\n def set_label(self, value):\n if not value:\n self.label = None\n else:\n self.label = RepositoryEntityLabel.objects.get(\n repository=self.repository,\n value=value)\n\n\nclass EntityBaseQueryset(models.QuerySet):\n def create(self, entity, **kwargs):\n if type(entity) is not RepositoryEntity:\n instance = self.model(**kwargs)\n repository = instance.example.repository_update.repository\n entity = RepositoryEntity.objects.get(\n repository=repository,\n value=entity)\n return super().create(\n entity=entity,\n **kwargs)\n\n\nclass EntityBaseManager(models.Manager):\n def get_queryset(self):\n return EntityBaseQueryset(self.model, using=self._db)\n\n\nclass EntityBase(models.Model):\n class Meta:\n verbose_name = _('repository example entity')\n verbose_name_plural = _('repository example entities')\n abstract = True\n\n start = models.PositiveIntegerField(\n _('start'),\n help_text=_('Start index of entity value in example text'))\n end = models.PositiveIntegerField(\n _('end'),\n help_text=_('End index of entity value in example text'))\n entity = models.ForeignKey(\n RepositoryEntity,\n on_delete=models.CASCADE)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = EntityBaseManager()\n\n @property\n def example(self):\n return self.get_example()\n\n @property\n def value(self):\n return self.example.text[self.start:self.end]\n\n @property\n def rasa_nlu_data(self):\n return {\n 'start': self.start,\n 'end': self.end,\n 'value': self.value,\n 'entity': self.entity.value,\n }\n\n @property\n def to_dict(self):\n return self.get_rasa_nlu_data()\n\n def get_example(self):\n pass # pragma: no cover\n\n def get_rasa_nlu_data(self, label_as_entity=False):\n return {\n 'start': self.start,\n 'end': self.end,\n 'entity': self.entity.label.value\n if label_as_entity else self.entity.value,\n }\n\n\nclass RepositoryExampleEntity(EntityBase):\n repository_example = models.ForeignKey(\n RepositoryExample,\n models.CASCADE,\n related_name='entities',\n editable=False,\n help_text=_('Example object'))\n\n def get_example(self):\n return 
self.repository_example\n\n\nclass RepositoryTranslatedExampleEntity(EntityBase):\n repository_translated_example = models.ForeignKey(\n RepositoryTranslatedExample,\n models.CASCADE,\n related_name='entities',\n editable=False,\n help_text=_('Translated example object'))\n\n def get_example(self):\n return self.repository_translated_example\n\n\nclass RepositoryAuthorization(models.Model):\n class Meta:\n verbose_name = _('repository authorization')\n verbose_name_plural = _('repository authorizations')\n unique_together = ['user', 'repository']\n\n LEVEL_NOTHING = 0\n LEVEL_READER = 1\n LEVEL_CONTRIBUTOR = 2\n LEVEL_ADMIN = 3\n\n ROLE_NOT_SETTED = 0\n ROLE_USER = 1\n ROLE_CONTRIBUTOR = 2\n ROLE_ADMIN = 3\n\n ROLE_CHOICES = [\n (ROLE_NOT_SETTED, _('not set')),\n (ROLE_USER, _('user')),\n (ROLE_CONTRIBUTOR, _('contributor')),\n (ROLE_ADMIN, _('admin')),\n ]\n\n uuid = models.UUIDField(\n _('UUID'),\n primary_key=True,\n default=uuid.uuid4,\n editable=False)\n user = models.ForeignKey(\n User,\n models.CASCADE)\n repository = models.ForeignKey(\n Repository,\n models.CASCADE,\n related_name='authorizations')\n role = models.PositiveIntegerField(\n _('role'),\n choices=ROLE_CHOICES,\n default=ROLE_NOT_SETTED)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n @property\n def level(self):\n try:\n user = self.user\n except User.DoesNotExist:\n user = None\n\n if user and self.repository.owner == user:\n return RepositoryAuthorization.LEVEL_ADMIN\n\n if self.role == RepositoryAuthorization.ROLE_NOT_SETTED:\n if self.repository.is_private:\n return RepositoryAuthorization.LEVEL_NOTHING\n return RepositoryAuthorization.LEVEL_READER\n\n if self.role == RepositoryAuthorization.ROLE_USER:\n return RepositoryAuthorization.LEVEL_READER\n\n if self.role == RepositoryAuthorization.ROLE_CONTRIBUTOR:\n return RepositoryAuthorization.LEVEL_CONTRIBUTOR\n\n if self.role == RepositoryAuthorization.ROLE_ADMIN:\n return RepositoryAuthorization.LEVEL_ADMIN\n\n return RepositoryAuthorization.LEVEL_NOTHING\n\n @property\n def can_read(self):\n return self.level in [\n RepositoryAuthorization.LEVEL_READER,\n RepositoryAuthorization.LEVEL_CONTRIBUTOR,\n RepositoryAuthorization.LEVEL_ADMIN,\n ]\n\n @property\n def can_contribute(self):\n return self.level in [\n RepositoryAuthorization.LEVEL_CONTRIBUTOR,\n RepositoryAuthorization.LEVEL_ADMIN,\n ]\n\n @property\n def can_write(self):\n return self.level in [\n RepositoryAuthorization.LEVEL_ADMIN,\n ]\n\n @property\n def is_admin(self):\n return self.level == RepositoryAuthorization.LEVEL_ADMIN\n\n @property\n def is_owner(self):\n try:\n user = self.user\n except User.DoesNotExist:\n return False\n return self.repository.owner == user\n\n @property\n def role_verbose(self):\n return dict(RepositoryAuthorization.ROLE_CHOICES).get(self.role)\n\n def send_new_role_email(self, responsible=None):\n if not settings.SEND_EMAILS:\n return False\n responsible_name = responsible and responsible.name \\\n or self.repository.owner.name\n context = {\n 'responsible_name': responsible_name,\n 'user_name': self.user.name,\n 'repository_name': self.repository.name,\n 'repository_url': self.repository.get_absolute_url(),\n 'new_role': self.role_verbose,\n }\n send_mail(\n _('New role in {}').format(self.repository.name),\n render_to_string(\n 'common/emails/new_role.txt',\n context),\n None,\n [self.user.email],\n html_message=render_to_string(\n 'common/emails/new_role.html',\n context))\n\n\nclass RepositoryVote(models.Model):\n UP_VOTE = 1\n DOWN_VOTE 
= -1\n NEUTRAL_VOTE = 0\n VOTE_CHOICES = [\n (UP_VOTE, _('Up'),),\n (DOWN_VOTE, _('Down')),\n (NEUTRAL_VOTE, _('Neutral')),\n ]\n\n class Meta:\n verbose_name = _('repository vote')\n verbose_name_plural = _('repository votes')\n unique_together = [\n 'user',\n 'repository',\n ]\n\n user = models.ForeignKey(\n User,\n models.CASCADE,\n related_name='repository_votes')\n repository = models.ForeignKey(\n Repository,\n models.CASCADE,\n related_name='votes')\n vote = models.IntegerField(\n _('vote'),\n choices=VOTE_CHOICES)\n\n\nclass RequestRepositoryAuthorization(models.Model):\n class Meta:\n unique_together = ['user', 'repository']\n\n user = models.ForeignKey(\n User,\n models.CASCADE,\n related_name='requests')\n repository = models.ForeignKey(\n Repository,\n models.CASCADE,\n related_name='requests')\n text = models.CharField(\n _('text'),\n max_length=250)\n approved_by = models.ForeignKey(\n User,\n models.CASCADE,\n blank=True,\n null=True)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True,\n editable=False)\n\n def send_new_request_email_to_admins(self):\n if not settings.SEND_EMAILS:\n return False\n context = {\n 'user_name': self.user.name,\n 'repository_name': self.repository.name,\n 'text': self.text,\n 'repository_url': self.repository.get_absolute_url(),\n }\n for admin in self.repository.admins:\n send_mail(\n _('New authorization request in {}').format(\n self.repository.name),\n render_to_string(\n 'common/emails/new_request.txt',\n context),\n None,\n [admin.email],\n html_message=render_to_string(\n 'common/emails/new_request.html',\n context))\n\n def send_request_rejected_email(self):\n if not settings.SEND_EMAILS:\n return False\n context = {\n 'repository_name': self.repository.name,\n }\n send_mail(\n _('Access denied to {}').format(\n self.repository.name),\n render_to_string(\n 'common/emails/request_rejected.txt',\n context),\n None,\n [self.user.email],\n html_message=render_to_string(\n 'common/emails/request_rejected.html',\n context))\n\n def send_request_approved_email(self):\n if not settings.SEND_EMAILS:\n return False\n context = {\n 'admin_name': self.approved_by.name,\n 'repository_name': self.repository.name,\n }\n send_mail(\n _('Authorization Request Approved to {}').format(\n self.repository.name),\n render_to_string(\n 'common/emails/request_approved.txt',\n context),\n None,\n [self.user.email],\n html_message=render_to_string(\n 'common/emails/request_approved.html',\n context))\n\n\n@receiver(models.signals.pre_save, sender=RequestRepositoryAuthorization)\ndef set_user_role_on_approved(instance, **kwargs):\n current = None\n try:\n current = RequestRepositoryAuthorization.objects.get(pk=instance.pk)\n except RequestRepositoryAuthorization.DoesNotExist as e:\n pass\n\n if not current:\n return False\n\n if current.approved_by is None and \\\n current.approved_by is not instance.approved_by:\n user_authorization = instance.repository.get_user_authorization(\n instance.user)\n user_authorization.role = RepositoryAuthorization.ROLE_USER\n user_authorization.save(update_fields=['role'])\n instance.send_request_approved_email()\n else:\n raise ValidationError(\n _('You can change approved_by just one time.'))\n\n\n@receiver(models.signals.post_save, sender=RequestRepositoryAuthorization)\ndef send_new_request_email_to_admins_on_created(instance, created, **kwargs):\n if created:\n instance.send_new_request_email_to_admins()\n\n\n@receiver(models.signals.post_delete, sender=RequestRepositoryAuthorization)\ndef 
send_request_rejected_email(instance, **kwargs):\n instance.send_request_rejected_email()\n", "path": "bothub/common/models.py" } ]
[ { "content": "import uuid\nimport base64\nimport requests\n\nfrom functools import reduce\nfrom django.db import models\nfrom django.utils.translation import gettext as _\nfrom django.utils import timezone\nfrom django.conf import settings\nfrom django.core.validators import RegexValidator, _lazy_re_compile\nfrom django.core.mail import send_mail\nfrom django.template.loader import render_to_string\nfrom django.dispatch import receiver\nfrom django.core.exceptions import ValidationError\n\nfrom bothub.authentication.models import User\n\nfrom . import languages\nfrom .exceptions import RepositoryUpdateAlreadyStartedTraining\nfrom .exceptions import RepositoryUpdateAlreadyTrained\nfrom .exceptions import TrainingNotAllowed\nfrom .exceptions import DoesNotHaveTranslation\n\n\nitem_key_regex = _lazy_re_compile(r'^[-a-z0-9_]+\\Z')\nvalidate_item_key = RegexValidator(\n item_key_regex,\n _('Enter a valid value consisting of lowercase letters, numbers, ' +\n 'underscores or hyphens.'),\n 'invalid'\n)\n\n\ndef can_t_be_other(value):\n if value == 'other':\n raise ValidationError(_('The label can\\'t be named as \"other\"'))\n\n\nclass RepositoryCategory(models.Model):\n class Meta:\n verbose_name = _('repository category')\n verbose_name_plural = _('repository categories')\n\n name = models.CharField(\n _('name'),\n max_length=32)\n\n def __str__(self):\n return self.name # pragma: no cover\n\n\nclass RepositoryQuerySet(models.QuerySet):\n def publics(self):\n return self.filter(is_private=False)\n\n def order_by_relevance(self):\n return self \\\n .annotate(votes_summ=models.Sum('votes__vote')) \\\n .annotate(examples_sum=models.Sum('updates__added')) \\\n .order_by('-votes_summ', '-examples_sum', '-created_at')\n\n\nclass RepositoryManager(models.Manager):\n def get_queryset(self):\n return RepositoryQuerySet(self.model, using=self._db)\n\n\nclass Repository(models.Model):\n class Meta:\n verbose_name = _('repository')\n verbose_name_plural = _('repositories')\n unique_together = ['owner', 'slug']\n\n CATEGORIES_HELP_TEXT = _('Categories for approaching repositories with ' +\n 'the same purpose')\n DESCRIPTION_HELP_TEXT = _('Tell what your bot do!')\n\n uuid = models.UUIDField(\n _('UUID'),\n primary_key=True,\n default=uuid.uuid4,\n editable=False)\n owner = models.ForeignKey(\n User,\n models.CASCADE,\n related_name='repositories')\n name = models.CharField(\n _('name'),\n max_length=64,\n help_text=_('Repository display name'))\n slug = models.SlugField(\n _('slug'),\n max_length=32,\n help_text=_('Easy way to found and share repositories'))\n language = models.CharField(\n _('language'),\n max_length=5,\n help_text=_('Repository\\'s examples language. The examples can be ' +\n 'translated to other languages.'),\n validators=[\n languages.validate_language,\n ])\n use_language_model_featurizer = models.BooleanField(\n _('Use language model featurizer'),\n help_text=_('You can use language featurizer to get words ' +\n 'similarity. 
You need less examples to create a great ' +\n 'bot.'),\n default=True)\n use_competing_intents = models.BooleanField(\n _('Use competing intents'),\n help_text=_('When using competing intents the confidence of the ' +\n 'prediction is distributed in all the intents.'),\n default=False)\n categories = models.ManyToManyField(\n RepositoryCategory,\n help_text=CATEGORIES_HELP_TEXT)\n description = models.TextField(\n _('description'),\n blank=True,\n help_text=DESCRIPTION_HELP_TEXT)\n is_private = models.BooleanField(\n _('private'),\n default=False,\n help_text=_('Your repository can be private, only you can see and' +\n ' use, or can be public and all community can see and ' +\n 'use.'))\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = RepositoryManager()\n\n nlp_train_url = '{}train/'.format(settings.BOTHUB_NLP_BASE_URL)\n nlp_analyze_url = '{}parse/'.format(settings.BOTHUB_NLP_BASE_URL)\n\n @classmethod\n def request_nlp_train(cls, user_authorization):\n r = requests.post( # pragma: no cover\n cls.nlp_train_url,\n data={},\n headers={'Authorization': 'Bearer {}'.format(\n user_authorization.uuid)})\n return r # pragma: no cover\n\n @classmethod\n def request_nlp_analyze(cls, user_authorization, data):\n r = requests.post( # pragma: no cover\n cls.nlp_analyze_url,\n data={\n 'text': data.get('text'),\n 'language': data.get('language'),\n },\n headers={'Authorization': 'Bearer {}'.format(\n user_authorization.uuid)})\n return r # pragma: no cover\n\n @property\n def available_languages(self):\n examples = self.examples()\n examples_languages = examples.values_list(\n 'repository_update__language',\n flat=True)\n translations_languages = examples.annotate(\n translations_count=models.Count('translations')).filter(\n translations_count__gt=0).values_list(\n 'translations__language',\n flat=True)\n return list(set(\n [self.language] +\n list(examples_languages) +\n list(translations_languages)))\n\n @property\n def languages_status(self):\n return dict(\n map(\n lambda language: (\n language,\n self.language_status(language)),\n settings.SUPPORTED_LANGUAGES.keys(),\n ))\n\n @property\n def current_updates(self):\n return map(\n lambda lang: self.current_update(lang),\n self.available_languages)\n\n @property\n def requirements_to_train(self):\n return dict(filter(\n lambda l: l[1],\n map(\n lambda u: (u.language, u.requirements_to_train,),\n self.current_updates)))\n\n @property\n def languages_ready_for_train(self):\n return dict(map(\n lambda u: (u.language, u.ready_for_train,),\n self.current_updates))\n\n @property\n def ready_for_train(self):\n return reduce(\n lambda current, u: u.ready_for_train or current,\n self.current_updates,\n False)\n\n @property\n def languages_warnings(self):\n return dict(filter(\n lambda w: len(w[1]) > 0,\n map(\n lambda u: (u.language, u.warnings,),\n self.current_updates)))\n\n @property\n def votes_sum(self):\n return self.votes.aggregate(\n votes_sum=models.Sum('vote')).get('votes_sum')\n\n @property\n def intents(self):\n return list(set(self.examples(\n exclude_deleted=True).exclude(\n intent='').values_list(\n 'intent',\n flat=True)))\n\n @property\n def current_entities(self):\n return self.entities.filter(value__in=self.examples(\n exclude_deleted=True).exclude(\n entities__entity__value__isnull=True).values_list(\n 'entities__entity__value',\n flat=True).distinct())\n\n @property\n def entities_list(self):\n return self.current_entities.values_list(\n 'value',\n flat=True).distinct()\n\n @property\n def 
current_labels(self):\n return self.labels.filter(\n entities__value__in=self.entities_list).distinct()\n\n @property\n def labels_list(self):\n return self.current_labels.values_list(\n 'value',\n flat=True).distinct()\n\n @property\n def other_entities(self):\n return self.current_entities.filter(label__isnull=True)\n\n @property\n def admins(self):\n admins = [self.owner] + [\n authorization.user for authorization in\n self.authorizations.filter(role=RepositoryAuthorization.ROLE_ADMIN)\n ]\n return list(set(admins))\n\n def __str__(self):\n return 'Repository {} - {}/{}'.format(\n self.name,\n self.owner.nickname,\n self.slug,\n )\n\n def examples(self, language=None, exclude_deleted=True, queryset=None):\n if queryset is None:\n queryset = RepositoryExample.objects\n query = queryset.filter(\n repository_update__repository=self)\n if language:\n query = query.filter(\n repository_update__language=language)\n if exclude_deleted:\n return query.exclude(deleted_in__isnull=False)\n return query\n\n def language_status(self, language):\n is_base_language = self.language == language\n examples = self.examples(language)\n base_examples = self.examples(self.language)\n base_translations = RepositoryTranslatedExample.objects.filter(\n original_example__in=base_examples,\n language=language)\n\n examples_count = examples.count()\n base_examples_count = base_examples.count()\n base_translations_count = base_translations.count()\n base_translations_percentage = (\n base_translations_count / (\n base_examples_count if base_examples_count > 0 else 1)) * 100\n\n return {\n 'is_base_language': is_base_language,\n 'examples': {\n 'count': examples_count,\n 'entities': list(\n set(\n filter(\n lambda x: x,\n examples.values_list(\n 'entities__entity',\n flat=True).distinct()))),\n },\n 'base_translations': {\n 'count': base_translations_count,\n 'percentage': base_translations_percentage,\n },\n }\n\n def current_update(self, language=None):\n language = language or self.language\n repository_update, created = self.updates.get_or_create(\n language=language,\n training_started_at=None)\n return repository_update\n\n def last_trained_update(self, language=None):\n language = language or self.language\n return self.updates.filter(\n language=language,\n by__isnull=False,\n trained_at__isnull=False).first()\n\n def get_user_authorization(self, user):\n if user.is_anonymous:\n return RepositoryAuthorization(repository=self)\n get, created = RepositoryAuthorization.objects.get_or_create(\n user=user,\n repository=self)\n return get\n\n def get_absolute_url(self):\n return '{}{}/{}/'.format(\n settings.BOTHUB_WEBAPP_BASE_URL,\n self.owner.nickname,\n self.slug)\n\n\nclass RepositoryUpdate(models.Model):\n class Meta:\n verbose_name = _('repository update')\n verbose_name_plural = _('repository updates')\n ordering = ['-created_at']\n\n MIN_EXAMPLES_PER_INTENT = 2\n MIN_EXAMPLES_PER_ENTITY = 2\n RECOMMENDED_INTENTS = 2\n\n repository = models.ForeignKey(\n Repository,\n models.CASCADE,\n related_name='updates')\n language = models.CharField(\n _('language'),\n max_length=5,\n validators=[\n languages.validate_language,\n ])\n use_language_model_featurizer = models.BooleanField(default=True)\n use_competing_intents = models.BooleanField(default=False)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n bot_data = models.TextField(\n _('bot data'),\n blank=True,\n editable=False)\n by = models.ForeignKey(\n User,\n models.CASCADE,\n blank=True,\n null=True)\n training_started_at = 
models.DateTimeField(\n _('training started at'),\n blank=True,\n null=True)\n trained_at = models.DateTimeField(\n _('trained at'),\n blank=True,\n null=True)\n failed_at = models.DateTimeField(\n _('failed at'),\n blank=True,\n null=True)\n training_log = models.TextField(\n _('training log'),\n blank=True,\n editable=False)\n\n @property\n def examples(self):\n examples = self.repository.examples(exclude_deleted=False).filter(\n models.Q(repository_update__language=self.language) |\n models.Q(translations__language=self.language))\n if self.training_started_at:\n t_started_at = self.training_started_at\n examples = examples.exclude(\n models.Q(repository_update__created_at__gt=t_started_at) |\n models.Q(deleted_in=self) |\n models.Q(deleted_in__training_started_at__lt=t_started_at))\n else:\n examples = examples.exclude(deleted_in__isnull=False)\n return examples\n\n @property\n def requirements_to_train(self):\n try:\n self.validate_init_train()\n except RepositoryUpdateAlreadyTrained as e:\n return [_('This bot version has already been trained.')]\n except RepositoryUpdateAlreadyStartedTraining as e:\n return [_('This bot version is being trained.')]\n\n r = []\n\n intents = self.examples.values_list('intent', flat=True)\n\n if '' in intents:\n r.append(_('All examples need have a intent.'))\n\n weak_intents = self.examples.values('intent').annotate(\n intent_count=models.Count('id')).order_by().exclude(\n intent_count__gte=self.MIN_EXAMPLES_PER_INTENT)\n if weak_intents.exists():\n for i in weak_intents:\n r.append(_('Intent \"{}\" has only {} examples. ' +\n 'Minimum is {}.').format(\n i.get('intent'),\n i.get('intent_count'),\n self.MIN_EXAMPLES_PER_INTENT))\n\n weak_entities = self.examples.annotate(\n es_count=models.Count('entities')).filter(\n es_count__gte=1).values(\n 'entities__entity__value').annotate(\n entities_count=models.Count('id')).order_by().exclude(\n entities_count__gte=self.MIN_EXAMPLES_PER_ENTITY)\n if weak_entities.exists():\n for e in weak_entities:\n r.append(_('Entity \"{}\" has only {} examples. 
' +\n 'Minimum is {}.').format(\n e.get('entities__entity__value'),\n e.get('entities_count'),\n self.MIN_EXAMPLES_PER_ENTITY))\n\n return r\n\n @property\n def ready_for_train(self):\n if self.training_started_at:\n return False\n\n previous_update = self.repository.updates.filter(\n language=self.language,\n by__isnull=False,\n training_started_at__isnull=False,\n created_at__lt=self.created_at).first()\n\n if previous_update:\n if previous_update.use_language_model_featurizer is not \\\n self.repository.use_language_model_featurizer:\n return True\n if previous_update.use_competing_intents is not \\\n self.repository.use_competing_intents:\n return True\n if previous_update.failed_at:\n return True\n\n if not self.added.exists() and \\\n not self.translated_added.exists() and \\\n not self.deleted.exists():\n return False\n\n if self.examples.count() == 0:\n return False\n\n return len(self.requirements_to_train) is 0\n\n @property\n def intents(self):\n return list(set(self.examples.values_list('intent', flat=True)))\n\n @property\n def warnings(self):\n w = []\n if 0 < len(self.intents) < self.RECOMMENDED_INTENTS:\n w.append(_('You need to have at least {} intents for the ' +\n 'algorithm to identify intents.').format(\n self.RECOMMENDED_INTENTS))\n return w\n\n def __str__(self):\n return 'Repository Update #{}'.format(self.id)\n\n def validate_init_train(self, by=None):\n if self.trained_at:\n raise RepositoryUpdateAlreadyTrained()\n if self.training_started_at:\n raise RepositoryUpdateAlreadyStartedTraining()\n if by:\n authorization = self.repository.get_user_authorization(by)\n if not authorization.can_write:\n raise TrainingNotAllowed()\n\n def start_training(self, by):\n self.validate_init_train(by)\n self.by = by\n self.training_started_at = timezone.now()\n self.use_language_model_featurizer = self.repository \\\n .use_language_model_featurizer\n self.use_competing_intents = self.repository.use_competing_intents\n self.save(\n update_fields=[\n 'by',\n 'training_started_at',\n 'use_language_model_featurizer',\n 'use_competing_intents',\n ])\n\n def save_training(self, bot_data):\n if self.trained_at:\n raise RepositoryUpdateAlreadyTrained()\n\n self.trained_at = timezone.now()\n self.bot_data = base64.b64encode(bot_data).decode('utf8')\n self.save(\n update_fields=[\n 'trained_at',\n 'bot_data',\n ])\n\n def get_bot_data(self):\n return base64.b64decode(self.bot_data)\n\n def train_fail(self):\n self.failed_at = timezone.now()\n self.save(\n update_fields=[\n 'failed_at',\n ])\n\n\nclass RepositoryExample(models.Model):\n class Meta:\n verbose_name = _('repository example')\n verbose_name_plural = _('repository examples')\n ordering = ['-created_at']\n\n repository_update = models.ForeignKey(\n RepositoryUpdate,\n models.CASCADE,\n related_name='added',\n editable=False)\n deleted_in = models.ForeignKey(\n RepositoryUpdate,\n models.CASCADE,\n related_name='deleted',\n blank=True,\n null=True)\n text = models.TextField(\n _('text'),\n help_text=_('Example text'))\n intent = models.CharField(\n _('intent'),\n max_length=64,\n default='no_intent',\n help_text=_('Example intent reference'),\n validators=[validate_item_key])\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n @property\n def language(self):\n return self.repository_update.language\n\n def has_valid_entities(self, language=None):\n if not language or language == self.repository_update.language:\n return True\n return self.get_translation(language).has_valid_entities\n\n def 
get_translation(self, language):\n try:\n return self.translations.get(language=language)\n except RepositoryTranslatedExample.DoesNotExist:\n raise DoesNotHaveTranslation()\n\n def get_text(self, language=None):\n if not language or language == self.repository_update.language:\n return self.text\n return self.get_translation(language).text\n\n def get_entities(self, language):\n if not language or language == self.repository_update.language:\n return self.entities.all()\n return self.get_translation(language).entities.all()\n\n def delete(self):\n self.deleted_in = self.repository_update.repository.current_update(\n self.repository_update.language)\n self.save(update_fields=['deleted_in'])\n\n\nclass RepositoryTranslatedExampleManager(models.Manager):\n def create(self, *args, original_example=None, language=None, **kwargs):\n repository = original_example.repository_update.repository\n return super().create(\n *args,\n repository_update=repository.current_update(language),\n original_example=original_example,\n language=language,\n **kwargs)\n\n\nclass RepositoryTranslatedExample(models.Model):\n class Meta:\n verbose_name = _('repository translated example')\n verbose_name_plural = _('repository translated examples')\n unique_together = ['original_example', 'language']\n ordering = ['-created_at']\n\n repository_update = models.ForeignKey(\n RepositoryUpdate,\n models.CASCADE,\n related_name='translated_added',\n editable=False)\n original_example = models.ForeignKey(\n RepositoryExample,\n models.CASCADE,\n related_name='translations',\n editable=False,\n help_text=_('Example object'))\n language = models.CharField(\n _('language'),\n max_length=5,\n help_text=_('Translation language'),\n validators=[\n languages.validate_language,\n ])\n text = models.TextField(\n _('text'),\n help_text=_('Translation text'))\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = RepositoryTranslatedExampleManager()\n\n def entities_list_lambda_sort(item):\n return item.get('entity')\n\n @classmethod\n def same_entities_validator(cls, a, b):\n a_len = len(a)\n if a_len != len(b):\n return False\n a_sorted = sorted(\n a,\n key=cls.entities_list_lambda_sort)\n b_sorted = sorted(\n b,\n key=cls.entities_list_lambda_sort)\n for i in range(a_len):\n if a_sorted[i].get('entity') != b_sorted[i].get('entity'):\n return False\n return True\n\n @classmethod\n def count_entities(cls, entities_list, to_str=False):\n r = {}\n for e in entities_list:\n r.update({e.get('entity'): r.get('entity', 0) + 1})\n if to_str:\n r = ', '.join(map(\n lambda x: '{} {}'.format(x[1], x[0]),\n r.items())) if entities_list else 'no entities'\n return r\n\n @property\n def has_valid_entities(self):\n original_entities = self.original_example.entities.all()\n my_entities = self.entities.all()\n return RepositoryTranslatedExample.same_entities_validator(\n list(map(lambda x: x.to_dict, original_entities)),\n list(map(lambda x: x.to_dict, my_entities)))\n\n\nclass RepositoryEntityLabelQueryset(models.QuerySet):\n def get(self, repository, value):\n try:\n return super().get(\n repository=repository,\n value=value)\n except self.model.DoesNotExist as e:\n return super().create(\n repository=repository,\n value=value)\n\n\nclass RepositoryEntityLabelManager(models.Manager):\n def get_queryset(self):\n return RepositoryEntityLabelQueryset(self.model, using=self._db)\n\n\nclass RepositoryEntityLabel(models.Model):\n class Meta:\n unique_together = ['repository', 'value']\n\n repository = 
models.ForeignKey(\n Repository,\n on_delete=models.CASCADE,\n related_name='labels')\n value = models.CharField(\n _('label'),\n max_length=64,\n validators=[\n validate_item_key,\n can_t_be_other,\n ],\n blank=True)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = RepositoryEntityLabelManager()\n\n def examples(self, exclude_deleted=True):\n return self.repository.examples(\n exclude_deleted=exclude_deleted).filter(\n entities__entity__label=self)\n\n\nclass RepositoryEntityQueryset(models.QuerySet):\n def get(self, repository, value):\n try:\n return super().get(\n repository=repository,\n value=value)\n except self.model.DoesNotExist as e:\n return super().create(\n repository=repository,\n value=value)\n\n\nclass RepositoryEntityManager(models.Manager):\n def get_queryset(self):\n return RepositoryEntityQueryset(self.model, using=self._db)\n\n\nclass RepositoryEntity(models.Model):\n class Meta:\n unique_together = ['repository', 'value']\n\n repository = models.ForeignKey(\n Repository,\n on_delete=models.CASCADE,\n related_name='entities')\n value = models.CharField(\n _('entity'),\n max_length=64,\n help_text=_('Entity name'),\n validators=[validate_item_key])\n label = models.ForeignKey(\n RepositoryEntityLabel,\n on_delete=models.CASCADE,\n related_name='entities',\n null=True,\n blank=True)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = RepositoryEntityManager()\n\n def set_label(self, value):\n if not value:\n self.label = None\n else:\n self.label = RepositoryEntityLabel.objects.get(\n repository=self.repository,\n value=value)\n\n\nclass EntityBaseQueryset(models.QuerySet):\n def create(self, entity, **kwargs):\n if type(entity) is not RepositoryEntity:\n instance = self.model(**kwargs)\n repository = instance.example.repository_update.repository\n entity = RepositoryEntity.objects.get(\n repository=repository,\n value=entity)\n return super().create(\n entity=entity,\n **kwargs)\n\n\nclass EntityBaseManager(models.Manager):\n def get_queryset(self):\n return EntityBaseQueryset(self.model, using=self._db)\n\n\nclass EntityBase(models.Model):\n class Meta:\n verbose_name = _('repository example entity')\n verbose_name_plural = _('repository example entities')\n abstract = True\n\n start = models.PositiveIntegerField(\n _('start'),\n help_text=_('Start index of entity value in example text'))\n end = models.PositiveIntegerField(\n _('end'),\n help_text=_('End index of entity value in example text'))\n entity = models.ForeignKey(\n RepositoryEntity,\n on_delete=models.CASCADE)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n objects = EntityBaseManager()\n\n @property\n def example(self):\n return self.get_example()\n\n @property\n def value(self):\n return self.example.text[self.start:self.end]\n\n @property\n def rasa_nlu_data(self):\n return {\n 'start': self.start,\n 'end': self.end,\n 'value': self.value,\n 'entity': self.entity.value,\n }\n\n @property\n def to_dict(self):\n return self.get_rasa_nlu_data()\n\n def get_example(self):\n pass # pragma: no cover\n\n def get_rasa_nlu_data(self, label_as_entity=False):\n return {\n 'start': self.start,\n 'end': self.end,\n 'entity': self.entity.label.value\n if label_as_entity else self.entity.value,\n }\n\n\nclass RepositoryExampleEntity(EntityBase):\n repository_example = models.ForeignKey(\n RepositoryExample,\n models.CASCADE,\n related_name='entities',\n editable=False,\n help_text=_('Example object'))\n\n 
def get_example(self):\n return self.repository_example\n\n\nclass RepositoryTranslatedExampleEntity(EntityBase):\n repository_translated_example = models.ForeignKey(\n RepositoryTranslatedExample,\n models.CASCADE,\n related_name='entities',\n editable=False,\n help_text=_('Translated example object'))\n\n def get_example(self):\n return self.repository_translated_example\n\n\nclass RepositoryAuthorization(models.Model):\n class Meta:\n verbose_name = _('repository authorization')\n verbose_name_plural = _('repository authorizations')\n unique_together = ['user', 'repository']\n\n LEVEL_NOTHING = 0\n LEVEL_READER = 1\n LEVEL_CONTRIBUTOR = 2\n LEVEL_ADMIN = 3\n\n ROLE_NOT_SETTED = 0\n ROLE_USER = 1\n ROLE_CONTRIBUTOR = 2\n ROLE_ADMIN = 3\n\n ROLE_CHOICES = [\n (ROLE_NOT_SETTED, _('not set')),\n (ROLE_USER, _('user')),\n (ROLE_CONTRIBUTOR, _('contributor')),\n (ROLE_ADMIN, _('admin')),\n ]\n\n uuid = models.UUIDField(\n _('UUID'),\n primary_key=True,\n default=uuid.uuid4,\n editable=False)\n user = models.ForeignKey(\n User,\n models.CASCADE)\n repository = models.ForeignKey(\n Repository,\n models.CASCADE,\n related_name='authorizations')\n role = models.PositiveIntegerField(\n _('role'),\n choices=ROLE_CHOICES,\n default=ROLE_NOT_SETTED)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True)\n\n @property\n def level(self):\n try:\n user = self.user\n except User.DoesNotExist:\n user = None\n\n if user and self.repository.owner == user:\n return RepositoryAuthorization.LEVEL_ADMIN\n\n if self.role == RepositoryAuthorization.ROLE_NOT_SETTED:\n if self.repository.is_private:\n return RepositoryAuthorization.LEVEL_NOTHING\n return RepositoryAuthorization.LEVEL_READER\n\n if self.role == RepositoryAuthorization.ROLE_USER:\n return RepositoryAuthorization.LEVEL_READER\n\n if self.role == RepositoryAuthorization.ROLE_CONTRIBUTOR:\n return RepositoryAuthorization.LEVEL_CONTRIBUTOR\n\n if self.role == RepositoryAuthorization.ROLE_ADMIN:\n return RepositoryAuthorization.LEVEL_ADMIN\n\n return RepositoryAuthorization.LEVEL_NOTHING\n\n @property\n def can_read(self):\n return self.level in [\n RepositoryAuthorization.LEVEL_READER,\n RepositoryAuthorization.LEVEL_CONTRIBUTOR,\n RepositoryAuthorization.LEVEL_ADMIN,\n ]\n\n @property\n def can_contribute(self):\n return self.level in [\n RepositoryAuthorization.LEVEL_CONTRIBUTOR,\n RepositoryAuthorization.LEVEL_ADMIN,\n ]\n\n @property\n def can_write(self):\n return self.level in [\n RepositoryAuthorization.LEVEL_ADMIN,\n ]\n\n @property\n def is_admin(self):\n return self.level == RepositoryAuthorization.LEVEL_ADMIN\n\n @property\n def is_owner(self):\n try:\n user = self.user\n except User.DoesNotExist:\n return False\n return self.repository.owner == user\n\n @property\n def role_verbose(self):\n return dict(RepositoryAuthorization.ROLE_CHOICES).get(self.role)\n\n def send_new_role_email(self, responsible=None):\n if not settings.SEND_EMAILS:\n return False\n responsible_name = responsible and responsible.name \\\n or self.repository.owner.name\n context = {\n 'responsible_name': responsible_name,\n 'user_name': self.user.name,\n 'repository_name': self.repository.name,\n 'repository_url': self.repository.get_absolute_url(),\n 'new_role': self.role_verbose,\n }\n send_mail(\n _('New role in {}').format(self.repository.name),\n render_to_string(\n 'common/emails/new_role.txt',\n context),\n None,\n [self.user.email],\n html_message=render_to_string(\n 'common/emails/new_role.html',\n context))\n\n\nclass 
RepositoryVote(models.Model):\n UP_VOTE = 1\n DOWN_VOTE = -1\n NEUTRAL_VOTE = 0\n VOTE_CHOICES = [\n (UP_VOTE, _('Up'),),\n (DOWN_VOTE, _('Down')),\n (NEUTRAL_VOTE, _('Neutral')),\n ]\n\n class Meta:\n verbose_name = _('repository vote')\n verbose_name_plural = _('repository votes')\n unique_together = [\n 'user',\n 'repository',\n ]\n\n user = models.ForeignKey(\n User,\n models.CASCADE,\n related_name='repository_votes')\n repository = models.ForeignKey(\n Repository,\n models.CASCADE,\n related_name='votes')\n vote = models.IntegerField(\n _('vote'),\n choices=VOTE_CHOICES)\n\n\nclass RequestRepositoryAuthorization(models.Model):\n class Meta:\n unique_together = ['user', 'repository']\n\n user = models.ForeignKey(\n User,\n models.CASCADE,\n related_name='requests')\n repository = models.ForeignKey(\n Repository,\n models.CASCADE,\n related_name='requests')\n text = models.CharField(\n _('text'),\n max_length=250)\n approved_by = models.ForeignKey(\n User,\n models.CASCADE,\n blank=True,\n null=True)\n created_at = models.DateTimeField(\n _('created at'),\n auto_now_add=True,\n editable=False)\n\n def send_new_request_email_to_admins(self):\n if not settings.SEND_EMAILS:\n return False\n context = {\n 'user_name': self.user.name,\n 'repository_name': self.repository.name,\n 'text': self.text,\n 'repository_url': self.repository.get_absolute_url(),\n }\n for admin in self.repository.admins:\n send_mail(\n _('New authorization request in {}').format(\n self.repository.name),\n render_to_string(\n 'common/emails/new_request.txt',\n context),\n None,\n [admin.email],\n html_message=render_to_string(\n 'common/emails/new_request.html',\n context))\n\n def send_request_rejected_email(self):\n if not settings.SEND_EMAILS:\n return False\n context = {\n 'repository_name': self.repository.name,\n }\n send_mail(\n _('Access denied to {}').format(\n self.repository.name),\n render_to_string(\n 'common/emails/request_rejected.txt',\n context),\n None,\n [self.user.email],\n html_message=render_to_string(\n 'common/emails/request_rejected.html',\n context))\n\n def send_request_approved_email(self):\n if not settings.SEND_EMAILS:\n return False\n context = {\n 'admin_name': self.approved_by.name,\n 'repository_name': self.repository.name,\n }\n send_mail(\n _('Authorization Request Approved to {}').format(\n self.repository.name),\n render_to_string(\n 'common/emails/request_approved.txt',\n context),\n None,\n [self.user.email],\n html_message=render_to_string(\n 'common/emails/request_approved.html',\n context))\n\n\n@receiver(models.signals.pre_save, sender=RequestRepositoryAuthorization)\ndef set_user_role_on_approved(instance, **kwargs):\n current = None\n try:\n current = RequestRepositoryAuthorization.objects.get(pk=instance.pk)\n except RequestRepositoryAuthorization.DoesNotExist as e:\n pass\n\n if not current:\n return False\n\n if current.approved_by is None and \\\n current.approved_by is not instance.approved_by:\n user_authorization = instance.repository.get_user_authorization(\n instance.user)\n user_authorization.role = RepositoryAuthorization.ROLE_USER\n user_authorization.save(update_fields=['role'])\n instance.send_request_approved_email()\n else:\n raise ValidationError(\n _('You can change approved_by just one time.'))\n\n\n@receiver(models.signals.post_save, sender=RequestRepositoryAuthorization)\ndef send_new_request_email_to_admins_on_created(instance, created, **kwargs):\n if created:\n instance.send_new_request_email_to_admins()\n\n\n@receiver(models.signals.post_delete, 
sender=RequestRepositoryAuthorization)\ndef send_request_rejected_email(instance, **kwargs):\n instance.send_request_rejected_email()\n", "path": "bothub/common/models.py" } ]
diff --git a/bothub/common/models.py b/bothub/common/models.py index ac4eab27..74711b74 100644 --- a/bothub/common/models.py +++ b/bothub/common/models.py @@ -481,6 +481,9 @@ def ready_for_train(self): not self.deleted.exists(): return False + if self.examples.count() == 0: + return False + return len(self.requirements_to_train) is 0 @property diff --git a/bothub/common/tests.py b/bothub/common/tests.py index d6b666cb..441e2aa1 100644 --- a/bothub/common/tests.py +++ b/bothub/common/tests.py @@ -871,6 +871,15 @@ def test_entity_dont_have_min_examples(self): entity='hi') self.assertTrue(self.repository.current_update().ready_for_train) + def test_no_examples(self): + example = RepositoryExample.objects.create( + repository_update=self.repository.current_update(), + text='hi', + intent='greet') + self.repository.current_update().start_training(self.owner) + example.delete() + self.assertFalse(self.repository.current_update().ready_for_train) + class RequestRepositoryAuthorizationTestCase(TestCase): def setUp(self):
fail2ban__fail2ban-2057
badips.py should use https Hi, What about asking `badips.py` to use https? I think the following: `_badips = "http://www.badips.com"` Would just have to be changed to: `_badips = "https://www.badips.com"` Thank you 👍 Ben
[ { "content": "# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-\n# vi: set ft=python sts=4 ts=4 sw=4 noet :\n\n# This file is part of Fail2Ban.\n#\n# Fail2Ban is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n#\n# Fail2Ban is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Fail2Ban; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.\n\nimport sys\nif sys.version_info < (2, 7):\n\traise ImportError(\"badips.py action requires Python >= 2.7\")\nimport json\nimport threading\nimport logging\nif sys.version_info >= (3, ):\n\tfrom urllib.request import Request, urlopen\n\tfrom urllib.parse import urlencode\n\tfrom urllib.error import HTTPError\nelse:\n\tfrom urllib2 import Request, urlopen, HTTPError\n\tfrom urllib import urlencode\n\nfrom fail2ban.server.actions import ActionBase\n\n\nclass BadIPsAction(ActionBase): # pragma: no cover - may be unavailable\n\t\"\"\"Fail2Ban action which reports bans to badips.com, and also\n\tblacklist bad IPs listed on badips.com by using another action's\n\tban method.\n\n\tParameters\n\t----------\n\tjail : Jail\n\t\tThe jail which the action belongs to.\n\tname : str\n\t\tName assigned to the action.\n\tcategory : str\n\t\tValid badips.com category for reporting failures.\n\tscore : int, optional\n\t\tMinimum score for bad IPs. Default 3.\n\tage : str, optional\n\t\tAge of last report for bad IPs, per badips.com syntax.\n\t\tDefault \"24h\" (24 hours)\n\tkey : str, optional\n\t\tKey issued by badips.com to report bans, for later retrieval\n\t\tof personalised content.\n\tbanaction : str, optional\n\t\tName of banaction to use for blacklisting bad IPs. If `None`,\n\t\tno blacklist of IPs will take place.\n\t\tDefault `None`.\n\tbancategory : str, optional\n\t\tName of category to use for blacklisting, which can differ\n\t\tfrom category used for reporting. e.g. 
may want to report\n\t\t\"postfix\", but want to use whole \"mail\" category for blacklist.\n\t\tDefault `category`.\n\tbankey : str, optional\n\t\tKey issued by badips.com to blacklist IPs reported with the\n\t\tassociated key.\n\tupdateperiod : int, optional\n\t\tTime in seconds between updating bad IPs blacklist.\n\t\tDefault 900 (15 minutes)\n\tagent : str, optional\n\t\tUser agent transmitted to server.\n\t\tDefault `Fail2Ban/ver.`\n\n\tRaises\n\t------\n\tValueError\n\t\tIf invalid `category`, `score`, `banaction` or `updateperiod`.\n\t\"\"\"\n\n\tTIMEOUT = 10\n\t_badips = \"http://www.badips.com\"\n\tdef _Request(self, url, **argv):\n\t\treturn Request(url, headers={'User-Agent': self.agent}, **argv)\n\n\tdef __init__(self, jail, name, category, score=3, age=\"24h\", key=None,\n\t\tbanaction=None, bancategory=None, bankey=None, updateperiod=900, agent=\"Fail2Ban\", \n\t\ttimeout=TIMEOUT):\n\t\tsuper(BadIPsAction, self).__init__(jail, name)\n\n\t\tself.timeout = timeout\n\t\tself.agent = agent\n\t\tself.category = category\n\t\tself.score = score\n\t\tself.age = age\n\t\tself.key = key\n\t\tself.banaction = banaction\n\t\tself.bancategory = bancategory or category\n\t\tself.bankey = bankey\n\t\tself.updateperiod = updateperiod\n\n\t\tself._bannedips = set()\n\t\t# Used later for threading.Timer for updating badips\n\t\tself._timer = None\n\n\t@staticmethod\n\tdef isAvailable(timeout=1):\n\t\ttry:\n\t\t\tresponse = urlopen(Request(\"/\".join([BadIPsAction._badips]),\n\t\t\t\t\theaders={'User-Agent': \"Fail2Ban\"}), timeout=timeout)\n\t\t\treturn True, ''\n\t\texcept Exception as e: # pragma: no cover\n\t\t\treturn False, e\n\n\n\tdef getCategories(self, incParents=False):\n\t\t\"\"\"Get badips.com categories.\n\n\t\tReturns\n\t\t-------\n\t\tset\n\t\t\tSet of categories.\n\n\t\tRaises\n\t\t------\n\t\tHTTPError\n\t\t\tAny issues with badips.com request.\n\t\tValueError\n\t\t\tIf badips.com response didn't contain necessary information\n\t\t\"\"\"\n\t\ttry:\n\t\t\tresponse = urlopen(\n\t\t\t\tself._Request(\"/\".join([self._badips, \"get\", \"categories\"])), timeout=self.timeout)\n\t\texcept HTTPError as response:\n\t\t\tmessages = json.loads(response.read().decode('utf-8'))\n\t\t\tself._logSys.error(\n\t\t\t\t\"Failed to fetch categories. badips.com response: '%s'\",\n\t\t\t\tmessages['err'])\n\t\t\traise\n\t\telse:\n\t\t\tresponse_json = json.loads(response.read().decode('utf-8'))\n\t\t\tif not 'categories' in response_json:\n\t\t\t\terr = \"badips.com response lacked categories specification. 
Response was: %s\" \\\n\t\t\t\t % (response_json,)\n\t\t\t\tself._logSys.error(err)\n\t\t\t\traise ValueError(err)\n\t\t\tcategories = response_json['categories']\n\t\t\tcategories_names = set(\n\t\t\t\tvalue['Name'] for value in categories)\n\t\t\tif incParents:\n\t\t\t\tcategories_names.update(set(\n\t\t\t\t\tvalue['Parent'] for value in categories\n\t\t\t\t\tif \"Parent\" in value))\n\t\t\treturn categories_names\n\n\tdef getList(self, category, score, age, key=None):\n\t\t\"\"\"Get badips.com list of bad IPs.\n\n\t\tParameters\n\t\t----------\n\t\tcategory : str\n\t\t\tValid badips.com category.\n\t\tscore : int\n\t\t\tMinimum score for bad IPs.\n\t\tage : str\n\t\t\tAge of last report for bad IPs, per badips.com syntax.\n\t\tkey : str, optional\n\t\t\tKey issued by badips.com to fetch IPs reported with the\n\t\t\tassociated key.\n\n\t\tReturns\n\t\t-------\n\t\tset\n\t\t\tSet of bad IPs.\n\n\t\tRaises\n\t\t------\n\t\tHTTPError\n\t\t\tAny issues with badips.com request.\n\t\t\"\"\"\n\t\ttry:\n\t\t\turl = \"?\".join([\n\t\t\t\t\"/\".join([self._badips, \"get\", \"list\", category, str(score)]),\n\t\t\t\turlencode({'age': age})])\n\t\t\tif key:\n\t\t\t\turl = \"&\".join([url, urlencode({'key': key})])\n\t\t\tresponse = urlopen(self._Request(url), timeout=self.timeout)\n\t\texcept HTTPError as response:\n\t\t\tmessages = json.loads(response.read().decode('utf-8'))\n\t\t\tself._logSys.error(\n\t\t\t\t\"Failed to fetch bad IP list. badips.com response: '%s'\",\n\t\t\t\tmessages['err'])\n\t\t\traise\n\t\telse:\n\t\t\treturn set(response.read().decode('utf-8').split())\n\n\t@property\n\tdef category(self):\n\t\t\"\"\"badips.com category for reporting IPs.\n\t\t\"\"\"\n\t\treturn self._category\n\n\[email protected]\n\tdef category(self, category):\n\t\tif category not in self.getCategories():\n\t\t\tself._logSys.error(\"Category name '%s' not valid. \"\n\t\t\t\t\"see badips.com for list of valid categories\",\n\t\t\t\tcategory)\n\t\t\traise ValueError(\"Invalid category: %s\" % category)\n\t\tself._category = category\n\n\t@property\n\tdef bancategory(self):\n\t\t\"\"\"badips.com bancategory for fetching IPs.\n\t\t\"\"\"\n\t\treturn self._bancategory\n\n\[email protected]\n\tdef bancategory(self, bancategory):\n\t\tif bancategory not in self.getCategories(incParents=True):\n\t\t\tself._logSys.error(\"Category name '%s' not valid. 
\"\n\t\t\t\t\"see badips.com for list of valid categories\",\n\t\t\t\tbancategory)\n\t\t\traise ValueError(\"Invalid bancategory: %s\" % bancategory)\n\t\tself._bancategory = bancategory\n\n\t@property\n\tdef score(self):\n\t\t\"\"\"badips.com minimum score for fetching IPs.\n\t\t\"\"\"\n\t\treturn self._score\n\n\[email protected]\n\tdef score(self, score):\n\t\tscore = int(score)\n\t\tif 0 <= score <= 5:\n\t\t\tself._score = score\n\t\telse:\n\t\t\traise ValueError(\"Score must be 0-5\")\n\n\t@property\n\tdef banaction(self):\n\t\t\"\"\"Jail action to use for banning/unbanning.\n\t\t\"\"\"\n\t\treturn self._banaction\n\n\[email protected]\n\tdef banaction(self, banaction):\n\t\tif banaction is not None and banaction not in self._jail.actions:\n\t\t\tself._logSys.error(\"Action name '%s' not in jail '%s'\",\n\t\t\t\tbanaction, self._jail.name)\n\t\t\traise ValueError(\"Invalid banaction\")\n\t\tself._banaction = banaction\n\n\t@property\n\tdef updateperiod(self):\n\t\t\"\"\"Period in seconds between banned bad IPs will be updated.\n\t\t\"\"\"\n\t\treturn self._updateperiod\n\n\[email protected]\n\tdef updateperiod(self, updateperiod):\n\t\tupdateperiod = int(updateperiod)\n\t\tif updateperiod > 0:\n\t\t\tself._updateperiod = updateperiod\n\t\telse:\n\t\t\traise ValueError(\"Update period must be integer greater than 0\")\n\n\tdef _banIPs(self, ips):\n\t\tfor ip in ips:\n\t\t\ttry:\n\t\t\t\tself._jail.actions[self.banaction].ban({\n\t\t\t\t\t'ip': ip,\n\t\t\t\t\t'failures': 0,\n\t\t\t\t\t'matches': \"\",\n\t\t\t\t\t'ipmatches': \"\",\n\t\t\t\t\t'ipjailmatches': \"\",\n\t\t\t\t})\n\t\t\texcept Exception as e:\n\t\t\t\tself._logSys.error(\n\t\t\t\t\t\"Error banning IP %s for jail '%s' with action '%s': %s\",\n\t\t\t\t\tip, self._jail.name, self.banaction, e,\n\t\t\t\t\texc_info=self._logSys.getEffectiveLevel()<=logging.DEBUG)\n\t\t\telse:\n\t\t\t\tself._bannedips.add(ip)\n\t\t\t\tself._logSys.info(\n\t\t\t\t\t\"Banned IP %s for jail '%s' with action '%s'\",\n\t\t\t\t\tip, self._jail.name, self.banaction)\n\n\tdef _unbanIPs(self, ips):\n\t\tfor ip in ips:\n\t\t\ttry:\n\t\t\t\tself._jail.actions[self.banaction].unban({\n\t\t\t\t\t'ip': ip,\n\t\t\t\t\t'failures': 0,\n\t\t\t\t\t'matches': \"\",\n\t\t\t\t\t'ipmatches': \"\",\n\t\t\t\t\t'ipjailmatches': \"\",\n\t\t\t\t})\n\t\t\texcept Exception as e:\n\t\t\t\tself._logSys.info(\n\t\t\t\t\t\"Error unbanning IP %s for jail '%s' with action '%s': %s\",\n\t\t\t\t\tip, self._jail.name, self.banaction, e,\n\t\t\t\t\texc_info=self._logSys.getEffectiveLevel()<=logging.DEBUG)\n\t\t\telse:\n\t\t\t\tself._logSys.info(\n\t\t\t\t\t\"Unbanned IP %s for jail '%s' with action '%s'\",\n\t\t\t\t\tip, self._jail.name, self.banaction)\n\t\t\tfinally:\n\t\t\t\tself._bannedips.remove(ip)\n\n\tdef start(self):\n\t\t\"\"\"If `banaction` set, blacklists bad IPs.\n\t\t\"\"\"\n\t\tif self.banaction is not None:\n\t\t\tself.update()\n\n\tdef update(self):\n\t\t\"\"\"If `banaction` set, updates blacklisted IPs.\n\n\t\tQueries badips.com for list of bad IPs, removing IPs from the\n\t\tblacklist if no longer present, and adds new bad IPs to the\n\t\tblacklist.\n\t\t\"\"\"\n\t\tif self.banaction is not None:\n\t\t\tif self._timer:\n\t\t\t\tself._timer.cancel()\n\t\t\t\tself._timer = None\n\n\t\t\ttry:\n\t\t\t\tips = self.getList(\n\t\t\t\t\tself.bancategory, self.score, self.age, self.bankey)\n\t\t\t\t# Remove old IPs no longer listed\n\t\t\t\tself._unbanIPs(self._bannedips - ips)\n\t\t\t\t# Add new IPs which are now listed\n\t\t\t\tself._banIPs(ips - 
self._bannedips)\n\n\t\t\t\tself._logSys.info(\n\t\t\t\t\t\"Updated IPs for jail '%s'. Update again in %i seconds\",\n\t\t\t\t\tself._jail.name, self.updateperiod)\n\t\t\tfinally:\n\t\t\t\tself._timer = threading.Timer(self.updateperiod, self.update)\n\t\t\t\tself._timer.start()\n\n\tdef stop(self):\n\t\t\"\"\"If `banaction` set, clears blacklisted IPs.\n\t\t\"\"\"\n\t\tif self.banaction is not None:\n\t\t\tif self._timer:\n\t\t\t\tself._timer.cancel()\n\t\t\t\tself._timer = None\n\t\t\tself._unbanIPs(self._bannedips.copy())\n\n\tdef ban(self, aInfo):\n\t\t\"\"\"Reports banned IP to badips.com.\n\n\t\tParameters\n\t\t----------\n\t\taInfo : dict\n\t\t\tDictionary which includes information in relation to\n\t\t\tthe ban.\n\n\t\tRaises\n\t\t------\n\t\tHTTPError\n\t\t\tAny issues with badips.com request.\n\t\t\"\"\"\n\t\ttry:\n\t\t\turl = \"/\".join([self._badips, \"add\", self.category, aInfo['ip']])\n\t\t\tif self.key:\n\t\t\t\turl = \"?\".join([url, urlencode({'key': self.key})])\n\t\t\tresponse = urlopen(self._Request(url), timeout=self.timeout)\n\t\texcept HTTPError as response:\n\t\t\tmessages = json.loads(response.read().decode('utf-8'))\n\t\t\tself._logSys.error(\n\t\t\t\t\"Response from badips.com report: '%s'\",\n\t\t\t\tmessages['err'])\n\t\t\traise\n\t\telse:\n\t\t\tmessages = json.loads(response.read().decode('utf-8'))\n\t\t\tself._logSys.info(\n\t\t\t\t\"Response from badips.com report: '%s'\",\n\t\t\t\tmessages['suc'])\n\nAction = BadIPsAction\n", "path": "config/action.d/badips.py" } ]
[ { "content": "# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-\n# vi: set ft=python sts=4 ts=4 sw=4 noet :\n\n# This file is part of Fail2Ban.\n#\n# Fail2Ban is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n#\n# Fail2Ban is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Fail2Ban; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.\n\nimport sys\nif sys.version_info < (2, 7):\n\traise ImportError(\"badips.py action requires Python >= 2.7\")\nimport json\nimport threading\nimport logging\nif sys.version_info >= (3, ):\n\tfrom urllib.request import Request, urlopen\n\tfrom urllib.parse import urlencode\n\tfrom urllib.error import HTTPError\nelse:\n\tfrom urllib2 import Request, urlopen, HTTPError\n\tfrom urllib import urlencode\n\nfrom fail2ban.server.actions import ActionBase\n\n\nclass BadIPsAction(ActionBase): # pragma: no cover - may be unavailable\n\t\"\"\"Fail2Ban action which reports bans to badips.com, and also\n\tblacklist bad IPs listed on badips.com by using another action's\n\tban method.\n\n\tParameters\n\t----------\n\tjail : Jail\n\t\tThe jail which the action belongs to.\n\tname : str\n\t\tName assigned to the action.\n\tcategory : str\n\t\tValid badips.com category for reporting failures.\n\tscore : int, optional\n\t\tMinimum score for bad IPs. Default 3.\n\tage : str, optional\n\t\tAge of last report for bad IPs, per badips.com syntax.\n\t\tDefault \"24h\" (24 hours)\n\tkey : str, optional\n\t\tKey issued by badips.com to report bans, for later retrieval\n\t\tof personalised content.\n\tbanaction : str, optional\n\t\tName of banaction to use for blacklisting bad IPs. If `None`,\n\t\tno blacklist of IPs will take place.\n\t\tDefault `None`.\n\tbancategory : str, optional\n\t\tName of category to use for blacklisting, which can differ\n\t\tfrom category used for reporting. e.g. 
may want to report\n\t\t\"postfix\", but want to use whole \"mail\" category for blacklist.\n\t\tDefault `category`.\n\tbankey : str, optional\n\t\tKey issued by badips.com to blacklist IPs reported with the\n\t\tassociated key.\n\tupdateperiod : int, optional\n\t\tTime in seconds between updating bad IPs blacklist.\n\t\tDefault 900 (15 minutes)\n\tagent : str, optional\n\t\tUser agent transmitted to server.\n\t\tDefault `Fail2Ban/ver.`\n\n\tRaises\n\t------\n\tValueError\n\t\tIf invalid `category`, `score`, `banaction` or `updateperiod`.\n\t\"\"\"\n\n\tTIMEOUT = 10\n\t_badips = \"https://www.badips.com\"\n\tdef _Request(self, url, **argv):\n\t\treturn Request(url, headers={'User-Agent': self.agent}, **argv)\n\n\tdef __init__(self, jail, name, category, score=3, age=\"24h\", key=None,\n\t\tbanaction=None, bancategory=None, bankey=None, updateperiod=900, agent=\"Fail2Ban\", \n\t\ttimeout=TIMEOUT):\n\t\tsuper(BadIPsAction, self).__init__(jail, name)\n\n\t\tself.timeout = timeout\n\t\tself.agent = agent\n\t\tself.category = category\n\t\tself.score = score\n\t\tself.age = age\n\t\tself.key = key\n\t\tself.banaction = banaction\n\t\tself.bancategory = bancategory or category\n\t\tself.bankey = bankey\n\t\tself.updateperiod = updateperiod\n\n\t\tself._bannedips = set()\n\t\t# Used later for threading.Timer for updating badips\n\t\tself._timer = None\n\n\t@staticmethod\n\tdef isAvailable(timeout=1):\n\t\ttry:\n\t\t\tresponse = urlopen(Request(\"/\".join([BadIPsAction._badips]),\n\t\t\t\t\theaders={'User-Agent': \"Fail2Ban\"}), timeout=timeout)\n\t\t\treturn True, ''\n\t\texcept Exception as e: # pragma: no cover\n\t\t\treturn False, e\n\n\n\tdef getCategories(self, incParents=False):\n\t\t\"\"\"Get badips.com categories.\n\n\t\tReturns\n\t\t-------\n\t\tset\n\t\t\tSet of categories.\n\n\t\tRaises\n\t\t------\n\t\tHTTPError\n\t\t\tAny issues with badips.com request.\n\t\tValueError\n\t\t\tIf badips.com response didn't contain necessary information\n\t\t\"\"\"\n\t\ttry:\n\t\t\tresponse = urlopen(\n\t\t\t\tself._Request(\"/\".join([self._badips, \"get\", \"categories\"])), timeout=self.timeout)\n\t\texcept HTTPError as response:\n\t\t\tmessages = json.loads(response.read().decode('utf-8'))\n\t\t\tself._logSys.error(\n\t\t\t\t\"Failed to fetch categories. badips.com response: '%s'\",\n\t\t\t\tmessages['err'])\n\t\t\traise\n\t\telse:\n\t\t\tresponse_json = json.loads(response.read().decode('utf-8'))\n\t\t\tif not 'categories' in response_json:\n\t\t\t\terr = \"badips.com response lacked categories specification. 
Response was: %s\" \\\n\t\t\t\t % (response_json,)\n\t\t\t\tself._logSys.error(err)\n\t\t\t\traise ValueError(err)\n\t\t\tcategories = response_json['categories']\n\t\t\tcategories_names = set(\n\t\t\t\tvalue['Name'] for value in categories)\n\t\t\tif incParents:\n\t\t\t\tcategories_names.update(set(\n\t\t\t\t\tvalue['Parent'] for value in categories\n\t\t\t\t\tif \"Parent\" in value))\n\t\t\treturn categories_names\n\n\tdef getList(self, category, score, age, key=None):\n\t\t\"\"\"Get badips.com list of bad IPs.\n\n\t\tParameters\n\t\t----------\n\t\tcategory : str\n\t\t\tValid badips.com category.\n\t\tscore : int\n\t\t\tMinimum score for bad IPs.\n\t\tage : str\n\t\t\tAge of last report for bad IPs, per badips.com syntax.\n\t\tkey : str, optional\n\t\t\tKey issued by badips.com to fetch IPs reported with the\n\t\t\tassociated key.\n\n\t\tReturns\n\t\t-------\n\t\tset\n\t\t\tSet of bad IPs.\n\n\t\tRaises\n\t\t------\n\t\tHTTPError\n\t\t\tAny issues with badips.com request.\n\t\t\"\"\"\n\t\ttry:\n\t\t\turl = \"?\".join([\n\t\t\t\t\"/\".join([self._badips, \"get\", \"list\", category, str(score)]),\n\t\t\t\turlencode({'age': age})])\n\t\t\tif key:\n\t\t\t\turl = \"&\".join([url, urlencode({'key': key})])\n\t\t\tresponse = urlopen(self._Request(url), timeout=self.timeout)\n\t\texcept HTTPError as response:\n\t\t\tmessages = json.loads(response.read().decode('utf-8'))\n\t\t\tself._logSys.error(\n\t\t\t\t\"Failed to fetch bad IP list. badips.com response: '%s'\",\n\t\t\t\tmessages['err'])\n\t\t\traise\n\t\telse:\n\t\t\treturn set(response.read().decode('utf-8').split())\n\n\t@property\n\tdef category(self):\n\t\t\"\"\"badips.com category for reporting IPs.\n\t\t\"\"\"\n\t\treturn self._category\n\n\[email protected]\n\tdef category(self, category):\n\t\tif category not in self.getCategories():\n\t\t\tself._logSys.error(\"Category name '%s' not valid. \"\n\t\t\t\t\"see badips.com for list of valid categories\",\n\t\t\t\tcategory)\n\t\t\traise ValueError(\"Invalid category: %s\" % category)\n\t\tself._category = category\n\n\t@property\n\tdef bancategory(self):\n\t\t\"\"\"badips.com bancategory for fetching IPs.\n\t\t\"\"\"\n\t\treturn self._bancategory\n\n\[email protected]\n\tdef bancategory(self, bancategory):\n\t\tif bancategory not in self.getCategories(incParents=True):\n\t\t\tself._logSys.error(\"Category name '%s' not valid. 
\"\n\t\t\t\t\"see badips.com for list of valid categories\",\n\t\t\t\tbancategory)\n\t\t\traise ValueError(\"Invalid bancategory: %s\" % bancategory)\n\t\tself._bancategory = bancategory\n\n\t@property\n\tdef score(self):\n\t\t\"\"\"badips.com minimum score for fetching IPs.\n\t\t\"\"\"\n\t\treturn self._score\n\n\[email protected]\n\tdef score(self, score):\n\t\tscore = int(score)\n\t\tif 0 <= score <= 5:\n\t\t\tself._score = score\n\t\telse:\n\t\t\traise ValueError(\"Score must be 0-5\")\n\n\t@property\n\tdef banaction(self):\n\t\t\"\"\"Jail action to use for banning/unbanning.\n\t\t\"\"\"\n\t\treturn self._banaction\n\n\[email protected]\n\tdef banaction(self, banaction):\n\t\tif banaction is not None and banaction not in self._jail.actions:\n\t\t\tself._logSys.error(\"Action name '%s' not in jail '%s'\",\n\t\t\t\tbanaction, self._jail.name)\n\t\t\traise ValueError(\"Invalid banaction\")\n\t\tself._banaction = banaction\n\n\t@property\n\tdef updateperiod(self):\n\t\t\"\"\"Period in seconds between banned bad IPs will be updated.\n\t\t\"\"\"\n\t\treturn self._updateperiod\n\n\[email protected]\n\tdef updateperiod(self, updateperiod):\n\t\tupdateperiod = int(updateperiod)\n\t\tif updateperiod > 0:\n\t\t\tself._updateperiod = updateperiod\n\t\telse:\n\t\t\traise ValueError(\"Update period must be integer greater than 0\")\n\n\tdef _banIPs(self, ips):\n\t\tfor ip in ips:\n\t\t\ttry:\n\t\t\t\tself._jail.actions[self.banaction].ban({\n\t\t\t\t\t'ip': ip,\n\t\t\t\t\t'failures': 0,\n\t\t\t\t\t'matches': \"\",\n\t\t\t\t\t'ipmatches': \"\",\n\t\t\t\t\t'ipjailmatches': \"\",\n\t\t\t\t})\n\t\t\texcept Exception as e:\n\t\t\t\tself._logSys.error(\n\t\t\t\t\t\"Error banning IP %s for jail '%s' with action '%s': %s\",\n\t\t\t\t\tip, self._jail.name, self.banaction, e,\n\t\t\t\t\texc_info=self._logSys.getEffectiveLevel()<=logging.DEBUG)\n\t\t\telse:\n\t\t\t\tself._bannedips.add(ip)\n\t\t\t\tself._logSys.info(\n\t\t\t\t\t\"Banned IP %s for jail '%s' with action '%s'\",\n\t\t\t\t\tip, self._jail.name, self.banaction)\n\n\tdef _unbanIPs(self, ips):\n\t\tfor ip in ips:\n\t\t\ttry:\n\t\t\t\tself._jail.actions[self.banaction].unban({\n\t\t\t\t\t'ip': ip,\n\t\t\t\t\t'failures': 0,\n\t\t\t\t\t'matches': \"\",\n\t\t\t\t\t'ipmatches': \"\",\n\t\t\t\t\t'ipjailmatches': \"\",\n\t\t\t\t})\n\t\t\texcept Exception as e:\n\t\t\t\tself._logSys.info(\n\t\t\t\t\t\"Error unbanning IP %s for jail '%s' with action '%s': %s\",\n\t\t\t\t\tip, self._jail.name, self.banaction, e,\n\t\t\t\t\texc_info=self._logSys.getEffectiveLevel()<=logging.DEBUG)\n\t\t\telse:\n\t\t\t\tself._logSys.info(\n\t\t\t\t\t\"Unbanned IP %s for jail '%s' with action '%s'\",\n\t\t\t\t\tip, self._jail.name, self.banaction)\n\t\t\tfinally:\n\t\t\t\tself._bannedips.remove(ip)\n\n\tdef start(self):\n\t\t\"\"\"If `banaction` set, blacklists bad IPs.\n\t\t\"\"\"\n\t\tif self.banaction is not None:\n\t\t\tself.update()\n\n\tdef update(self):\n\t\t\"\"\"If `banaction` set, updates blacklisted IPs.\n\n\t\tQueries badips.com for list of bad IPs, removing IPs from the\n\t\tblacklist if no longer present, and adds new bad IPs to the\n\t\tblacklist.\n\t\t\"\"\"\n\t\tif self.banaction is not None:\n\t\t\tif self._timer:\n\t\t\t\tself._timer.cancel()\n\t\t\t\tself._timer = None\n\n\t\t\ttry:\n\t\t\t\tips = self.getList(\n\t\t\t\t\tself.bancategory, self.score, self.age, self.bankey)\n\t\t\t\t# Remove old IPs no longer listed\n\t\t\t\tself._unbanIPs(self._bannedips - ips)\n\t\t\t\t# Add new IPs which are now listed\n\t\t\t\tself._banIPs(ips - 
self._bannedips)\n\n\t\t\t\tself._logSys.info(\n\t\t\t\t\t\"Updated IPs for jail '%s'. Update again in %i seconds\",\n\t\t\t\t\tself._jail.name, self.updateperiod)\n\t\t\tfinally:\n\t\t\t\tself._timer = threading.Timer(self.updateperiod, self.update)\n\t\t\t\tself._timer.start()\n\n\tdef stop(self):\n\t\t\"\"\"If `banaction` set, clears blacklisted IPs.\n\t\t\"\"\"\n\t\tif self.banaction is not None:\n\t\t\tif self._timer:\n\t\t\t\tself._timer.cancel()\n\t\t\t\tself._timer = None\n\t\t\tself._unbanIPs(self._bannedips.copy())\n\n\tdef ban(self, aInfo):\n\t\t\"\"\"Reports banned IP to badips.com.\n\n\t\tParameters\n\t\t----------\n\t\taInfo : dict\n\t\t\tDictionary which includes information in relation to\n\t\t\tthe ban.\n\n\t\tRaises\n\t\t------\n\t\tHTTPError\n\t\t\tAny issues with badips.com request.\n\t\t\"\"\"\n\t\ttry:\n\t\t\turl = \"/\".join([self._badips, \"add\", self.category, aInfo['ip']])\n\t\t\tif self.key:\n\t\t\t\turl = \"?\".join([url, urlencode({'key': self.key})])\n\t\t\tresponse = urlopen(self._Request(url), timeout=self.timeout)\n\t\texcept HTTPError as response:\n\t\t\tmessages = json.loads(response.read().decode('utf-8'))\n\t\t\tself._logSys.error(\n\t\t\t\t\"Response from badips.com report: '%s'\",\n\t\t\t\tmessages['err'])\n\t\t\traise\n\t\telse:\n\t\t\tmessages = json.loads(response.read().decode('utf-8'))\n\t\t\tself._logSys.info(\n\t\t\t\t\"Response from badips.com report: '%s'\",\n\t\t\t\tmessages['suc'])\n\nAction = BadIPsAction\n", "path": "config/action.d/badips.py" } ]
diff --git a/ChangeLog b/ChangeLog index 4bee628855..b16827a2e4 100644 --- a/ChangeLog +++ b/ChangeLog @@ -44,6 +44,7 @@ ver. 0.10.3-dev-1 (20??/??/??) - development edition * possibility to specify own regex-pattern to match epoch date-time, e. g. `^\[{EPOCH}\]` or `^\[{LEPOCH}\]` (gh-2038); the epoch-pattern similar to `{DATE}` patterns does the capture and cuts out the match of whole pattern from the log-line, e. g. date-pattern `^\[{LEPOCH}\]\s+:` will match and cut out `[1516469849551000] :` from begin of the log-line. +* badips.py now uses https instead of plain http when requesting badips.com (gh-2057); ver. 0.10.2 (2018/01/18) - nothing-burns-like-the-cold diff --git a/config/action.d/badips.py b/config/action.d/badips.py index 473fbf335f..03fe7856ee 100644 --- a/config/action.d/badips.py +++ b/config/action.d/badips.py @@ -81,7 +81,7 @@ class BadIPsAction(ActionBase): # pragma: no cover - may be unavailable """ TIMEOUT = 10 - _badips = "http://www.badips.com" + _badips = "https://www.badips.com" def _Request(self, url, **argv): return Request(url, headers={'User-Agent': self.agent}, **argv)
Pylons__pyramid-2225
Update to Sphinx 1.3.4 when released There is a [bug in Sphinx 1.3.3 and 1.3.1](https://github.com/sphinx-doc/sphinx/issues/2189) (I haven't tried 1.3.2) where next and previous links in Sphinx documentation are broken when going into children and across sibling directories. When 1.3.4 is released, we need to pin sphinx to 1.3.4, which will include the commit made 8 days after the 1.3.3 release.
[ { "content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany\n# this distribution. THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL\n# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND\n# FITNESS FOR A PARTICULAR PURPOSE\n#\n##############################################################################\n\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\npy_version = sys.version_info[:2]\n\nPY3 = py_version[0] == 3\n\nif PY3:\n if py_version < (3, 2):\n raise RuntimeError('On Python 3, Pyramid requires Python 3.2 or better')\nelse:\n if py_version < (2, 6):\n raise RuntimeError('On Python 2, Pyramid requires Python 2.6 or better')\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n with open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n with open(os.path.join(here, 'CHANGES.txt')) as f:\n CHANGES = f.read()\nexcept IOError:\n README = CHANGES = ''\n\ninstall_requires=[\n 'setuptools',\n 'WebOb >= 1.3.1', # request.domain and CookieProfile\n 'repoze.lru >= 0.4', # py3 compat\n 'zope.interface >= 3.8.0', # has zope.interface.registry\n 'zope.deprecation >= 3.5.0', # py3 compat\n 'venusian >= 1.0a3', # ``ignore``\n 'translationstring >= 0.4', # py3 compat\n 'PasteDeploy >= 1.5.0', # py3 compat\n ]\n\ntests_require = [\n 'WebTest >= 1.3.1', # py3 compat\n ]\n\nif not PY3:\n tests_require.append('zope.component>=3.11.0')\n\ndocs_extras = [\n 'Sphinx >= 1.3.1',\n 'docutils',\n 'repoze.sphinx.autointerface',\n 'pylons_sphinx_latesturl',\n 'pylons-sphinx-themes',\n 'sphinxcontrib-programoutput',\n ]\n\ntesting_extras = tests_require + [\n 'nose',\n 'coverage',\n 'virtualenv', # for scaffolding tests\n ]\n\nsetup(name='pyramid',\n version='1.6',\n description='The Pyramid Web Framework, a Pylons project',\n long_description=README + '\\n\\n' + CHANGES,\n classifiers=[\n \"Development Status :: 6 - Mature\",\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.2\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Framework :: Pyramid\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Topic :: Internet :: WWW/HTTP :: WSGI\",\n \"License :: Repoze Public License\",\n ],\n keywords='web wsgi pylons pyramid',\n author=\"Chris McDonough, Agendaless Consulting\",\n author_email=\"[email protected]\",\n url=\"http://docs.pylonsproject.org/en/latest/docs/pyramid.html\",\n license=\"BSD-derived (http://www.repoze.org/LICENSE.txt)\",\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n install_requires = install_requires,\n extras_require = {\n 'testing':testing_extras,\n 'docs':docs_extras,\n },\n tests_require = tests_require,\n test_suite=\"pyramid.tests\",\n entry_points = \"\"\"\\\n [pyramid.scaffold]\n 
starter=pyramid.scaffolds:StarterProjectTemplate\n zodb=pyramid.scaffolds:ZODBProjectTemplate\n alchemy=pyramid.scaffolds:AlchemyProjectTemplate\n [pyramid.pshell_runner]\n python=pyramid.scripts.pshell:python_shell_runner\n [console_scripts]\n pcreate = pyramid.scripts.pcreate:main\n pserve = pyramid.scripts.pserve:main\n pshell = pyramid.scripts.pshell:main\n proutes = pyramid.scripts.proutes:main\n pviews = pyramid.scripts.pviews:main\n ptweens = pyramid.scripts.ptweens:main\n prequest = pyramid.scripts.prequest:main\n pdistreport = pyramid.scripts.pdistreport:main\n [paste.server_runner]\n wsgiref = pyramid.scripts.pserve:wsgiref_server_runner\n cherrypy = pyramid.scripts.pserve:cherrypy_server_runner\n \"\"\"\n )\n\n", "path": "setup.py" } ]
[ { "content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany\n# this distribution. THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL\n# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND\n# FITNESS FOR A PARTICULAR PURPOSE\n#\n##############################################################################\n\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\npy_version = sys.version_info[:2]\n\nPY3 = py_version[0] == 3\n\nif PY3:\n if py_version < (3, 2):\n raise RuntimeError('On Python 3, Pyramid requires Python 3.2 or better')\nelse:\n if py_version < (2, 6):\n raise RuntimeError('On Python 2, Pyramid requires Python 2.6 or better')\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n with open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n with open(os.path.join(here, 'CHANGES.txt')) as f:\n CHANGES = f.read()\nexcept IOError:\n README = CHANGES = ''\n\ninstall_requires=[\n 'setuptools',\n 'WebOb >= 1.3.1', # request.domain and CookieProfile\n 'repoze.lru >= 0.4', # py3 compat\n 'zope.interface >= 3.8.0', # has zope.interface.registry\n 'zope.deprecation >= 3.5.0', # py3 compat\n 'venusian >= 1.0a3', # ``ignore``\n 'translationstring >= 0.4', # py3 compat\n 'PasteDeploy >= 1.5.0', # py3 compat\n ]\n\ntests_require = [\n 'WebTest >= 1.3.1', # py3 compat\n ]\n\nif not PY3:\n tests_require.append('zope.component>=3.11.0')\n\ndocs_extras = [\n 'Sphinx >= 1.3.4',\n 'docutils',\n 'repoze.sphinx.autointerface',\n 'pylons_sphinx_latesturl',\n 'pylons-sphinx-themes',\n 'sphinxcontrib-programoutput',\n ]\n\ntesting_extras = tests_require + [\n 'nose',\n 'coverage',\n 'virtualenv', # for scaffolding tests\n ]\n\nsetup(name='pyramid',\n version='1.6',\n description='The Pyramid Web Framework, a Pylons project',\n long_description=README + '\\n\\n' + CHANGES,\n classifiers=[\n \"Development Status :: 6 - Mature\",\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.2\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Framework :: Pyramid\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Topic :: Internet :: WWW/HTTP :: WSGI\",\n \"License :: Repoze Public License\",\n ],\n keywords='web wsgi pylons pyramid',\n author=\"Chris McDonough, Agendaless Consulting\",\n author_email=\"[email protected]\",\n url=\"http://docs.pylonsproject.org/en/latest/docs/pyramid.html\",\n license=\"BSD-derived (http://www.repoze.org/LICENSE.txt)\",\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n install_requires = install_requires,\n extras_require = {\n 'testing':testing_extras,\n 'docs':docs_extras,\n },\n tests_require = tests_require,\n test_suite=\"pyramid.tests\",\n entry_points = \"\"\"\\\n [pyramid.scaffold]\n 
starter=pyramid.scaffolds:StarterProjectTemplate\n zodb=pyramid.scaffolds:ZODBProjectTemplate\n alchemy=pyramid.scaffolds:AlchemyProjectTemplate\n [pyramid.pshell_runner]\n python=pyramid.scripts.pshell:python_shell_runner\n [console_scripts]\n pcreate = pyramid.scripts.pcreate:main\n pserve = pyramid.scripts.pserve:main\n pshell = pyramid.scripts.pshell:main\n proutes = pyramid.scripts.proutes:main\n pviews = pyramid.scripts.pviews:main\n ptweens = pyramid.scripts.ptweens:main\n prequest = pyramid.scripts.prequest:main\n pdistreport = pyramid.scripts.pdistreport:main\n [paste.server_runner]\n wsgiref = pyramid.scripts.pserve:wsgiref_server_runner\n cherrypy = pyramid.scripts.pserve:cherrypy_server_runner\n \"\"\"\n )\n\n", "path": "setup.py" } ]
diff --git a/setup.py b/setup.py index b1624291b8..7d308f35d6 100644 --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ tests_require.append('zope.component>=3.11.0') docs_extras = [ - 'Sphinx >= 1.3.1', + 'Sphinx >= 1.3.4', 'docutils', 'repoze.sphinx.autointerface', 'pylons_sphinx_latesturl',
Pylons__pyramid-2224
Update to Sphinx 1.3.4 when released There is a [bug in Sphinx 1.3.3 and 1.3.1](https://github.com/sphinx-doc/sphinx/issues/2189) (I haven't tried 1.3.2) where next and previous links in Sphinx documentation are broken when going into children and across sibling directories. When 1.3.4 is released, we need to pin sphinx to 1.3.4, which will include the commit made 8 days after the 1.3.3 release.
[ { "content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany\n# this distribution. THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL\n# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND\n# FITNESS FOR A PARTICULAR PURPOSE\n#\n##############################################################################\n\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\npy_version = sys.version_info[:2]\n\nPY3 = py_version[0] == 3\n\nif PY3:\n if py_version < (3, 2):\n raise RuntimeError('On Python 3, Pyramid requires Python 3.2 or better')\nelse:\n if py_version < (2, 6):\n raise RuntimeError('On Python 2, Pyramid requires Python 2.6 or better')\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n with open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n with open(os.path.join(here, 'CHANGES.txt')) as f:\n CHANGES = f.read()\nexcept IOError:\n README = CHANGES = ''\n\ninstall_requires=[\n 'setuptools',\n 'WebOb >= 1.3.1', # request.domain and CookieProfile\n 'repoze.lru >= 0.4', # py3 compat\n 'zope.interface >= 3.8.0', # has zope.interface.registry\n 'zope.deprecation >= 3.5.0', # py3 compat\n 'venusian >= 1.0a3', # ``ignore``\n 'translationstring >= 0.4', # py3 compat\n 'PasteDeploy >= 1.5.0', # py3 compat\n ]\n\ntests_require = [\n 'WebTest >= 1.3.1', # py3 compat\n ]\n\nif not PY3:\n tests_require.append('zope.component>=3.11.0')\n\ndocs_extras = [\n 'Sphinx >= 1.3.1',\n 'docutils',\n 'repoze.sphinx.autointerface',\n 'pylons_sphinx_latesturl',\n 'pylons-sphinx-themes',\n 'sphinxcontrib-programoutput',\n ]\n\ntesting_extras = tests_require + [\n 'nose',\n 'coverage',\n 'virtualenv', # for scaffolding tests\n ]\n\nsetup(name='pyramid',\n version='1.7.dev0',\n description='The Pyramid Web Framework, a Pylons project',\n long_description=README + '\\n\\n' + CHANGES,\n classifiers=[\n \"Development Status :: 6 - Mature\",\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.2\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Framework :: Pyramid\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Topic :: Internet :: WWW/HTTP :: WSGI\",\n \"License :: Repoze Public License\",\n ],\n keywords='web wsgi pylons pyramid',\n author=\"Chris McDonough, Agendaless Consulting\",\n author_email=\"[email protected]\",\n url=\"http://docs.pylonsproject.org/en/latest/docs/pyramid.html\",\n license=\"BSD-derived (http://www.repoze.org/LICENSE.txt)\",\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n install_requires = install_requires,\n extras_require = {\n 'testing':testing_extras,\n 'docs':docs_extras,\n },\n tests_require = tests_require,\n test_suite=\"pyramid.tests\",\n entry_points = \"\"\"\\\n [pyramid.scaffold]\n 
starter=pyramid.scaffolds:StarterProjectTemplate\n zodb=pyramid.scaffolds:ZODBProjectTemplate\n alchemy=pyramid.scaffolds:AlchemyProjectTemplate\n [pyramid.pshell_runner]\n python=pyramid.scripts.pshell:python_shell_runner\n [console_scripts]\n pcreate = pyramid.scripts.pcreate:main\n pserve = pyramid.scripts.pserve:main\n pshell = pyramid.scripts.pshell:main\n proutes = pyramid.scripts.proutes:main\n pviews = pyramid.scripts.pviews:main\n ptweens = pyramid.scripts.ptweens:main\n prequest = pyramid.scripts.prequest:main\n pdistreport = pyramid.scripts.pdistreport:main\n [paste.server_runner]\n wsgiref = pyramid.scripts.pserve:wsgiref_server_runner\n cherrypy = pyramid.scripts.pserve:cherrypy_server_runner\n \"\"\"\n )\n\n", "path": "setup.py" } ]
[ { "content": "##############################################################################\n#\n# Copyright (c) 2008-2013 Agendaless Consulting and Contributors.\n# All Rights Reserved.\n#\n# This software is subject to the provisions of the BSD-like license at\n# http://www.repoze.org/LICENSE.txt. A copy of the license should accompany\n# this distribution. THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY AND ALL\n# EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND\n# FITNESS FOR A PARTICULAR PURPOSE\n#\n##############################################################################\n\nimport os\nimport sys\n\nfrom setuptools import setup, find_packages\n\npy_version = sys.version_info[:2]\n\nPY3 = py_version[0] == 3\n\nif PY3:\n if py_version < (3, 2):\n raise RuntimeError('On Python 3, Pyramid requires Python 3.2 or better')\nelse:\n if py_version < (2, 6):\n raise RuntimeError('On Python 2, Pyramid requires Python 2.6 or better')\n\nhere = os.path.abspath(os.path.dirname(__file__))\ntry:\n with open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n with open(os.path.join(here, 'CHANGES.txt')) as f:\n CHANGES = f.read()\nexcept IOError:\n README = CHANGES = ''\n\ninstall_requires=[\n 'setuptools',\n 'WebOb >= 1.3.1', # request.domain and CookieProfile\n 'repoze.lru >= 0.4', # py3 compat\n 'zope.interface >= 3.8.0', # has zope.interface.registry\n 'zope.deprecation >= 3.5.0', # py3 compat\n 'venusian >= 1.0a3', # ``ignore``\n 'translationstring >= 0.4', # py3 compat\n 'PasteDeploy >= 1.5.0', # py3 compat\n ]\n\ntests_require = [\n 'WebTest >= 1.3.1', # py3 compat\n ]\n\nif not PY3:\n tests_require.append('zope.component>=3.11.0')\n\ndocs_extras = [\n 'Sphinx >= 1.3.4',\n 'docutils',\n 'repoze.sphinx.autointerface',\n 'pylons_sphinx_latesturl',\n 'pylons-sphinx-themes',\n 'sphinxcontrib-programoutput',\n ]\n\ntesting_extras = tests_require + [\n 'nose',\n 'coverage',\n 'virtualenv', # for scaffolding tests\n ]\n\nsetup(name='pyramid',\n version='1.7.dev0',\n description='The Pyramid Web Framework, a Pylons project',\n long_description=README + '\\n\\n' + CHANGES,\n classifiers=[\n \"Development Status :: 6 - Mature\",\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.2\",\n \"Programming Language :: Python :: 3.3\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Framework :: Pyramid\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Topic :: Internet :: WWW/HTTP :: WSGI\",\n \"License :: Repoze Public License\",\n ],\n keywords='web wsgi pylons pyramid',\n author=\"Chris McDonough, Agendaless Consulting\",\n author_email=\"[email protected]\",\n url=\"http://docs.pylonsproject.org/en/latest/docs/pyramid.html\",\n license=\"BSD-derived (http://www.repoze.org/LICENSE.txt)\",\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n install_requires = install_requires,\n extras_require = {\n 'testing':testing_extras,\n 'docs':docs_extras,\n },\n tests_require = tests_require,\n test_suite=\"pyramid.tests\",\n entry_points = \"\"\"\\\n [pyramid.scaffold]\n 
starter=pyramid.scaffolds:StarterProjectTemplate\n zodb=pyramid.scaffolds:ZODBProjectTemplate\n alchemy=pyramid.scaffolds:AlchemyProjectTemplate\n [pyramid.pshell_runner]\n python=pyramid.scripts.pshell:python_shell_runner\n [console_scripts]\n pcreate = pyramid.scripts.pcreate:main\n pserve = pyramid.scripts.pserve:main\n pshell = pyramid.scripts.pshell:main\n proutes = pyramid.scripts.proutes:main\n pviews = pyramid.scripts.pviews:main\n ptweens = pyramid.scripts.ptweens:main\n prequest = pyramid.scripts.prequest:main\n pdistreport = pyramid.scripts.pdistreport:main\n [paste.server_runner]\n wsgiref = pyramid.scripts.pserve:wsgiref_server_runner\n cherrypy = pyramid.scripts.pserve:cherrypy_server_runner\n \"\"\"\n )\n\n", "path": "setup.py" } ]
diff --git a/setup.py b/setup.py index 9bdfcd90ed..daccd32587 100644 --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ tests_require.append('zope.component>=3.11.0') docs_extras = [ - 'Sphinx >= 1.3.1', + 'Sphinx >= 1.3.4', 'docutils', 'repoze.sphinx.autointerface', 'pylons_sphinx_latesturl',
google__openhtf-870
'module' object has no attribute 'MIMETYPE_MAP' - station_server.py

I have openHTF installed in two locations, but I only get this error on one of them (the newer one I installed today; I installed from source). I try running `frontend_example.py`, and this is the error I get:

```
Traceback (most recent call last):
  File "frontend_example.py", line 21, in <module>
    from openhtf.output.servers import station_server
  File "/home/lab_machine_2/anaconda2/lib/python2.7/site-packages/openhtf-1.3.0-py2.7.egg/openhtf/output/servers/station_server.py", line 36, in <module>
    v: k for k, v in six.iteritems(mfg_inspector.MIMETYPE_MAP)
AttributeError: 'module' object has no attribute 'MIMETYPE_MAP'
```

I am working on a project that also crashes on the new installation but works on a previously installed version.
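The traceback points at module-level code in `station_server.py` that builds a reverse map from `mfg_inspector.MIMETYPE_MAP`, so simply importing the module fails whenever the installed `mfg_inspector` does not define that attribute. The patch further down resolves this by deleting the unused dict; purely as an illustration of the failure mode (not the project's fix), the same lookup could be guarded like this:

```python
import six
from openhtf.output.callbacks import mfg_inspector

# Illustration only: fall back to an empty map when the installed
# mfg_inspector does not expose MIMETYPE_MAP, so importing this module can
# never raise AttributeError. The actual fix removes the dict entirely.
MIMETYPE_REVERSE_MAP = {
    v: k for k, v in six.iteritems(getattr(mfg_inspector, 'MIMETYPE_MAP', {}))
}
```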
[ { "content": "\"\"\"Serves an Angular frontend and information about a running OpenHTF test.\n\nThis server does not currently support more than one test running in the same\nprocess. However, the dashboard server (dashboard_server.py) can be used to\naggregate info from multiple station servers with a single frontend.\n\"\"\"\n\nimport contextlib\nimport itertools\nimport json\nimport logging\nimport os\nimport re\nimport six\nimport socket\nimport threading\nimport time\nimport types\n\nimport sockjs.tornado\n\nimport openhtf\nfrom openhtf.output.callbacks import mfg_inspector\nfrom openhtf.output.servers import pub_sub\nfrom openhtf.output.servers import web_gui_server\nfrom openhtf.util import conf\nfrom openhtf.util import data\nfrom openhtf.util import functions\nfrom openhtf.util import logs\nfrom openhtf.util import multicast\nfrom openhtf.util import timeouts\n\nSTATION_SERVER_TYPE = 'station'\n\nMIMETYPE_REVERSE_MAP = {\n v: k for k, v in six.iteritems(mfg_inspector.MIMETYPE_MAP)\n}\nMULTICAST_QUERY = 'OPENHTF_DISCOVERY'\nTEST_STATUS_COMPLETED = 'COMPLETED'\n\n_LOG = logging.getLogger(__name__)\n\n# Constants related to response times within the server.\n_CHECK_FOR_FINISHED_TEST_POLL_S = 0.5\n_DEFAULT_FRONTEND_THROTTLE_S = 0.15\n_WAIT_FOR_ANY_EVENT_POLL_S = 0.05\n_WAIT_FOR_EXECUTING_TEST_POLL_S = 0.1\n\nconf.declare('frontend_throttle_s', default_value=_DEFAULT_FRONTEND_THROTTLE_S,\n description=('Min wait time between successive updates to the '\n 'frontend.'))\nconf.declare('station_server_port', default_value=0,\n description=('Port on which to serve the app. If set to zero (the '\n 'default) then an arbitrary port will be chosen.'))\n\n# These have default values in openhtf.util.multicast.py.\nconf.declare('station_discovery_address')\nconf.declare('station_discovery_port')\nconf.declare('station_discovery_ttl')\n\n\ndef _get_executing_test():\n \"\"\"Get the currently executing test and its state.\n\n When this function returns, it is not guaranteed that the returned test is\n still running. A consumer of this function that wants to access test.state is\n exposed to a race condition in which test.state may become None at any time\n due to the test finishing. To address this, in addition to returning the test\n itself, this function returns the last known test state.\n\n Returns:\n test: The test that was executing when this function was called, or None.\n test_state: The state of the executing test, or None.\n \"\"\"\n tests = list(six.itervalues(openhtf.Test.TEST_INSTANCES))\n\n if not tests:\n return None, None\n\n if len(tests) > 1:\n _LOG.warn('Station server does not support multiple executing tests.')\n\n test = tests[0]\n test_state = test.state\n\n if test_state is None:\n # This is the case if:\n # 1. The test executor was created but has not started running.\n # 2. The test finished while this function was running, after we got the\n # list of tests but before we accessed the test state.\n return None, None\n\n return test, test_state\n\n\ndef _test_state_from_record(test_record_dict, execution_uid=None):\n \"\"\"Convert a test record dict to a test state dict.\n\n Args:\n test_record_dict: An OpenHTF TestRecord, converted to base types.\n execution_uid: Execution ID of the running test.\n\n Returns:\n Dictionary representation of a test's final state. 
On top of the fields from\n TestState._asdict() we add 'execution_uid' which is needed by the\n frontend app.\n \"\"\"\n return {\n 'execution_uid': execution_uid,\n 'plugs': {\n 'plug_states': {},\n },\n 'running_phase_state': None,\n 'status': TEST_STATUS_COMPLETED,\n 'test_record': test_record_dict,\n }\n\n\ndef _wait_for_any_event(events, timeout_s):\n \"\"\"Wait for any in a list of threading.Event's to be set.\n\n Args:\n events: List of threading.Event's.\n timeout_s: Max duration in seconds to wait before returning.\n\n Returns:\n True if at least one event was set before the timeout expired, else False.\n \"\"\"\n def any_event_set():\n return any(event.is_set() for event in events)\n\n result = timeouts.loop_until_timeout_or_true(\n timeout_s, any_event_set, sleep_s=_WAIT_FOR_ANY_EVENT_POLL_S)\n\n return result or any_event_set()\n\n\nclass StationWatcher(threading.Thread):\n \"\"\"Watches for changes in the state of the currently running OpenHTF test.\n\n The StationWatcher uses an event-based mechanism to detect changes in test\n state. This means we rely on the OpenHTF framework to call notify_update()\n when a change occurs. Authors of frontend-aware plugs must ensure that\n notify_update() is called when a change occurs to that plug's state.\n \"\"\"\n daemon = True\n\n def __init__(self, update_callback):\n super(StationWatcher, self).__init__(name=type(self).__name__)\n self._update_callback = update_callback\n\n def run(self):\n \"\"\"Call self._poll_for_update() in a loop and handle errors.\"\"\"\n while True:\n try:\n self._poll_for_update()\n except RuntimeError as error:\n # Note that because logging triggers a call to notify_update(), by\n # logging a message, we automatically retry publishing the update\n # after an error occurs.\n if error.message == 'dictionary changed size during iteration':\n # These errors occur occasionally and it is infeasible to get rid of\n # them entirely unless data.convert_to_base_types() is made\n # thread-safe. 
Ignore the error and retry quickly.\n _LOG.debug('Ignoring (probably harmless) error in station watcher: '\n '`dictionary changed size during iteration`.')\n time.sleep(0.1)\n else:\n _LOG.exception('Error in station watcher: %s', error)\n time.sleep(1)\n except Exception as error: # pylint: disable=broad-except\n _LOG.exception('Error in station watcher: %s', error)\n time.sleep(1)\n\n @functions.call_at_most_every(float(conf.frontend_throttle_s))\n def _poll_for_update(self):\n \"\"\"Call the callback with the current test state, then wait for a change.\"\"\"\n test, test_state = _get_executing_test()\n\n if test is None:\n time.sleep(_WAIT_FOR_EXECUTING_TEST_POLL_S)\n return\n\n state_dict, event = self._to_dict_with_event(test_state)\n self._update_callback(state_dict)\n\n plug_manager = test_state.plug_manager\n plug_events = [\n plug_manager.get_plug_by_class_path(plug_name).asdict_with_event()[1]\n for plug_name in plug_manager.get_frontend_aware_plug_names()\n ]\n events = [event] + plug_events\n\n # Wait for the test state or a plug state to change, or for the previously\n # executing test to finish.\n while not _wait_for_any_event(events, _CHECK_FOR_FINISHED_TEST_POLL_S):\n new_test, _ = _get_executing_test()\n if test != new_test:\n break\n\n @classmethod\n def _to_dict_with_event(cls, test_state):\n \"\"\"Process a test state into the format we want to send to the frontend.\"\"\"\n original_dict, event = test_state.asdict_with_event()\n\n # This line may produce a 'dictionary changed size during iteration' error.\n test_state_dict = data.convert_to_base_types(original_dict)\n\n test_state_dict['execution_uid'] = test_state.execution_uid\n return test_state_dict, event\n\n\nclass DashboardPubSub(sockjs.tornado.SockJSConnection):\n \"\"\"WebSocket endpoint for the list of available stations.\n\n In this case, there is always exactly one available station: the station\n running the StationServer. See dashboard_server.py for an implementation of\n the dashboard WebSocket endpoint for multiple stations.\n\n TODO(Kenadia): Remove this endpoint from the station server. Since the\n frontend knows whether it is running off of a station server or dashboard\n server, it should be smart enough not to look for this endpoint on the station\n server.\n \"\"\"\n port = None # Set by for_port().\n\n @classmethod\n def for_port(cls, port):\n \"\"\"Returns a new subclass with the port set.\"\"\"\n return type(cls.__name__, (cls,), {'port': port})\n\n def on_open(self, unused_info):\n \"\"\"Called by the base class when a client connects.\"\"\"\n self.send(self._make_message())\n\n @classmethod\n def _make_message(cls):\n host = 'localhost'\n host_port = '%s:%s' % (host, cls.port)\n return {\n host_port: {\n 'station_id': conf.station_id, # From openhtf.core.test_state.\n 'host': host,\n 'port': cls.port,\n 'status': 'ONLINE',\n }\n }\n\n\nclass StationPubSub(pub_sub.PubSub):\n \"\"\"WebSocket endpoint for test updates.\n\n The endpoint provides information about the test that is currently running\n with this StationServer. 
Two types of message are sent: 'update' and 'record',\n where 'record' indicates the final state of a test.\n \"\"\"\n _lock = threading.Lock() # Required by pub_sub.PubSub.\n subscribers = set() # Required by pub_sub.PubSub.\n _last_execution_uid = None\n _last_message = None\n\n @classmethod\n def publish_test_record(cls, test_record):\n test_record_dict = data.convert_to_base_types(test_record)\n test_state_dict = _test_state_from_record(test_record_dict,\n cls._last_execution_uid)\n cls._publish_test_state(test_state_dict, 'record')\n\n @classmethod\n def publish_update(cls, test_state_dict):\n \"\"\"Publish the state of the currently executing test.\"\"\"\n cls._publish_test_state(test_state_dict, 'update')\n\n @classmethod\n def _publish_test_state(cls, test_state_dict, message_type):\n message = {\n 'state': test_state_dict,\n 'test_uid': test_state_dict['execution_uid'],\n 'type': message_type,\n }\n super(StationPubSub, cls).publish(message)\n cls._last_execution_uid = test_state_dict['execution_uid']\n cls._last_message = message\n\n def on_subscribe(self, info):\n \"\"\"Send the more recent test state to new subscribers when they connect.\"\"\"\n if self._last_message is not None:\n self.send(self._last_message)\n\n\nclass BaseTestHandler(web_gui_server.CorsRequestHandler):\n \"\"\"Base class for HTTP endpoints that get test data.\"\"\"\n\n def get_test(self, test_uid):\n \"\"\"Get the specified test. Write 404 and return None if it is not found.\"\"\"\n test, test_state = _get_executing_test()\n\n if test is None or str(test.uid) != test_uid:\n self.write('Unknown test UID %s' % test_uid)\n self.set_status(404)\n return None, None\n\n return test, test_state\n\n\nclass AttachmentsHandler(BaseTestHandler):\n \"\"\"GET endpoint for a file attached to a test.\"\"\"\n\n def get(self, test_uid, phase_descriptor_id, attachment_name):\n _, test_state = self.get_test(test_uid)\n\n if test_state is None:\n return\n\n # Find the phase matching `phase_descriptor_id`.\n running_phase = test_state.running_phase_state\n phase_records = itertools.chain(\n test_state.test_record.phases,\n [running_phase.phase_record] if running_phase is not None else [])\n\n matched_phase = None\n for phase in phase_records:\n if str(phase.descriptor_id) == phase_descriptor_id:\n matched_phase = phase\n break\n\n if matched_phase is None:\n self.write('Unknown phase descriptor %s' % phase_descriptor_id)\n self.set_status(404)\n return\n\n # Find the attachment matching `attachment_name`.\n if attachment_name in matched_phase.attachments:\n attachment = matched_phase.attachments[attachment_name]\n else:\n self.write('Unknown attachment %s' % attachment_name)\n self.set_status(404)\n return\n\n self.set_header('Content-Type', attachment.mimetype)\n self.write(attachment.data)\n\n\nclass PhasesHandler(BaseTestHandler):\n \"\"\"GET endpoint for phase descriptors for a test, i.e. 
the full phase list.\"\"\"\n\n def get(self, test_uid):\n test, _ = self.get_test(test_uid)\n\n if test is None:\n return\n\n phase_descriptors = [\n dict(id=id(phase), **data.convert_to_base_types(phase))\n for phase in test.descriptor.phase_group]\n\n # Wrap value in a dict because writing a list directly is prohibited.\n self.write({'data': phase_descriptors})\n\n\nclass PlugsHandler(BaseTestHandler):\n \"\"\"POST endpoints to receive plug responses from the frontend.\"\"\"\n\n def post(self, test_uid, plug_name):\n _, test_state = self.get_test(test_uid)\n\n if test_state is None:\n return\n\n # Find the plug matching `plug_name`.\n plug = test_state.plug_manager.get_plug_by_class_path(plug_name)\n if plug is None:\n self.write('Unknown plug %s' % plug_name)\n self.set_status(404)\n return\n\n try:\n request = json.loads(self.request.body.decode('utf-8'))\n method_name = request['method']\n args = request['args']\n except (KeyError, ValueError):\n self.write('Malformed JSON request.')\n self.set_status(400)\n return\n\n method = getattr(plug, method_name, None)\n\n if not (plug.enable_remote and\n isinstance(method, types.MethodType) and\n not method_name.startswith('_') and\n method_name not in plug.disable_remote_attrs):\n self.write('Cannot access method %s of plug %s.' % (method_name,\n plug_name))\n self.set_status(400)\n return\n\n try:\n response = json.dumps(method(*args))\n except Exception as e: # pylint: disable=broad-except\n self.write('Plug error: %s' % repr(e))\n self.set_status(500)\n else:\n self.write(response)\n\n\nclass BaseHistoryHandler(web_gui_server.CorsRequestHandler):\n\n def initialize(self, history_path):\n self.history_path = history_path\n\n\nclass HistoryListHandler(BaseHistoryHandler):\n \"\"\"GET endpoint for the list of tests in the history.\n\n When requesting the history list, we respond with all files in the history\n folder ending with the '.pb' extension. Ideally, file names should match the\n following form (see chtf.py):\n\n 'mfg_event_{dut_id}_{start_time_millis}.pb'\n\n The requester can filter the returned history items by passing DUT ID and/or\n start time as query parameters.\n \"\"\"\n\n def get(self):\n filter_dut_id = self.get_arguments('dutId')\n filter_start_time_millis = self.get_arguments('startTimeMillis')\n\n history_items = []\n\n for file_name in os.listdir(self.history_path):\n if not file_name.endswith('.pb'):\n continue\n\n if not os.path.isfile(os.path.join(self.history_path, file_name)):\n continue\n\n dut_id = None\n start_time_millis = None\n match = re.match(r'mfg_event_(.+)_(\\d+)\\.pb$', file_name)\n\n if match is not None:\n dut_id = match.group(1)\n start_time_millis = int(match.group(2))\n\n if filter_dut_id and dut_id not in filter_dut_id:\n continue\n\n if (filter_start_time_millis and\n str(start_time_millis) not in filter_start_time_millis):\n continue\n\n history_items.append({\n 'dut_id': dut_id,\n 'file_name': file_name,\n 'start_time_millis': start_time_millis,\n })\n\n # Wrap value in a dict because writing a list directly is prohibited.\n self.write({'data': history_items})\n\n\nclass HistoryItemHandler(BaseHistoryHandler):\n \"\"\"GET endpoint for a test record from the history.\"\"\"\n\n def get(self, file_name):\n # TODO(Kenadia): Implement the history item handler. 
The implementation\n # depends on the format used to store test records on disk.\n self.write('Not implemented.')\n self.set_status(500)\n\n\nclass HistoryAttachmentsHandler(BaseHistoryHandler):\n \"\"\"GET endpoint for an attachment from an MfgEvent in the history.\n\n The sha1 query parameter is optional and used as a backup to identify an\n attachment if the name does not match any known name. Including this parameter\n is recommended, as some systems may modify attachment names when storing them\n on the MfgEvent in the case where multiple attachments have the same name.\n \"\"\"\n\n def get(self, file_name, attachment_name):\n # TODO(Kenadia): Implement the history item handler. The implementation\n # depends on the format used to store test records on disk.\n self.write('Not implemented.')\n self.set_status(500)\n\n\nclass StationMulticast(multicast.MulticastListener):\n \"\"\"Announce the existence of a station server to any searching dashboards.\"\"\"\n\n def __init__(self, station_server_port):\n # These have default values in openhtf.util.multicast.py.\n kwargs = {\n attr: conf['station_discovery_%s' % attr]\n for attr in ('address', 'port', 'ttl')\n if 'station_discovery_%s' % attr in conf\n }\n super(StationMulticast, self).__init__(self._make_message, **kwargs)\n self.station_server_port = station_server_port\n\n def _make_message(self, message):\n if message != MULTICAST_QUERY:\n if message == 'OPENHTF_PING':\n # Don't log for the old multicast string.\n return\n _LOG.debug('Got unexpected traffic on multicast socket: %s', message)\n return\n\n _, test_state = _get_executing_test()\n\n if test_state:\n cell = test_state.test_record.metadata.get('cell')\n test_description = test_state.test_record.metadata.get('test_description')\n test_name = test_state.test_record.metadata.get('test_name')\n else:\n cell = None\n test_description = None\n test_name = None\n\n return json.dumps({\n 'cell': cell,\n 'port': self.station_server_port,\n 'station_id': conf.station_id, # From openhtf.core.test_state.\n 'test_description': test_description,\n 'test_name': test_name,\n })\n\n\nclass StationServer(web_gui_server.WebGuiServer):\n \"\"\"Provides endpoints for interacting with an OpenHTF test.\n\n Also serves an Angular frontend that interfaces with those endpoints.\n\n Can be used as a context manager to ensure the server is stopped cleanly:\n\n with StationServer(history_path) as server:\n test = openhtf.Test(*my_phases)\n test.add_output_callbacks(server.publish_final_state)\n test.execute()\n\n Can also be used via the maybe_run() helper function:\n\n with maybe_run(should_run, history_path) as server:\n test = openhtf.Test(*my_phases)\n if server:\n test.add_output_callbacks(server.publish_final_state)\n test.execute()\n \"\"\"\n\n def __init__(self, history_path=None):\n # Disable tornado's logging.\n # TODO(Kenadia): Enable these logs if verbosity flag is at least -vvv.\n # I think this will require changing how StoreRepsInModule works.\n # Currently, if we call logs.ARG_PARSER.parse_known_args() multiple\n # times, we multiply the number of v's that we get.\n tornado_logger = logging.getLogger('tornado')\n tornado_logger.propagate = False\n if not tornado_logger.handlers:\n tornado_logger.addHandler(logging.NullHandler())\n\n # Bind port early so that the correct port number can be used in the routes.\n sockets, port = web_gui_server.bind_port(int(conf.station_server_port))\n\n # Set up the station watcher.\n station_watcher = StationWatcher(StationPubSub.publish_update)\n 
station_watcher.start()\n\n # Set up the SockJS endpoints.\n dashboard_class = DashboardPubSub.for_port(port)\n dash_router = sockjs.tornado.SockJSRouter(dashboard_class, '/sub/dashboard')\n station_router = sockjs.tornado.SockJSRouter(StationPubSub, '/sub/station')\n routes = dash_router.urls + station_router.urls\n\n # Set up the other endpoints.\n routes.extend((\n (r'/tests/(?P<test_uid>[\\w\\d:]+)/phases', PhasesHandler),\n (r'/tests/(?P<test_uid>[\\w\\d:]+)/plugs/(?P<plug_name>.+)',\n PlugsHandler),\n (r'/tests/(?P<test_uid>[\\w\\d:]+)/phases/(?P<phase_descriptor_id>\\d+)/'\n 'attachments/(?P<attachment_name>.+)', AttachmentsHandler),\n ))\n\n # Optionally enable history from disk.\n if history_path is not None:\n routes.extend((\n (r'/history', HistoryListHandler, {'history_path': history_path}),\n (r'/history/(?P<file_name>[^/]+)', HistoryItemHandler,\n {'history_path': history_path}),\n (r'/history/(?P<file_name>[^/]+)/attachments/(?P<attachment_name>.+)',\n HistoryAttachmentsHandler, {'history_path': history_path}),\n ))\n\n super(StationServer, self).__init__(routes, port, sockets=sockets)\n self.station_multicast = StationMulticast(port)\n\n def _get_config(self):\n return {\n 'server_type': STATION_SERVER_TYPE,\n }\n\n def run(self):\n _LOG.info('Announcing station server via multicast on %s:%s',\n self.station_multicast.address, self.station_multicast.port)\n self.station_multicast.start()\n _LOG.info(\n 'Starting station server at:\\n'\n ' Local: http://localhost:{port}\\n'\n ' Remote: http://{host}:{port}'\n .format(host=socket.gethostname(), port=self.port))\n super(StationServer, self).run()\n\n def stop(self):\n _LOG.info('Stopping station server.')\n super(StationServer, self).stop()\n _LOG.info('Stopping multicast.')\n self.station_multicast.stop(timeout_s=0)\n\n def publish_final_state(self, test_record):\n \"\"\"Test output callback publishing a final state from the test record.\"\"\"\n StationPubSub.publish_test_record(test_record)\n\n\[email protected]\ndef maybe_run(should_run, history_path=None):\n \"\"\"Provides a context which conditionally runs a StationServer.\"\"\"\n if not should_run:\n yield\n return\n with StationServer(history_path) as server:\n yield server\n", "path": "openhtf/output/servers/station_server.py" } ]
[ { "content": "\"\"\"Serves an Angular frontend and information about a running OpenHTF test.\n\nThis server does not currently support more than one test running in the same\nprocess. However, the dashboard server (dashboard_server.py) can be used to\naggregate info from multiple station servers with a single frontend.\n\"\"\"\n\nimport contextlib\nimport itertools\nimport json\nimport logging\nimport os\nimport re\nimport six\nimport socket\nimport threading\nimport time\nimport types\n\nimport sockjs.tornado\n\nimport openhtf\nfrom openhtf.output.callbacks import mfg_inspector\nfrom openhtf.output.servers import pub_sub\nfrom openhtf.output.servers import web_gui_server\nfrom openhtf.util import conf\nfrom openhtf.util import data\nfrom openhtf.util import functions\nfrom openhtf.util import logs\nfrom openhtf.util import multicast\nfrom openhtf.util import timeouts\n\nSTATION_SERVER_TYPE = 'station'\n\nMULTICAST_QUERY = 'OPENHTF_DISCOVERY'\nTEST_STATUS_COMPLETED = 'COMPLETED'\n\n_LOG = logging.getLogger(__name__)\n\n# Constants related to response times within the server.\n_CHECK_FOR_FINISHED_TEST_POLL_S = 0.5\n_DEFAULT_FRONTEND_THROTTLE_S = 0.15\n_WAIT_FOR_ANY_EVENT_POLL_S = 0.05\n_WAIT_FOR_EXECUTING_TEST_POLL_S = 0.1\n\nconf.declare('frontend_throttle_s', default_value=_DEFAULT_FRONTEND_THROTTLE_S,\n description=('Min wait time between successive updates to the '\n 'frontend.'))\nconf.declare('station_server_port', default_value=0,\n description=('Port on which to serve the app. If set to zero (the '\n 'default) then an arbitrary port will be chosen.'))\n\n# These have default values in openhtf.util.multicast.py.\nconf.declare('station_discovery_address')\nconf.declare('station_discovery_port')\nconf.declare('station_discovery_ttl')\n\n\ndef _get_executing_test():\n \"\"\"Get the currently executing test and its state.\n\n When this function returns, it is not guaranteed that the returned test is\n still running. A consumer of this function that wants to access test.state is\n exposed to a race condition in which test.state may become None at any time\n due to the test finishing. To address this, in addition to returning the test\n itself, this function returns the last known test state.\n\n Returns:\n test: The test that was executing when this function was called, or None.\n test_state: The state of the executing test, or None.\n \"\"\"\n tests = list(six.itervalues(openhtf.Test.TEST_INSTANCES))\n\n if not tests:\n return None, None\n\n if len(tests) > 1:\n _LOG.warn('Station server does not support multiple executing tests.')\n\n test = tests[0]\n test_state = test.state\n\n if test_state is None:\n # This is the case if:\n # 1. The test executor was created but has not started running.\n # 2. The test finished while this function was running, after we got the\n # list of tests but before we accessed the test state.\n return None, None\n\n return test, test_state\n\n\ndef _test_state_from_record(test_record_dict, execution_uid=None):\n \"\"\"Convert a test record dict to a test state dict.\n\n Args:\n test_record_dict: An OpenHTF TestRecord, converted to base types.\n execution_uid: Execution ID of the running test.\n\n Returns:\n Dictionary representation of a test's final state. 
On top of the fields from\n TestState._asdict() we add 'execution_uid' which is needed by the\n frontend app.\n \"\"\"\n return {\n 'execution_uid': execution_uid,\n 'plugs': {\n 'plug_states': {},\n },\n 'running_phase_state': None,\n 'status': TEST_STATUS_COMPLETED,\n 'test_record': test_record_dict,\n }\n\n\ndef _wait_for_any_event(events, timeout_s):\n \"\"\"Wait for any in a list of threading.Event's to be set.\n\n Args:\n events: List of threading.Event's.\n timeout_s: Max duration in seconds to wait before returning.\n\n Returns:\n True if at least one event was set before the timeout expired, else False.\n \"\"\"\n def any_event_set():\n return any(event.is_set() for event in events)\n\n result = timeouts.loop_until_timeout_or_true(\n timeout_s, any_event_set, sleep_s=_WAIT_FOR_ANY_EVENT_POLL_S)\n\n return result or any_event_set()\n\n\nclass StationWatcher(threading.Thread):\n \"\"\"Watches for changes in the state of the currently running OpenHTF test.\n\n The StationWatcher uses an event-based mechanism to detect changes in test\n state. This means we rely on the OpenHTF framework to call notify_update()\n when a change occurs. Authors of frontend-aware plugs must ensure that\n notify_update() is called when a change occurs to that plug's state.\n \"\"\"\n daemon = True\n\n def __init__(self, update_callback):\n super(StationWatcher, self).__init__(name=type(self).__name__)\n self._update_callback = update_callback\n\n def run(self):\n \"\"\"Call self._poll_for_update() in a loop and handle errors.\"\"\"\n while True:\n try:\n self._poll_for_update()\n except RuntimeError as error:\n # Note that because logging triggers a call to notify_update(), by\n # logging a message, we automatically retry publishing the update\n # after an error occurs.\n if error.message == 'dictionary changed size during iteration':\n # These errors occur occasionally and it is infeasible to get rid of\n # them entirely unless data.convert_to_base_types() is made\n # thread-safe. 
Ignore the error and retry quickly.\n _LOG.debug('Ignoring (probably harmless) error in station watcher: '\n '`dictionary changed size during iteration`.')\n time.sleep(0.1)\n else:\n _LOG.exception('Error in station watcher: %s', error)\n time.sleep(1)\n except Exception as error: # pylint: disable=broad-except\n _LOG.exception('Error in station watcher: %s', error)\n time.sleep(1)\n\n @functions.call_at_most_every(float(conf.frontend_throttle_s))\n def _poll_for_update(self):\n \"\"\"Call the callback with the current test state, then wait for a change.\"\"\"\n test, test_state = _get_executing_test()\n\n if test is None:\n time.sleep(_WAIT_FOR_EXECUTING_TEST_POLL_S)\n return\n\n state_dict, event = self._to_dict_with_event(test_state)\n self._update_callback(state_dict)\n\n plug_manager = test_state.plug_manager\n plug_events = [\n plug_manager.get_plug_by_class_path(plug_name).asdict_with_event()[1]\n for plug_name in plug_manager.get_frontend_aware_plug_names()\n ]\n events = [event] + plug_events\n\n # Wait for the test state or a plug state to change, or for the previously\n # executing test to finish.\n while not _wait_for_any_event(events, _CHECK_FOR_FINISHED_TEST_POLL_S):\n new_test, _ = _get_executing_test()\n if test != new_test:\n break\n\n @classmethod\n def _to_dict_with_event(cls, test_state):\n \"\"\"Process a test state into the format we want to send to the frontend.\"\"\"\n original_dict, event = test_state.asdict_with_event()\n\n # This line may produce a 'dictionary changed size during iteration' error.\n test_state_dict = data.convert_to_base_types(original_dict)\n\n test_state_dict['execution_uid'] = test_state.execution_uid\n return test_state_dict, event\n\n\nclass DashboardPubSub(sockjs.tornado.SockJSConnection):\n \"\"\"WebSocket endpoint for the list of available stations.\n\n In this case, there is always exactly one available station: the station\n running the StationServer. See dashboard_server.py for an implementation of\n the dashboard WebSocket endpoint for multiple stations.\n\n TODO(Kenadia): Remove this endpoint from the station server. Since the\n frontend knows whether it is running off of a station server or dashboard\n server, it should be smart enough not to look for this endpoint on the station\n server.\n \"\"\"\n port = None # Set by for_port().\n\n @classmethod\n def for_port(cls, port):\n \"\"\"Returns a new subclass with the port set.\"\"\"\n return type(cls.__name__, (cls,), {'port': port})\n\n def on_open(self, unused_info):\n \"\"\"Called by the base class when a client connects.\"\"\"\n self.send(self._make_message())\n\n @classmethod\n def _make_message(cls):\n host = 'localhost'\n host_port = '%s:%s' % (host, cls.port)\n return {\n host_port: {\n 'station_id': conf.station_id, # From openhtf.core.test_state.\n 'host': host,\n 'port': cls.port,\n 'status': 'ONLINE',\n }\n }\n\n\nclass StationPubSub(pub_sub.PubSub):\n \"\"\"WebSocket endpoint for test updates.\n\n The endpoint provides information about the test that is currently running\n with this StationServer. 
Two types of message are sent: 'update' and 'record',\n where 'record' indicates the final state of a test.\n \"\"\"\n _lock = threading.Lock() # Required by pub_sub.PubSub.\n subscribers = set() # Required by pub_sub.PubSub.\n _last_execution_uid = None\n _last_message = None\n\n @classmethod\n def publish_test_record(cls, test_record):\n test_record_dict = data.convert_to_base_types(test_record)\n test_state_dict = _test_state_from_record(test_record_dict,\n cls._last_execution_uid)\n cls._publish_test_state(test_state_dict, 'record')\n\n @classmethod\n def publish_update(cls, test_state_dict):\n \"\"\"Publish the state of the currently executing test.\"\"\"\n cls._publish_test_state(test_state_dict, 'update')\n\n @classmethod\n def _publish_test_state(cls, test_state_dict, message_type):\n message = {\n 'state': test_state_dict,\n 'test_uid': test_state_dict['execution_uid'],\n 'type': message_type,\n }\n super(StationPubSub, cls).publish(message)\n cls._last_execution_uid = test_state_dict['execution_uid']\n cls._last_message = message\n\n def on_subscribe(self, info):\n \"\"\"Send the more recent test state to new subscribers when they connect.\"\"\"\n if self._last_message is not None:\n self.send(self._last_message)\n\n\nclass BaseTestHandler(web_gui_server.CorsRequestHandler):\n \"\"\"Base class for HTTP endpoints that get test data.\"\"\"\n\n def get_test(self, test_uid):\n \"\"\"Get the specified test. Write 404 and return None if it is not found.\"\"\"\n test, test_state = _get_executing_test()\n\n if test is None or str(test.uid) != test_uid:\n self.write('Unknown test UID %s' % test_uid)\n self.set_status(404)\n return None, None\n\n return test, test_state\n\n\nclass AttachmentsHandler(BaseTestHandler):\n \"\"\"GET endpoint for a file attached to a test.\"\"\"\n\n def get(self, test_uid, phase_descriptor_id, attachment_name):\n _, test_state = self.get_test(test_uid)\n\n if test_state is None:\n return\n\n # Find the phase matching `phase_descriptor_id`.\n running_phase = test_state.running_phase_state\n phase_records = itertools.chain(\n test_state.test_record.phases,\n [running_phase.phase_record] if running_phase is not None else [])\n\n matched_phase = None\n for phase in phase_records:\n if str(phase.descriptor_id) == phase_descriptor_id:\n matched_phase = phase\n break\n\n if matched_phase is None:\n self.write('Unknown phase descriptor %s' % phase_descriptor_id)\n self.set_status(404)\n return\n\n # Find the attachment matching `attachment_name`.\n if attachment_name in matched_phase.attachments:\n attachment = matched_phase.attachments[attachment_name]\n else:\n self.write('Unknown attachment %s' % attachment_name)\n self.set_status(404)\n return\n\n self.set_header('Content-Type', attachment.mimetype)\n self.write(attachment.data)\n\n\nclass PhasesHandler(BaseTestHandler):\n \"\"\"GET endpoint for phase descriptors for a test, i.e. 
the full phase list.\"\"\"\n\n def get(self, test_uid):\n test, _ = self.get_test(test_uid)\n\n if test is None:\n return\n\n phase_descriptors = [\n dict(id=id(phase), **data.convert_to_base_types(phase))\n for phase in test.descriptor.phase_group]\n\n # Wrap value in a dict because writing a list directly is prohibited.\n self.write({'data': phase_descriptors})\n\n\nclass PlugsHandler(BaseTestHandler):\n \"\"\"POST endpoints to receive plug responses from the frontend.\"\"\"\n\n def post(self, test_uid, plug_name):\n _, test_state = self.get_test(test_uid)\n\n if test_state is None:\n return\n\n # Find the plug matching `plug_name`.\n plug = test_state.plug_manager.get_plug_by_class_path(plug_name)\n if plug is None:\n self.write('Unknown plug %s' % plug_name)\n self.set_status(404)\n return\n\n try:\n request = json.loads(self.request.body.decode('utf-8'))\n method_name = request['method']\n args = request['args']\n except (KeyError, ValueError):\n self.write('Malformed JSON request.')\n self.set_status(400)\n return\n\n method = getattr(plug, method_name, None)\n\n if not (plug.enable_remote and\n isinstance(method, types.MethodType) and\n not method_name.startswith('_') and\n method_name not in plug.disable_remote_attrs):\n self.write('Cannot access method %s of plug %s.' % (method_name,\n plug_name))\n self.set_status(400)\n return\n\n try:\n response = json.dumps(method(*args))\n except Exception as e: # pylint: disable=broad-except\n self.write('Plug error: %s' % repr(e))\n self.set_status(500)\n else:\n self.write(response)\n\n\nclass BaseHistoryHandler(web_gui_server.CorsRequestHandler):\n\n def initialize(self, history_path):\n self.history_path = history_path\n\n\nclass HistoryListHandler(BaseHistoryHandler):\n \"\"\"GET endpoint for the list of tests in the history.\n\n When requesting the history list, we respond with all files in the history\n folder ending with the '.pb' extension. Ideally, file names should match the\n following form (see chtf.py):\n\n 'mfg_event_{dut_id}_{start_time_millis}.pb'\n\n The requester can filter the returned history items by passing DUT ID and/or\n start time as query parameters.\n \"\"\"\n\n def get(self):\n filter_dut_id = self.get_arguments('dutId')\n filter_start_time_millis = self.get_arguments('startTimeMillis')\n\n history_items = []\n\n for file_name in os.listdir(self.history_path):\n if not file_name.endswith('.pb'):\n continue\n\n if not os.path.isfile(os.path.join(self.history_path, file_name)):\n continue\n\n dut_id = None\n start_time_millis = None\n match = re.match(r'mfg_event_(.+)_(\\d+)\\.pb$', file_name)\n\n if match is not None:\n dut_id = match.group(1)\n start_time_millis = int(match.group(2))\n\n if filter_dut_id and dut_id not in filter_dut_id:\n continue\n\n if (filter_start_time_millis and\n str(start_time_millis) not in filter_start_time_millis):\n continue\n\n history_items.append({\n 'dut_id': dut_id,\n 'file_name': file_name,\n 'start_time_millis': start_time_millis,\n })\n\n # Wrap value in a dict because writing a list directly is prohibited.\n self.write({'data': history_items})\n\n\nclass HistoryItemHandler(BaseHistoryHandler):\n \"\"\"GET endpoint for a test record from the history.\"\"\"\n\n def get(self, file_name):\n # TODO(Kenadia): Implement the history item handler. 
The implementation\n # depends on the format used to store test records on disk.\n self.write('Not implemented.')\n self.set_status(500)\n\n\nclass HistoryAttachmentsHandler(BaseHistoryHandler):\n \"\"\"GET endpoint for an attachment from an MfgEvent in the history.\n\n The sha1 query parameter is optional and used as a backup to identify an\n attachment if the name does not match any known name. Including this parameter\n is recommended, as some systems may modify attachment names when storing them\n on the MfgEvent in the case where multiple attachments have the same name.\n \"\"\"\n\n def get(self, file_name, attachment_name):\n # TODO(Kenadia): Implement the history item handler. The implementation\n # depends on the format used to store test records on disk.\n self.write('Not implemented.')\n self.set_status(500)\n\n\nclass StationMulticast(multicast.MulticastListener):\n \"\"\"Announce the existence of a station server to any searching dashboards.\"\"\"\n\n def __init__(self, station_server_port):\n # These have default values in openhtf.util.multicast.py.\n kwargs = {\n attr: conf['station_discovery_%s' % attr]\n for attr in ('address', 'port', 'ttl')\n if 'station_discovery_%s' % attr in conf\n }\n super(StationMulticast, self).__init__(self._make_message, **kwargs)\n self.station_server_port = station_server_port\n\n def _make_message(self, message):\n if message != MULTICAST_QUERY:\n if message == 'OPENHTF_PING':\n # Don't log for the old multicast string.\n return\n _LOG.debug('Got unexpected traffic on multicast socket: %s', message)\n return\n\n _, test_state = _get_executing_test()\n\n if test_state:\n cell = test_state.test_record.metadata.get('cell')\n test_description = test_state.test_record.metadata.get('test_description')\n test_name = test_state.test_record.metadata.get('test_name')\n else:\n cell = None\n test_description = None\n test_name = None\n\n return json.dumps({\n 'cell': cell,\n 'port': self.station_server_port,\n 'station_id': conf.station_id, # From openhtf.core.test_state.\n 'test_description': test_description,\n 'test_name': test_name,\n })\n\n\nclass StationServer(web_gui_server.WebGuiServer):\n \"\"\"Provides endpoints for interacting with an OpenHTF test.\n\n Also serves an Angular frontend that interfaces with those endpoints.\n\n Can be used as a context manager to ensure the server is stopped cleanly:\n\n with StationServer(history_path) as server:\n test = openhtf.Test(*my_phases)\n test.add_output_callbacks(server.publish_final_state)\n test.execute()\n\n Can also be used via the maybe_run() helper function:\n\n with maybe_run(should_run, history_path) as server:\n test = openhtf.Test(*my_phases)\n if server:\n test.add_output_callbacks(server.publish_final_state)\n test.execute()\n \"\"\"\n\n def __init__(self, history_path=None):\n # Disable tornado's logging.\n # TODO(Kenadia): Enable these logs if verbosity flag is at least -vvv.\n # I think this will require changing how StoreRepsInModule works.\n # Currently, if we call logs.ARG_PARSER.parse_known_args() multiple\n # times, we multiply the number of v's that we get.\n tornado_logger = logging.getLogger('tornado')\n tornado_logger.propagate = False\n if not tornado_logger.handlers:\n tornado_logger.addHandler(logging.NullHandler())\n\n # Bind port early so that the correct port number can be used in the routes.\n sockets, port = web_gui_server.bind_port(int(conf.station_server_port))\n\n # Set up the station watcher.\n station_watcher = StationWatcher(StationPubSub.publish_update)\n 
station_watcher.start()\n\n # Set up the SockJS endpoints.\n dashboard_class = DashboardPubSub.for_port(port)\n dash_router = sockjs.tornado.SockJSRouter(dashboard_class, '/sub/dashboard')\n station_router = sockjs.tornado.SockJSRouter(StationPubSub, '/sub/station')\n routes = dash_router.urls + station_router.urls\n\n # Set up the other endpoints.\n routes.extend((\n (r'/tests/(?P<test_uid>[\\w\\d:]+)/phases', PhasesHandler),\n (r'/tests/(?P<test_uid>[\\w\\d:]+)/plugs/(?P<plug_name>.+)',\n PlugsHandler),\n (r'/tests/(?P<test_uid>[\\w\\d:]+)/phases/(?P<phase_descriptor_id>\\d+)/'\n 'attachments/(?P<attachment_name>.+)', AttachmentsHandler),\n ))\n\n # Optionally enable history from disk.\n if history_path is not None:\n routes.extend((\n (r'/history', HistoryListHandler, {'history_path': history_path}),\n (r'/history/(?P<file_name>[^/]+)', HistoryItemHandler,\n {'history_path': history_path}),\n (r'/history/(?P<file_name>[^/]+)/attachments/(?P<attachment_name>.+)',\n HistoryAttachmentsHandler, {'history_path': history_path}),\n ))\n\n super(StationServer, self).__init__(routes, port, sockets=sockets)\n self.station_multicast = StationMulticast(port)\n\n def _get_config(self):\n return {\n 'server_type': STATION_SERVER_TYPE,\n }\n\n def run(self):\n _LOG.info('Announcing station server via multicast on %s:%s',\n self.station_multicast.address, self.station_multicast.port)\n self.station_multicast.start()\n _LOG.info(\n 'Starting station server at:\\n'\n ' Local: http://localhost:{port}\\n'\n ' Remote: http://{host}:{port}'\n .format(host=socket.gethostname(), port=self.port))\n super(StationServer, self).run()\n\n def stop(self):\n _LOG.info('Stopping station server.')\n super(StationServer, self).stop()\n _LOG.info('Stopping multicast.')\n self.station_multicast.stop(timeout_s=0)\n\n def publish_final_state(self, test_record):\n \"\"\"Test output callback publishing a final state from the test record.\"\"\"\n StationPubSub.publish_test_record(test_record)\n\n\[email protected]\ndef maybe_run(should_run, history_path=None):\n \"\"\"Provides a context which conditionally runs a StationServer.\"\"\"\n if not should_run:\n yield\n return\n with StationServer(history_path) as server:\n yield server\n", "path": "openhtf/output/servers/station_server.py" } ]
diff --git a/openhtf/output/servers/station_server.py b/openhtf/output/servers/station_server.py index 1554a5e56..fe20bb2bb 100644 --- a/openhtf/output/servers/station_server.py +++ b/openhtf/output/servers/station_server.py @@ -32,9 +32,6 @@ STATION_SERVER_TYPE = 'station' -MIMETYPE_REVERSE_MAP = { - v: k for k, v in six.iteritems(mfg_inspector.MIMETYPE_MAP) -} MULTICAST_QUERY = 'OPENHTF_DISCOVERY' TEST_STATUS_COMPLETED = 'COMPLETED'
scikit-hep__pyhf-895
Docs build broken with Sphinx v3.1.0

# Description

Today (2020-06-08) [Sphinx `v3.1.0`](https://github.com/sphinx-doc/sphinx/releases/tag/v3.1.0) was released, which now classifies pyhf's particular usages of the "autoclass" directive as an Error in the docs generated for [`interpolators/code0.py`](https://github.com/scikit-hep/pyhf/blob/62becc2e469f89babf75534a2decfb3ace6ff179/src/pyhf/interpolators/code0.py):

```
Warning, treated as error:
/home/runner/work/pyhf/pyhf/docs/_generated/pyhf.interpolators.code0.rst:8:Error in "autoclass" directive:
1 argument(s) required, 0 supplied.

.. autoclass::
   :show-inheritance:

   .. rubric:: Methods

   .. automethod:: .__init__

##[error]Process completed with exit code 1.
```
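The short-term mitigation, visible in the patch further down, is to exclude the broken release from the `docs` extra rather than capping Sphinx, so future releases that fix the regression remain installable. A trimmed sketch of the relevant piece of pyhf's `setup.py`:

```python
# Trimmed sketch: keep every Sphinx release except v3.1.0, which rejects the
# generated autoclass stubs.
extras_require = {}
extras_require['docs'] = sorted(
    set(
        [
            'sphinx!=3.1.0',
            'sphinxcontrib-bibtex',
            'sphinx-click',
            'sphinx_rtd_theme',
        ]
    )
)
```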
[ { "content": "from setuptools import setup\n\nextras_require = {\n 'tensorflow': ['tensorflow~=2.0', 'tensorflow-probability~=0.8'],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n 'xmlio': ['uproot'],\n 'minuit': ['iminuit'],\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted(set(['matplotlib']))\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + [\n 'pyflakes',\n 'pytest~=3.5',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'check-manifest',\n 'jupyter',\n 'uproot~=3.3',\n 'graphviz',\n 'jsonpatch',\n 'black',\n ]\n )\n)\nextras_require['docs'] = sorted(\n set(\n [\n 'sphinx',\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n ]\n )\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['test']\n + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'twine']\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py" } ]
[ { "content": "from setuptools import setup\n\nextras_require = {\n 'tensorflow': ['tensorflow~=2.0', 'tensorflow-probability~=0.8'],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n 'xmlio': ['uproot'],\n 'minuit': ['iminuit'],\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted(set(['matplotlib']))\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + [\n 'pyflakes',\n 'pytest~=3.5',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'check-manifest',\n 'jupyter',\n 'uproot~=3.3',\n 'graphviz',\n 'jsonpatch',\n 'black',\n ]\n )\n)\nextras_require['docs'] = sorted(\n set(\n [\n 'sphinx!=3.1.0',\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n ]\n )\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['test']\n + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'twine']\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py" } ]
diff --git a/setup.py b/setup.py index 78b99aa700..1302abb3c2 100644 --- a/setup.py +++ b/setup.py @@ -46,7 +46,7 @@ extras_require['docs'] = sorted( set( [ - 'sphinx', + 'sphinx!=3.1.0', 'sphinxcontrib-bibtex', 'sphinx-click', 'sphinx_rtd_theme',
psf__black-2665
py310 match: one-line case breaks

**Describe the bug**

In `python3.10` the `case` block can be written on the same line as the `case` keyword. However, this breaks `black`.

**To Reproduce**

Take this example code:

```python
# example.py
x = 5
match x:
    case 5: print("it works")
```

It runs under `python3.10`:

```sh
python example.py
it works
```

However, `black` crashes:

```
black --target-version py310 example.py
error: cannot format example.py: INTERNAL ERROR: Black produced invalid code on pass 1: expected an indented block after 'case' statement on line 3 (<unknown>, line 4). Please report a bug on https://github.com/psf/black/issues. This invalid output might be helpful: /var/folders/cz/v1lxwy5579502ksd81z14_t40000gp/T/blk_inxhkvdg.log
Oh no! 💥 💔 💥
1 file failed to reformat.
```

After manually reformatting the example as follows:

```python
x = 5
match x:
    case 5:
        print("it works")
```

`black` stops crashing.

**Expected behavior**

`black` should not crash.

**Environment**

* macOS Big Sur 11.6
* `python 3.10.0`
* `black --version`: `black, 21.11b1 (compiled: no)`
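A hedged sketch of an equivalent in-process reproduction: `black.format_file_contents`, `black.Mode`, and `black.TargetVersion` are part of black's public Python API, and the `fast=False` path runs the same safety checks whose failure the CLI reports as the INTERNAL ERROR above.

```python
# Sketch: reproduce the crash without writing example.py to disk. On affected
# black versions the fast=False safety checks raise the "produced invalid
# code" error; on fixed versions the reformatted source is printed.
import black

src = 'x = 5\nmatch x:\n    case 5: print("it works")\n'
mode = black.Mode(target_versions={black.TargetVersion.PY310})
try:
    print(black.format_file_contents(src, fast=False, mode=mode))
except Exception as exc:
    print(f"black failed: {exc}")
```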
[ { "content": "\"\"\"\nblib2to3 Node/Leaf transformation-related utility functions.\n\"\"\"\n\nimport sys\nfrom typing import (\n Collection,\n Generic,\n Iterator,\n List,\n Optional,\n Set,\n Tuple,\n TypeVar,\n Union,\n)\n\nif sys.version_info >= (3, 8):\n from typing import Final\nelse:\n from typing_extensions import Final\n\nfrom mypy_extensions import mypyc_attr\n\n# lib2to3 fork\nfrom blib2to3.pytree import Node, Leaf, type_repr\nfrom blib2to3 import pygram\nfrom blib2to3.pgen2 import token\n\nfrom black.cache import CACHE_DIR\nfrom black.strings import has_triple_quotes\n\n\npygram.initialize(CACHE_DIR)\nsyms: Final = pygram.python_symbols\n\n\n# types\nT = TypeVar(\"T\")\nLN = Union[Leaf, Node]\nLeafID = int\nNodeType = int\n\n\nWHITESPACE: Final = {token.DEDENT, token.INDENT, token.NEWLINE}\nSTATEMENT: Final = {\n syms.if_stmt,\n syms.while_stmt,\n syms.for_stmt,\n syms.try_stmt,\n syms.except_clause,\n syms.with_stmt,\n syms.funcdef,\n syms.classdef,\n}\nSTANDALONE_COMMENT: Final = 153\ntoken.tok_name[STANDALONE_COMMENT] = \"STANDALONE_COMMENT\"\nLOGIC_OPERATORS: Final = {\"and\", \"or\"}\nCOMPARATORS: Final = {\n token.LESS,\n token.GREATER,\n token.EQEQUAL,\n token.NOTEQUAL,\n token.LESSEQUAL,\n token.GREATEREQUAL,\n}\nMATH_OPERATORS: Final = {\n token.VBAR,\n token.CIRCUMFLEX,\n token.AMPER,\n token.LEFTSHIFT,\n token.RIGHTSHIFT,\n token.PLUS,\n token.MINUS,\n token.STAR,\n token.SLASH,\n token.DOUBLESLASH,\n token.PERCENT,\n token.AT,\n token.TILDE,\n token.DOUBLESTAR,\n}\nSTARS: Final = {token.STAR, token.DOUBLESTAR}\nVARARGS_SPECIALS: Final = STARS | {token.SLASH}\nVARARGS_PARENTS: Final = {\n syms.arglist,\n syms.argument, # double star in arglist\n syms.trailer, # single argument to call\n syms.typedargslist,\n syms.varargslist, # lambdas\n}\nUNPACKING_PARENTS: Final = {\n syms.atom, # single element of a list or set literal\n syms.dictsetmaker,\n syms.listmaker,\n syms.testlist_gexp,\n syms.testlist_star_expr,\n}\nTEST_DESCENDANTS: Final = {\n syms.test,\n syms.lambdef,\n syms.or_test,\n syms.and_test,\n syms.not_test,\n syms.comparison,\n syms.star_expr,\n syms.expr,\n syms.xor_expr,\n syms.and_expr,\n syms.shift_expr,\n syms.arith_expr,\n syms.trailer,\n syms.term,\n syms.power,\n}\nASSIGNMENTS: Final = {\n \"=\",\n \"+=\",\n \"-=\",\n \"*=\",\n \"@=\",\n \"/=\",\n \"%=\",\n \"&=\",\n \"|=\",\n \"^=\",\n \"<<=\",\n \">>=\",\n \"**=\",\n \"//=\",\n}\n\nIMPLICIT_TUPLE: Final = {syms.testlist, syms.testlist_star_expr, syms.exprlist}\nBRACKET: Final = {\n token.LPAR: token.RPAR,\n token.LSQB: token.RSQB,\n token.LBRACE: token.RBRACE,\n}\nOPENING_BRACKETS: Final = set(BRACKET.keys())\nCLOSING_BRACKETS: Final = set(BRACKET.values())\nBRACKETS: Final = OPENING_BRACKETS | CLOSING_BRACKETS\nALWAYS_NO_SPACE: Final = CLOSING_BRACKETS | {token.COMMA, STANDALONE_COMMENT}\n\nRARROW = 55\n\n\n@mypyc_attr(allow_interpreted_subclasses=True)\nclass Visitor(Generic[T]):\n \"\"\"Basic lib2to3 visitor that yields things of type `T` on `visit()`.\"\"\"\n\n def visit(self, node: LN) -> Iterator[T]:\n \"\"\"Main method to visit `node` and its children.\n\n It tries to find a `visit_*()` method for the given `node.type`, like\n `visit_simple_stmt` for Node objects or `visit_INDENT` for Leaf objects.\n If no dedicated `visit_*()` method is found, chooses `visit_default()`\n instead.\n\n Then yields objects of type `T` from the selected visitor.\n \"\"\"\n if node.type < 256:\n name = token.tok_name[node.type]\n else:\n name = str(type_repr(node.type))\n # We explicitly branch on whether a 
visitor exists (instead of\n # using self.visit_default as the default arg to getattr) in order\n # to save needing to create a bound method object and so mypyc can\n # generate a native call to visit_default.\n visitf = getattr(self, f\"visit_{name}\", None)\n if visitf:\n yield from visitf(node)\n else:\n yield from self.visit_default(node)\n\n def visit_default(self, node: LN) -> Iterator[T]:\n \"\"\"Default `visit_*()` implementation. Recurses to children of `node`.\"\"\"\n if isinstance(node, Node):\n for child in node.children:\n yield from self.visit(child)\n\n\ndef whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa: C901\n \"\"\"Return whitespace prefix if needed for the given `leaf`.\n\n `complex_subscript` signals whether the given leaf is part of a subscription\n which has non-trivial arguments, like arithmetic expressions or function calls.\n \"\"\"\n NO: Final = \"\"\n SPACE: Final = \" \"\n DOUBLESPACE: Final = \" \"\n t = leaf.type\n p = leaf.parent\n v = leaf.value\n if t in ALWAYS_NO_SPACE:\n return NO\n\n if t == token.COMMENT:\n return DOUBLESPACE\n\n assert p is not None, f\"INTERNAL ERROR: hand-made leaf without parent: {leaf!r}\"\n if t == token.COLON and p.type not in {\n syms.subscript,\n syms.subscriptlist,\n syms.sliceop,\n }:\n return NO\n\n prev = leaf.prev_sibling\n if not prev:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type in OPENING_BRACKETS:\n return NO\n\n if t == token.COLON:\n if prevp.type == token.COLON:\n return NO\n\n elif prevp.type != token.COMMA and not complex_subscript:\n return NO\n\n return SPACE\n\n if prevp.type == token.EQUAL:\n if prevp.parent:\n if prevp.parent.type in {\n syms.arglist,\n syms.argument,\n syms.parameters,\n syms.varargslist,\n }:\n return NO\n\n elif prevp.parent.type == syms.typedargslist:\n # A bit hacky: if the equal sign has whitespace, it means we\n # previously found it's a typed argument. So, we're using\n # that, too.\n return prevp.prefix\n\n elif prevp.type in VARARGS_SPECIALS:\n if is_vararg(prevp, within=VARARGS_PARENTS | UNPACKING_PARENTS):\n return NO\n\n elif prevp.type == token.COLON:\n if prevp.parent and prevp.parent.type in {syms.subscript, syms.sliceop}:\n return SPACE if complex_subscript else NO\n\n elif (\n prevp.parent\n and prevp.parent.type == syms.factor\n and prevp.type in MATH_OPERATORS\n ):\n return NO\n\n elif (\n prevp.type == token.RIGHTSHIFT\n and prevp.parent\n and prevp.parent.type == syms.shift_expr\n and prevp.prev_sibling\n and prevp.prev_sibling.type == token.NAME\n and prevp.prev_sibling.value == \"print\" # type: ignore\n ):\n # Python 2 print chevron\n return NO\n elif prevp.type == token.AT and p.parent and p.parent.type == syms.decorator:\n # no space in decorators\n return NO\n\n elif prev.type in OPENING_BRACKETS:\n return NO\n\n if p.type in {syms.parameters, syms.arglist}:\n # untyped function signatures or calls\n if not prev or prev.type != token.COMMA:\n return NO\n\n elif p.type == syms.varargslist:\n # lambdas\n if prev and prev.type != token.COMMA:\n return NO\n\n elif p.type == syms.typedargslist:\n # typed function signatures\n if not prev:\n return NO\n\n if t == token.EQUAL:\n if prev.type != syms.tname:\n return NO\n\n elif prev.type == token.EQUAL:\n # A bit hacky: if the equal sign has whitespace, it means we\n # previously found it's a typed argument. 
So, we're using that, too.\n return prev.prefix\n\n elif prev.type != token.COMMA:\n return NO\n\n elif p.type == syms.tname:\n # type names\n if not prev:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type != token.COMMA:\n return NO\n\n elif p.type == syms.trailer:\n # attributes and calls\n if t == token.LPAR or t == token.RPAR:\n return NO\n\n if not prev:\n if t == token.DOT:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type != token.NUMBER:\n return NO\n\n elif t == token.LSQB:\n return NO\n\n elif prev.type != token.COMMA:\n return NO\n\n elif p.type == syms.argument:\n # single argument\n if t == token.EQUAL:\n return NO\n\n if not prev:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type == token.LPAR:\n return NO\n\n elif prev.type in {token.EQUAL} | VARARGS_SPECIALS:\n return NO\n\n elif p.type == syms.decorator:\n # decorators\n return NO\n\n elif p.type == syms.dotted_name:\n if prev:\n return NO\n\n prevp = preceding_leaf(p)\n if not prevp or prevp.type == token.AT or prevp.type == token.DOT:\n return NO\n\n elif p.type == syms.classdef:\n if t == token.LPAR:\n return NO\n\n if prev and prev.type == token.LPAR:\n return NO\n\n elif p.type in {syms.subscript, syms.sliceop}:\n # indexing\n if not prev:\n assert p.parent is not None, \"subscripts are always parented\"\n if p.parent.type == syms.subscriptlist:\n return SPACE\n\n return NO\n\n elif not complex_subscript:\n return NO\n\n elif p.type == syms.atom:\n if prev and t == token.DOT:\n # dots, but not the first one.\n return NO\n\n elif p.type == syms.dictsetmaker:\n # dict unpacking\n if prev and prev.type == token.DOUBLESTAR:\n return NO\n\n elif p.type in {syms.factor, syms.star_expr}:\n # unary ops\n if not prev:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type in OPENING_BRACKETS:\n return NO\n\n prevp_parent = prevp.parent\n assert prevp_parent is not None\n if prevp.type == token.COLON and prevp_parent.type in {\n syms.subscript,\n syms.sliceop,\n }:\n return NO\n\n elif prevp.type == token.EQUAL and prevp_parent.type == syms.argument:\n return NO\n\n elif t in {token.NAME, token.NUMBER, token.STRING}:\n return NO\n\n elif p.type == syms.import_from:\n if t == token.DOT:\n if prev and prev.type == token.DOT:\n return NO\n\n elif t == token.NAME:\n if v == \"import\":\n return SPACE\n\n if prev and prev.type == token.DOT:\n return NO\n\n elif p.type == syms.sliceop:\n return NO\n\n return SPACE\n\n\ndef preceding_leaf(node: Optional[LN]) -> Optional[Leaf]:\n \"\"\"Return the first leaf that precedes `node`, if any.\"\"\"\n while node:\n res = node.prev_sibling\n if res:\n if isinstance(res, Leaf):\n return res\n\n try:\n return list(res.leaves())[-1]\n\n except IndexError:\n return None\n\n node = node.parent\n return None\n\n\ndef prev_siblings_are(node: Optional[LN], tokens: List[Optional[NodeType]]) -> bool:\n \"\"\"Return if the `node` and its previous siblings match types against the provided\n list of tokens; the provided `node`has its type matched against the last element in\n the list. 
`None` can be used as the first element to declare that the start of the\n list is anchored at the start of its parent's children.\"\"\"\n if not tokens:\n return True\n if tokens[-1] is None:\n return node is None\n if not node:\n return False\n if node.type != tokens[-1]:\n return False\n return prev_siblings_are(node.prev_sibling, tokens[:-1])\n\n\ndef last_two_except(leaves: List[Leaf], omit: Collection[LeafID]) -> Tuple[Leaf, Leaf]:\n \"\"\"Return (penultimate, last) leaves skipping brackets in `omit` and contents.\"\"\"\n stop_after: Optional[Leaf] = None\n last: Optional[Leaf] = None\n for leaf in reversed(leaves):\n if stop_after:\n if leaf is stop_after:\n stop_after = None\n continue\n\n if last:\n return leaf, last\n\n if id(leaf) in omit:\n stop_after = leaf.opening_bracket\n else:\n last = leaf\n else:\n raise LookupError(\"Last two leaves were also skipped\")\n\n\ndef parent_type(node: Optional[LN]) -> Optional[NodeType]:\n \"\"\"\n Returns:\n @node.parent.type, if @node is not None and has a parent.\n OR\n None, otherwise.\n \"\"\"\n if node is None or node.parent is None:\n return None\n\n return node.parent.type\n\n\ndef child_towards(ancestor: Node, descendant: LN) -> Optional[LN]:\n \"\"\"Return the child of `ancestor` that contains `descendant`.\"\"\"\n node: Optional[LN] = descendant\n while node and node.parent != ancestor:\n node = node.parent\n return node\n\n\ndef replace_child(old_child: LN, new_child: LN) -> None:\n \"\"\"\n Side Effects:\n * If @old_child.parent is set, replace @old_child with @new_child in\n @old_child's underlying Node structure.\n OR\n * Otherwise, this function does nothing.\n \"\"\"\n parent = old_child.parent\n if not parent:\n return\n\n child_idx = old_child.remove()\n if child_idx is not None:\n parent.insert_child(child_idx, new_child)\n\n\ndef container_of(leaf: Leaf) -> LN:\n \"\"\"Return `leaf` or one of its ancestors that is the topmost container of it.\n\n By \"container\" we mean a node where `leaf` is the very first child.\n \"\"\"\n same_prefix = leaf.prefix\n container: LN = leaf\n while container:\n parent = container.parent\n if parent is None:\n break\n\n if parent.children[0].prefix != same_prefix:\n break\n\n if parent.type == syms.file_input:\n break\n\n if parent.prev_sibling is not None and parent.prev_sibling.type in BRACKETS:\n break\n\n container = parent\n return container\n\n\ndef first_leaf_column(node: Node) -> Optional[int]:\n \"\"\"Returns the column of the first leaf child of a node.\"\"\"\n for child in node.children:\n if isinstance(child, Leaf):\n return child.column\n return None\n\n\ndef first_child_is_arith(node: Node) -> bool:\n \"\"\"Whether first child is an arithmetic or a binary arithmetic expression\"\"\"\n expr_types = {\n syms.arith_expr,\n syms.shift_expr,\n syms.xor_expr,\n syms.and_expr,\n }\n return bool(node.children and node.children[0].type in expr_types)\n\n\ndef is_docstring(leaf: Leaf) -> bool:\n if prev_siblings_are(\n leaf.parent, [None, token.NEWLINE, token.INDENT, syms.simple_stmt]\n ):\n return True\n\n # Multiline docstring on the same line as the `def`.\n if prev_siblings_are(leaf.parent, [syms.parameters, token.COLON, syms.simple_stmt]):\n # `syms.parameters` is only used in funcdefs and async_funcdefs in the Python\n # grammar. 
We're safe to return True without further checks.\n return True\n\n return False\n\n\ndef is_empty_tuple(node: LN) -> bool:\n \"\"\"Return True if `node` holds an empty tuple.\"\"\"\n return (\n node.type == syms.atom\n and len(node.children) == 2\n and node.children[0].type == token.LPAR\n and node.children[1].type == token.RPAR\n )\n\n\ndef is_one_tuple(node: LN) -> bool:\n \"\"\"Return True if `node` holds a tuple with one element, with or without parens.\"\"\"\n if node.type == syms.atom:\n gexp = unwrap_singleton_parenthesis(node)\n if gexp is None or gexp.type != syms.testlist_gexp:\n return False\n\n return len(gexp.children) == 2 and gexp.children[1].type == token.COMMA\n\n return (\n node.type in IMPLICIT_TUPLE\n and len(node.children) == 2\n and node.children[1].type == token.COMMA\n )\n\n\ndef is_one_tuple_between(opening: Leaf, closing: Leaf, leaves: List[Leaf]) -> bool:\n \"\"\"Return True if content between `opening` and `closing` looks like a one-tuple.\"\"\"\n if opening.type != token.LPAR and closing.type != token.RPAR:\n return False\n\n depth = closing.bracket_depth + 1\n for _opening_index, leaf in enumerate(leaves):\n if leaf is opening:\n break\n\n else:\n raise LookupError(\"Opening paren not found in `leaves`\")\n\n commas = 0\n _opening_index += 1\n for leaf in leaves[_opening_index:]:\n if leaf is closing:\n break\n\n bracket_depth = leaf.bracket_depth\n if bracket_depth == depth and leaf.type == token.COMMA:\n commas += 1\n if leaf.parent and leaf.parent.type in {\n syms.arglist,\n syms.typedargslist,\n }:\n commas += 1\n break\n\n return commas < 2\n\n\ndef is_walrus_assignment(node: LN) -> bool:\n \"\"\"Return True iff `node` is of the shape ( test := test )\"\"\"\n inner = unwrap_singleton_parenthesis(node)\n return inner is not None and inner.type == syms.namedexpr_test\n\n\ndef is_simple_decorator_trailer(node: LN, last: bool = False) -> bool:\n \"\"\"Return True iff `node` is a trailer valid in a simple decorator\"\"\"\n return node.type == syms.trailer and (\n (\n len(node.children) == 2\n and node.children[0].type == token.DOT\n and node.children[1].type == token.NAME\n )\n # last trailer can be an argument-less parentheses pair\n or (\n last\n and len(node.children) == 2\n and node.children[0].type == token.LPAR\n and node.children[1].type == token.RPAR\n )\n # last trailer can be arguments\n or (\n last\n and len(node.children) == 3\n and node.children[0].type == token.LPAR\n # and node.children[1].type == syms.argument\n and node.children[2].type == token.RPAR\n )\n )\n\n\ndef is_simple_decorator_expression(node: LN) -> bool:\n \"\"\"Return True iff `node` could be a 'dotted name' decorator\n\n This function takes the node of the 'namedexpr_test' of the new decorator\n grammar and test if it would be valid under the old decorator grammar.\n\n The old grammar was: decorator: @ dotted_name [arguments] NEWLINE\n The new grammar is : decorator: @ namedexpr_test NEWLINE\n \"\"\"\n if node.type == token.NAME:\n return True\n if node.type == syms.power:\n if node.children:\n return (\n node.children[0].type == token.NAME\n and all(map(is_simple_decorator_trailer, node.children[1:-1]))\n and (\n len(node.children) < 2\n or is_simple_decorator_trailer(node.children[-1], last=True)\n )\n )\n return False\n\n\ndef is_yield(node: LN) -> bool:\n \"\"\"Return True if `node` holds a `yield` or `yield from` expression.\"\"\"\n if node.type == syms.yield_expr:\n return True\n\n if node.type == token.NAME and node.value == \"yield\": # type: ignore\n return True\n\n if 
node.type != syms.atom:\n return False\n\n if len(node.children) != 3:\n return False\n\n lpar, expr, rpar = node.children\n if lpar.type == token.LPAR and rpar.type == token.RPAR:\n return is_yield(expr)\n\n return False\n\n\ndef is_vararg(leaf: Leaf, within: Set[NodeType]) -> bool:\n \"\"\"Return True if `leaf` is a star or double star in a vararg or kwarg.\n\n If `within` includes VARARGS_PARENTS, this applies to function signatures.\n If `within` includes UNPACKING_PARENTS, it applies to right hand-side\n extended iterable unpacking (PEP 3132) and additional unpacking\n generalizations (PEP 448).\n \"\"\"\n if leaf.type not in VARARGS_SPECIALS or not leaf.parent:\n return False\n\n p = leaf.parent\n if p.type == syms.star_expr:\n # Star expressions are also used as assignment targets in extended\n # iterable unpacking (PEP 3132). See what its parent is instead.\n if not p.parent:\n return False\n\n p = p.parent\n\n return p.type in within\n\n\ndef is_multiline_string(leaf: Leaf) -> bool:\n \"\"\"Return True if `leaf` is a multiline string that actually spans many lines.\"\"\"\n return has_triple_quotes(leaf.value) and \"\\n\" in leaf.value\n\n\ndef is_stub_suite(node: Node) -> bool:\n \"\"\"Return True if `node` is a suite with a stub body.\"\"\"\n if (\n len(node.children) != 4\n or node.children[0].type != token.NEWLINE\n or node.children[1].type != token.INDENT\n or node.children[3].type != token.DEDENT\n ):\n return False\n\n return is_stub_body(node.children[2])\n\n\ndef is_stub_body(node: LN) -> bool:\n \"\"\"Return True if `node` is a simple statement containing an ellipsis.\"\"\"\n if not isinstance(node, Node) or node.type != syms.simple_stmt:\n return False\n\n if len(node.children) != 2:\n return False\n\n child = node.children[0]\n return (\n child.type == syms.atom\n and len(child.children) == 3\n and all(leaf == Leaf(token.DOT, \".\") for leaf in child.children)\n )\n\n\ndef is_atom_with_invisible_parens(node: LN) -> bool:\n \"\"\"Given a `LN`, determines whether it's an atom `node` with invisible\n parens. Useful in dedupe-ing and normalizing parens.\n \"\"\"\n if isinstance(node, Leaf) or node.type != syms.atom:\n return False\n\n first, last = node.children[0], node.children[-1]\n return (\n isinstance(first, Leaf)\n and first.type == token.LPAR\n and first.value == \"\"\n and isinstance(last, Leaf)\n and last.type == token.RPAR\n and last.value == \"\"\n )\n\n\ndef is_empty_par(leaf: Leaf) -> bool:\n return is_empty_lpar(leaf) or is_empty_rpar(leaf)\n\n\ndef is_empty_lpar(leaf: Leaf) -> bool:\n return leaf.type == token.LPAR and leaf.value == \"\"\n\n\ndef is_empty_rpar(leaf: Leaf) -> bool:\n return leaf.type == token.RPAR and leaf.value == \"\"\n\n\ndef is_import(leaf: Leaf) -> bool:\n \"\"\"Return True if the given leaf starts an import statement.\"\"\"\n p = leaf.parent\n t = leaf.type\n v = leaf.value\n return bool(\n t == token.NAME\n and (\n (v == \"import\" and p and p.type == syms.import_name)\n or (v == \"from\" and p and p.type == syms.import_from)\n )\n )\n\n\ndef is_type_comment(leaf: Leaf, suffix: str = \"\") -> bool:\n \"\"\"Return True if the given leaf is a special comment.\n Only returns true for type comments for now.\"\"\"\n t = leaf.type\n v = leaf.value\n return t in {token.COMMENT, STANDALONE_COMMENT} and v.startswith(\"# type:\" + suffix)\n\n\ndef wrap_in_parentheses(parent: Node, child: LN, *, visible: bool = True) -> None:\n \"\"\"Wrap `child` in parentheses.\n\n This replaces `child` with an atom holding the parentheses and the old\n child. 
That requires moving the prefix.\n\n If `visible` is False, the leaves will be valueless (and thus invisible).\n \"\"\"\n lpar = Leaf(token.LPAR, \"(\" if visible else \"\")\n rpar = Leaf(token.RPAR, \")\" if visible else \"\")\n prefix = child.prefix\n child.prefix = \"\"\n index = child.remove() or 0\n new_child = Node(syms.atom, [lpar, child, rpar])\n new_child.prefix = prefix\n parent.insert_child(index, new_child)\n\n\ndef unwrap_singleton_parenthesis(node: LN) -> Optional[LN]:\n \"\"\"Returns `wrapped` if `node` is of the shape ( wrapped ).\n\n Parenthesis can be optional. Returns None otherwise\"\"\"\n if len(node.children) != 3:\n return None\n\n lpar, wrapped, rpar = node.children\n if not (lpar.type == token.LPAR and rpar.type == token.RPAR):\n return None\n\n return wrapped\n\n\ndef ensure_visible(leaf: Leaf) -> None:\n \"\"\"Make sure parentheses are visible.\n\n They could be invisible as part of some statements (see\n :func:`normalize_invisible_parens` and :func:`visit_import_from`).\n \"\"\"\n if leaf.type == token.LPAR:\n leaf.value = \"(\"\n elif leaf.type == token.RPAR:\n leaf.value = \")\"\n", "path": "src/black/nodes.py" } ]
[ { "content": "\"\"\"\nblib2to3 Node/Leaf transformation-related utility functions.\n\"\"\"\n\nimport sys\nfrom typing import (\n Collection,\n Generic,\n Iterator,\n List,\n Optional,\n Set,\n Tuple,\n TypeVar,\n Union,\n)\n\nif sys.version_info >= (3, 8):\n from typing import Final\nelse:\n from typing_extensions import Final\n\nfrom mypy_extensions import mypyc_attr\n\n# lib2to3 fork\nfrom blib2to3.pytree import Node, Leaf, type_repr\nfrom blib2to3 import pygram\nfrom blib2to3.pgen2 import token\n\nfrom black.cache import CACHE_DIR\nfrom black.strings import has_triple_quotes\n\n\npygram.initialize(CACHE_DIR)\nsyms: Final = pygram.python_symbols\n\n\n# types\nT = TypeVar(\"T\")\nLN = Union[Leaf, Node]\nLeafID = int\nNodeType = int\n\n\nWHITESPACE: Final = {token.DEDENT, token.INDENT, token.NEWLINE}\nSTATEMENT: Final = {\n syms.if_stmt,\n syms.while_stmt,\n syms.for_stmt,\n syms.try_stmt,\n syms.except_clause,\n syms.with_stmt,\n syms.funcdef,\n syms.classdef,\n syms.match_stmt,\n syms.case_block,\n}\nSTANDALONE_COMMENT: Final = 153\ntoken.tok_name[STANDALONE_COMMENT] = \"STANDALONE_COMMENT\"\nLOGIC_OPERATORS: Final = {\"and\", \"or\"}\nCOMPARATORS: Final = {\n token.LESS,\n token.GREATER,\n token.EQEQUAL,\n token.NOTEQUAL,\n token.LESSEQUAL,\n token.GREATEREQUAL,\n}\nMATH_OPERATORS: Final = {\n token.VBAR,\n token.CIRCUMFLEX,\n token.AMPER,\n token.LEFTSHIFT,\n token.RIGHTSHIFT,\n token.PLUS,\n token.MINUS,\n token.STAR,\n token.SLASH,\n token.DOUBLESLASH,\n token.PERCENT,\n token.AT,\n token.TILDE,\n token.DOUBLESTAR,\n}\nSTARS: Final = {token.STAR, token.DOUBLESTAR}\nVARARGS_SPECIALS: Final = STARS | {token.SLASH}\nVARARGS_PARENTS: Final = {\n syms.arglist,\n syms.argument, # double star in arglist\n syms.trailer, # single argument to call\n syms.typedargslist,\n syms.varargslist, # lambdas\n}\nUNPACKING_PARENTS: Final = {\n syms.atom, # single element of a list or set literal\n syms.dictsetmaker,\n syms.listmaker,\n syms.testlist_gexp,\n syms.testlist_star_expr,\n}\nTEST_DESCENDANTS: Final = {\n syms.test,\n syms.lambdef,\n syms.or_test,\n syms.and_test,\n syms.not_test,\n syms.comparison,\n syms.star_expr,\n syms.expr,\n syms.xor_expr,\n syms.and_expr,\n syms.shift_expr,\n syms.arith_expr,\n syms.trailer,\n syms.term,\n syms.power,\n}\nASSIGNMENTS: Final = {\n \"=\",\n \"+=\",\n \"-=\",\n \"*=\",\n \"@=\",\n \"/=\",\n \"%=\",\n \"&=\",\n \"|=\",\n \"^=\",\n \"<<=\",\n \">>=\",\n \"**=\",\n \"//=\",\n}\n\nIMPLICIT_TUPLE: Final = {syms.testlist, syms.testlist_star_expr, syms.exprlist}\nBRACKET: Final = {\n token.LPAR: token.RPAR,\n token.LSQB: token.RSQB,\n token.LBRACE: token.RBRACE,\n}\nOPENING_BRACKETS: Final = set(BRACKET.keys())\nCLOSING_BRACKETS: Final = set(BRACKET.values())\nBRACKETS: Final = OPENING_BRACKETS | CLOSING_BRACKETS\nALWAYS_NO_SPACE: Final = CLOSING_BRACKETS | {token.COMMA, STANDALONE_COMMENT}\n\nRARROW = 55\n\n\n@mypyc_attr(allow_interpreted_subclasses=True)\nclass Visitor(Generic[T]):\n \"\"\"Basic lib2to3 visitor that yields things of type `T` on `visit()`.\"\"\"\n\n def visit(self, node: LN) -> Iterator[T]:\n \"\"\"Main method to visit `node` and its children.\n\n It tries to find a `visit_*()` method for the given `node.type`, like\n `visit_simple_stmt` for Node objects or `visit_INDENT` for Leaf objects.\n If no dedicated `visit_*()` method is found, chooses `visit_default()`\n instead.\n\n Then yields objects of type `T` from the selected visitor.\n \"\"\"\n if node.type < 256:\n name = token.tok_name[node.type]\n else:\n name = str(type_repr(node.type))\n 
# We explicitly branch on whether a visitor exists (instead of\n # using self.visit_default as the default arg to getattr) in order\n # to save needing to create a bound method object and so mypyc can\n # generate a native call to visit_default.\n visitf = getattr(self, f\"visit_{name}\", None)\n if visitf:\n yield from visitf(node)\n else:\n yield from self.visit_default(node)\n\n def visit_default(self, node: LN) -> Iterator[T]:\n \"\"\"Default `visit_*()` implementation. Recurses to children of `node`.\"\"\"\n if isinstance(node, Node):\n for child in node.children:\n yield from self.visit(child)\n\n\ndef whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa: C901\n \"\"\"Return whitespace prefix if needed for the given `leaf`.\n\n `complex_subscript` signals whether the given leaf is part of a subscription\n which has non-trivial arguments, like arithmetic expressions or function calls.\n \"\"\"\n NO: Final = \"\"\n SPACE: Final = \" \"\n DOUBLESPACE: Final = \" \"\n t = leaf.type\n p = leaf.parent\n v = leaf.value\n if t in ALWAYS_NO_SPACE:\n return NO\n\n if t == token.COMMENT:\n return DOUBLESPACE\n\n assert p is not None, f\"INTERNAL ERROR: hand-made leaf without parent: {leaf!r}\"\n if t == token.COLON and p.type not in {\n syms.subscript,\n syms.subscriptlist,\n syms.sliceop,\n }:\n return NO\n\n prev = leaf.prev_sibling\n if not prev:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type in OPENING_BRACKETS:\n return NO\n\n if t == token.COLON:\n if prevp.type == token.COLON:\n return NO\n\n elif prevp.type != token.COMMA and not complex_subscript:\n return NO\n\n return SPACE\n\n if prevp.type == token.EQUAL:\n if prevp.parent:\n if prevp.parent.type in {\n syms.arglist,\n syms.argument,\n syms.parameters,\n syms.varargslist,\n }:\n return NO\n\n elif prevp.parent.type == syms.typedargslist:\n # A bit hacky: if the equal sign has whitespace, it means we\n # previously found it's a typed argument. So, we're using\n # that, too.\n return prevp.prefix\n\n elif prevp.type in VARARGS_SPECIALS:\n if is_vararg(prevp, within=VARARGS_PARENTS | UNPACKING_PARENTS):\n return NO\n\n elif prevp.type == token.COLON:\n if prevp.parent and prevp.parent.type in {syms.subscript, syms.sliceop}:\n return SPACE if complex_subscript else NO\n\n elif (\n prevp.parent\n and prevp.parent.type == syms.factor\n and prevp.type in MATH_OPERATORS\n ):\n return NO\n\n elif (\n prevp.type == token.RIGHTSHIFT\n and prevp.parent\n and prevp.parent.type == syms.shift_expr\n and prevp.prev_sibling\n and prevp.prev_sibling.type == token.NAME\n and prevp.prev_sibling.value == \"print\" # type: ignore\n ):\n # Python 2 print chevron\n return NO\n elif prevp.type == token.AT and p.parent and p.parent.type == syms.decorator:\n # no space in decorators\n return NO\n\n elif prev.type in OPENING_BRACKETS:\n return NO\n\n if p.type in {syms.parameters, syms.arglist}:\n # untyped function signatures or calls\n if not prev or prev.type != token.COMMA:\n return NO\n\n elif p.type == syms.varargslist:\n # lambdas\n if prev and prev.type != token.COMMA:\n return NO\n\n elif p.type == syms.typedargslist:\n # typed function signatures\n if not prev:\n return NO\n\n if t == token.EQUAL:\n if prev.type != syms.tname:\n return NO\n\n elif prev.type == token.EQUAL:\n # A bit hacky: if the equal sign has whitespace, it means we\n # previously found it's a typed argument. 
So, we're using that, too.\n return prev.prefix\n\n elif prev.type != token.COMMA:\n return NO\n\n elif p.type == syms.tname:\n # type names\n if not prev:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type != token.COMMA:\n return NO\n\n elif p.type == syms.trailer:\n # attributes and calls\n if t == token.LPAR or t == token.RPAR:\n return NO\n\n if not prev:\n if t == token.DOT:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type != token.NUMBER:\n return NO\n\n elif t == token.LSQB:\n return NO\n\n elif prev.type != token.COMMA:\n return NO\n\n elif p.type == syms.argument:\n # single argument\n if t == token.EQUAL:\n return NO\n\n if not prev:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type == token.LPAR:\n return NO\n\n elif prev.type in {token.EQUAL} | VARARGS_SPECIALS:\n return NO\n\n elif p.type == syms.decorator:\n # decorators\n return NO\n\n elif p.type == syms.dotted_name:\n if prev:\n return NO\n\n prevp = preceding_leaf(p)\n if not prevp or prevp.type == token.AT or prevp.type == token.DOT:\n return NO\n\n elif p.type == syms.classdef:\n if t == token.LPAR:\n return NO\n\n if prev and prev.type == token.LPAR:\n return NO\n\n elif p.type in {syms.subscript, syms.sliceop}:\n # indexing\n if not prev:\n assert p.parent is not None, \"subscripts are always parented\"\n if p.parent.type == syms.subscriptlist:\n return SPACE\n\n return NO\n\n elif not complex_subscript:\n return NO\n\n elif p.type == syms.atom:\n if prev and t == token.DOT:\n # dots, but not the first one.\n return NO\n\n elif p.type == syms.dictsetmaker:\n # dict unpacking\n if prev and prev.type == token.DOUBLESTAR:\n return NO\n\n elif p.type in {syms.factor, syms.star_expr}:\n # unary ops\n if not prev:\n prevp = preceding_leaf(p)\n if not prevp or prevp.type in OPENING_BRACKETS:\n return NO\n\n prevp_parent = prevp.parent\n assert prevp_parent is not None\n if prevp.type == token.COLON and prevp_parent.type in {\n syms.subscript,\n syms.sliceop,\n }:\n return NO\n\n elif prevp.type == token.EQUAL and prevp_parent.type == syms.argument:\n return NO\n\n elif t in {token.NAME, token.NUMBER, token.STRING}:\n return NO\n\n elif p.type == syms.import_from:\n if t == token.DOT:\n if prev and prev.type == token.DOT:\n return NO\n\n elif t == token.NAME:\n if v == \"import\":\n return SPACE\n\n if prev and prev.type == token.DOT:\n return NO\n\n elif p.type == syms.sliceop:\n return NO\n\n return SPACE\n\n\ndef preceding_leaf(node: Optional[LN]) -> Optional[Leaf]:\n \"\"\"Return the first leaf that precedes `node`, if any.\"\"\"\n while node:\n res = node.prev_sibling\n if res:\n if isinstance(res, Leaf):\n return res\n\n try:\n return list(res.leaves())[-1]\n\n except IndexError:\n return None\n\n node = node.parent\n return None\n\n\ndef prev_siblings_are(node: Optional[LN], tokens: List[Optional[NodeType]]) -> bool:\n \"\"\"Return if the `node` and its previous siblings match types against the provided\n list of tokens; the provided `node`has its type matched against the last element in\n the list. 
`None` can be used as the first element to declare that the start of the\n list is anchored at the start of its parent's children.\"\"\"\n if not tokens:\n return True\n if tokens[-1] is None:\n return node is None\n if not node:\n return False\n if node.type != tokens[-1]:\n return False\n return prev_siblings_are(node.prev_sibling, tokens[:-1])\n\n\ndef last_two_except(leaves: List[Leaf], omit: Collection[LeafID]) -> Tuple[Leaf, Leaf]:\n \"\"\"Return (penultimate, last) leaves skipping brackets in `omit` and contents.\"\"\"\n stop_after: Optional[Leaf] = None\n last: Optional[Leaf] = None\n for leaf in reversed(leaves):\n if stop_after:\n if leaf is stop_after:\n stop_after = None\n continue\n\n if last:\n return leaf, last\n\n if id(leaf) in omit:\n stop_after = leaf.opening_bracket\n else:\n last = leaf\n else:\n raise LookupError(\"Last two leaves were also skipped\")\n\n\ndef parent_type(node: Optional[LN]) -> Optional[NodeType]:\n \"\"\"\n Returns:\n @node.parent.type, if @node is not None and has a parent.\n OR\n None, otherwise.\n \"\"\"\n if node is None or node.parent is None:\n return None\n\n return node.parent.type\n\n\ndef child_towards(ancestor: Node, descendant: LN) -> Optional[LN]:\n \"\"\"Return the child of `ancestor` that contains `descendant`.\"\"\"\n node: Optional[LN] = descendant\n while node and node.parent != ancestor:\n node = node.parent\n return node\n\n\ndef replace_child(old_child: LN, new_child: LN) -> None:\n \"\"\"\n Side Effects:\n * If @old_child.parent is set, replace @old_child with @new_child in\n @old_child's underlying Node structure.\n OR\n * Otherwise, this function does nothing.\n \"\"\"\n parent = old_child.parent\n if not parent:\n return\n\n child_idx = old_child.remove()\n if child_idx is not None:\n parent.insert_child(child_idx, new_child)\n\n\ndef container_of(leaf: Leaf) -> LN:\n \"\"\"Return `leaf` or one of its ancestors that is the topmost container of it.\n\n By \"container\" we mean a node where `leaf` is the very first child.\n \"\"\"\n same_prefix = leaf.prefix\n container: LN = leaf\n while container:\n parent = container.parent\n if parent is None:\n break\n\n if parent.children[0].prefix != same_prefix:\n break\n\n if parent.type == syms.file_input:\n break\n\n if parent.prev_sibling is not None and parent.prev_sibling.type in BRACKETS:\n break\n\n container = parent\n return container\n\n\ndef first_leaf_column(node: Node) -> Optional[int]:\n \"\"\"Returns the column of the first leaf child of a node.\"\"\"\n for child in node.children:\n if isinstance(child, Leaf):\n return child.column\n return None\n\n\ndef first_child_is_arith(node: Node) -> bool:\n \"\"\"Whether first child is an arithmetic or a binary arithmetic expression\"\"\"\n expr_types = {\n syms.arith_expr,\n syms.shift_expr,\n syms.xor_expr,\n syms.and_expr,\n }\n return bool(node.children and node.children[0].type in expr_types)\n\n\ndef is_docstring(leaf: Leaf) -> bool:\n if prev_siblings_are(\n leaf.parent, [None, token.NEWLINE, token.INDENT, syms.simple_stmt]\n ):\n return True\n\n # Multiline docstring on the same line as the `def`.\n if prev_siblings_are(leaf.parent, [syms.parameters, token.COLON, syms.simple_stmt]):\n # `syms.parameters` is only used in funcdefs and async_funcdefs in the Python\n # grammar. 
We're safe to return True without further checks.\n return True\n\n return False\n\n\ndef is_empty_tuple(node: LN) -> bool:\n \"\"\"Return True if `node` holds an empty tuple.\"\"\"\n return (\n node.type == syms.atom\n and len(node.children) == 2\n and node.children[0].type == token.LPAR\n and node.children[1].type == token.RPAR\n )\n\n\ndef is_one_tuple(node: LN) -> bool:\n \"\"\"Return True if `node` holds a tuple with one element, with or without parens.\"\"\"\n if node.type == syms.atom:\n gexp = unwrap_singleton_parenthesis(node)\n if gexp is None or gexp.type != syms.testlist_gexp:\n return False\n\n return len(gexp.children) == 2 and gexp.children[1].type == token.COMMA\n\n return (\n node.type in IMPLICIT_TUPLE\n and len(node.children) == 2\n and node.children[1].type == token.COMMA\n )\n\n\ndef is_one_tuple_between(opening: Leaf, closing: Leaf, leaves: List[Leaf]) -> bool:\n \"\"\"Return True if content between `opening` and `closing` looks like a one-tuple.\"\"\"\n if opening.type != token.LPAR and closing.type != token.RPAR:\n return False\n\n depth = closing.bracket_depth + 1\n for _opening_index, leaf in enumerate(leaves):\n if leaf is opening:\n break\n\n else:\n raise LookupError(\"Opening paren not found in `leaves`\")\n\n commas = 0\n _opening_index += 1\n for leaf in leaves[_opening_index:]:\n if leaf is closing:\n break\n\n bracket_depth = leaf.bracket_depth\n if bracket_depth == depth and leaf.type == token.COMMA:\n commas += 1\n if leaf.parent and leaf.parent.type in {\n syms.arglist,\n syms.typedargslist,\n }:\n commas += 1\n break\n\n return commas < 2\n\n\ndef is_walrus_assignment(node: LN) -> bool:\n \"\"\"Return True iff `node` is of the shape ( test := test )\"\"\"\n inner = unwrap_singleton_parenthesis(node)\n return inner is not None and inner.type == syms.namedexpr_test\n\n\ndef is_simple_decorator_trailer(node: LN, last: bool = False) -> bool:\n \"\"\"Return True iff `node` is a trailer valid in a simple decorator\"\"\"\n return node.type == syms.trailer and (\n (\n len(node.children) == 2\n and node.children[0].type == token.DOT\n and node.children[1].type == token.NAME\n )\n # last trailer can be an argument-less parentheses pair\n or (\n last\n and len(node.children) == 2\n and node.children[0].type == token.LPAR\n and node.children[1].type == token.RPAR\n )\n # last trailer can be arguments\n or (\n last\n and len(node.children) == 3\n and node.children[0].type == token.LPAR\n # and node.children[1].type == syms.argument\n and node.children[2].type == token.RPAR\n )\n )\n\n\ndef is_simple_decorator_expression(node: LN) -> bool:\n \"\"\"Return True iff `node` could be a 'dotted name' decorator\n\n This function takes the node of the 'namedexpr_test' of the new decorator\n grammar and test if it would be valid under the old decorator grammar.\n\n The old grammar was: decorator: @ dotted_name [arguments] NEWLINE\n The new grammar is : decorator: @ namedexpr_test NEWLINE\n \"\"\"\n if node.type == token.NAME:\n return True\n if node.type == syms.power:\n if node.children:\n return (\n node.children[0].type == token.NAME\n and all(map(is_simple_decorator_trailer, node.children[1:-1]))\n and (\n len(node.children) < 2\n or is_simple_decorator_trailer(node.children[-1], last=True)\n )\n )\n return False\n\n\ndef is_yield(node: LN) -> bool:\n \"\"\"Return True if `node` holds a `yield` or `yield from` expression.\"\"\"\n if node.type == syms.yield_expr:\n return True\n\n if node.type == token.NAME and node.value == \"yield\": # type: ignore\n return True\n\n if 
node.type != syms.atom:\n return False\n\n if len(node.children) != 3:\n return False\n\n lpar, expr, rpar = node.children\n if lpar.type == token.LPAR and rpar.type == token.RPAR:\n return is_yield(expr)\n\n return False\n\n\ndef is_vararg(leaf: Leaf, within: Set[NodeType]) -> bool:\n \"\"\"Return True if `leaf` is a star or double star in a vararg or kwarg.\n\n If `within` includes VARARGS_PARENTS, this applies to function signatures.\n If `within` includes UNPACKING_PARENTS, it applies to right hand-side\n extended iterable unpacking (PEP 3132) and additional unpacking\n generalizations (PEP 448).\n \"\"\"\n if leaf.type not in VARARGS_SPECIALS or not leaf.parent:\n return False\n\n p = leaf.parent\n if p.type == syms.star_expr:\n # Star expressions are also used as assignment targets in extended\n # iterable unpacking (PEP 3132). See what its parent is instead.\n if not p.parent:\n return False\n\n p = p.parent\n\n return p.type in within\n\n\ndef is_multiline_string(leaf: Leaf) -> bool:\n \"\"\"Return True if `leaf` is a multiline string that actually spans many lines.\"\"\"\n return has_triple_quotes(leaf.value) and \"\\n\" in leaf.value\n\n\ndef is_stub_suite(node: Node) -> bool:\n \"\"\"Return True if `node` is a suite with a stub body.\"\"\"\n if (\n len(node.children) != 4\n or node.children[0].type != token.NEWLINE\n or node.children[1].type != token.INDENT\n or node.children[3].type != token.DEDENT\n ):\n return False\n\n return is_stub_body(node.children[2])\n\n\ndef is_stub_body(node: LN) -> bool:\n \"\"\"Return True if `node` is a simple statement containing an ellipsis.\"\"\"\n if not isinstance(node, Node) or node.type != syms.simple_stmt:\n return False\n\n if len(node.children) != 2:\n return False\n\n child = node.children[0]\n return (\n child.type == syms.atom\n and len(child.children) == 3\n and all(leaf == Leaf(token.DOT, \".\") for leaf in child.children)\n )\n\n\ndef is_atom_with_invisible_parens(node: LN) -> bool:\n \"\"\"Given a `LN`, determines whether it's an atom `node` with invisible\n parens. Useful in dedupe-ing and normalizing parens.\n \"\"\"\n if isinstance(node, Leaf) or node.type != syms.atom:\n return False\n\n first, last = node.children[0], node.children[-1]\n return (\n isinstance(first, Leaf)\n and first.type == token.LPAR\n and first.value == \"\"\n and isinstance(last, Leaf)\n and last.type == token.RPAR\n and last.value == \"\"\n )\n\n\ndef is_empty_par(leaf: Leaf) -> bool:\n return is_empty_lpar(leaf) or is_empty_rpar(leaf)\n\n\ndef is_empty_lpar(leaf: Leaf) -> bool:\n return leaf.type == token.LPAR and leaf.value == \"\"\n\n\ndef is_empty_rpar(leaf: Leaf) -> bool:\n return leaf.type == token.RPAR and leaf.value == \"\"\n\n\ndef is_import(leaf: Leaf) -> bool:\n \"\"\"Return True if the given leaf starts an import statement.\"\"\"\n p = leaf.parent\n t = leaf.type\n v = leaf.value\n return bool(\n t == token.NAME\n and (\n (v == \"import\" and p and p.type == syms.import_name)\n or (v == \"from\" and p and p.type == syms.import_from)\n )\n )\n\n\ndef is_type_comment(leaf: Leaf, suffix: str = \"\") -> bool:\n \"\"\"Return True if the given leaf is a special comment.\n Only returns true for type comments for now.\"\"\"\n t = leaf.type\n v = leaf.value\n return t in {token.COMMENT, STANDALONE_COMMENT} and v.startswith(\"# type:\" + suffix)\n\n\ndef wrap_in_parentheses(parent: Node, child: LN, *, visible: bool = True) -> None:\n \"\"\"Wrap `child` in parentheses.\n\n This replaces `child` with an atom holding the parentheses and the old\n child. 
That requires moving the prefix.\n\n If `visible` is False, the leaves will be valueless (and thus invisible).\n \"\"\"\n lpar = Leaf(token.LPAR, \"(\" if visible else \"\")\n rpar = Leaf(token.RPAR, \")\" if visible else \"\")\n prefix = child.prefix\n child.prefix = \"\"\n index = child.remove() or 0\n new_child = Node(syms.atom, [lpar, child, rpar])\n new_child.prefix = prefix\n parent.insert_child(index, new_child)\n\n\ndef unwrap_singleton_parenthesis(node: LN) -> Optional[LN]:\n \"\"\"Returns `wrapped` if `node` is of the shape ( wrapped ).\n\n Parenthesis can be optional. Returns None otherwise\"\"\"\n if len(node.children) != 3:\n return None\n\n lpar, wrapped, rpar = node.children\n if not (lpar.type == token.LPAR and rpar.type == token.RPAR):\n return None\n\n return wrapped\n\n\ndef ensure_visible(leaf: Leaf) -> None:\n \"\"\"Make sure parentheses are visible.\n\n They could be invisible as part of some statements (see\n :func:`normalize_invisible_parens` and :func:`visit_import_from`).\n \"\"\"\n if leaf.type == token.LPAR:\n leaf.value = \"(\"\n elif leaf.type == token.RPAR:\n leaf.value = \")\"\n", "path": "src/black/nodes.py" } ]
diff --git a/CHANGES.md b/CHANGES.md index 59042914174..c9a4f09a72a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -13,6 +13,7 @@ `match a, *b:` (#2639) (#2659) - Fix `match`/`case` statements that contain `match`/`case` soft keywords multiple times, like `match re.match()` (#2661) +- Fix `case` statements with an inline body (#2665) - Fix assignment to environment variables in Jupyter Notebooks (#2642) - Add `flake8-simplify` and `flake8-comprehensions` plugins (#2653) - Fix determination of f-string expression spans (#2654) diff --git a/src/black/nodes.py b/src/black/nodes.py index 36dd1890511..437051d3f6d 100644 --- a/src/black/nodes.py +++ b/src/black/nodes.py @@ -52,6 +52,8 @@ syms.with_stmt, syms.funcdef, syms.classdef, + syms.match_stmt, + syms.case_block, } STANDALONE_COMMENT: Final = 153 token.tok_name[STANDALONE_COMMENT] = "STANDALONE_COMMENT" diff --git a/tests/data/pattern_matching_style.py b/tests/data/pattern_matching_style.py new file mode 100644 index 00000000000..c1c0aeedb70 --- /dev/null +++ b/tests/data/pattern_matching_style.py @@ -0,0 +1,27 @@ +match something: + case b(): print(1+1) + case c( + very_complex=True, + perhaps_even_loooooooooooooooooooooooooooooooooooooong=- 1 + ): print(1) + case c( + very_complex=True, + perhaps_even_loooooooooooooooooooooooooooooooooooooong=-1 + ): print(2) + case a: pass + +# output + +match something: + case b(): + print(1 + 1) + case c( + very_complex=True, perhaps_even_loooooooooooooooooooooooooooooooooooooong=-1 + ): + print(1) + case c( + very_complex=True, perhaps_even_loooooooooooooooooooooooooooooooooooooong=-1 + ): + print(2) + case a: + pass diff --git a/tests/test_format.py b/tests/test_format.py index f97d7165b1a..d44be1e8712 100644 --- a/tests/test_format.py +++ b/tests/test_format.py @@ -74,6 +74,7 @@ "pattern_matching_simple", "pattern_matching_complex", "pattern_matching_extras", + "pattern_matching_style", "parenthesized_context_managers", ]
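As a rough reading of why the one-line change to `STATEMENT` above is sufficient: as I understand it, black splits an inline body onto its own indented line whenever the body's parent node type is in `STATEMENT`, so `match_stmt` and `case_block` simply had to join that set. The sketch below mimics the membership check with stand-in strings; it is not black's code.

```python
# Illustrative stand-ins for blib2to3 grammar symbols; not black's real values.
STATEMENT = {"if_stmt", "while_stmt", "for_stmt", "try_stmt", "funcdef", "classdef"}

def inline_body_gets_split(parent_type: str) -> bool:
    """Per this sketch: would an inline body under this parent be split?"""
    return parent_type in STATEMENT

print(inline_body_gets_split("case_block"))   # False before the fix

STATEMENT |= {"match_stmt", "case_block"}     # what the diff above adds

print(inline_body_gets_split("case_block"))   # True after the fix
```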
pypa__pipenv-505
Allow file:// URIs as pipenv paths

- Any reason not to?
- PR incoming
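A compact restatement of the change the incoming PR makes (names mirror `FILE_LIST` and `is_file()` from `pipenv/utils.py` shown below; the example archive path is made up):

```python
# Prefixes pipenv treats as file-style dependencies; 'file:///' is the addition
# this issue asks for.
FILE_LIST = ('http://', 'https://', 'ftp://', 'file:///')

def is_file(package):
    """Determine if a package name is for a File dependency."""
    return any(package.startswith(prefix) for prefix in FILE_LIST)

print(is_file('file:///tmp/requests-2.18.4.tar.gz'))  # True (hypothetical path)
print(is_file('requests==2.18.4'))                    # False
```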
[ { "content": "# -*- coding: utf-8 -*-\nimport os\nimport hashlib\nimport tempfile\n\nfrom piptools.resolver import Resolver\nfrom piptools.repositories.pypi import PyPIRepository\nfrom piptools.scripts.compile import get_pip_command\nfrom piptools import logging\n\nimport requests\nimport parse\nimport pip\nimport six\n\n# List of version control systems we support.\nVCS_LIST = ('git', 'svn', 'hg', 'bzr')\nFILE_LIST = ('http://', 'https://', 'ftp://')\n\nrequests = requests.session()\n\n\nclass PipCommand(pip.basecommand.Command):\n \"\"\"Needed for pip-tools.\"\"\"\n name = 'PipCommand'\n\n\ndef shellquote(s):\n \"\"\"Prepares a string for the shell (on Windows too!)\"\"\"\n return '\"' + s.replace(\"'\", \"'\\\\''\") + '\"'\n\n\ndef clean_pkg_version(version):\n \"\"\"Uses pip to prepare a package version string, from our internal version.\"\"\"\n return six.u(pep440_version(str(version).replace('==', '')))\n\n\ndef resolve_deps(deps, sources=None, verbose=False):\n \"\"\"Given a list of dependencies, return a resolved list of dependencies,\n using pip-tools -- and their hashes, using the warehouse API / pip.\n \"\"\"\n\n constraints = []\n\n for dep in deps:\n if dep.startswith('-e '):\n constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])\n else:\n constraint = pip.req.InstallRequirement.from_line(dep)\n constraints.append(constraint)\n\n pip_command = get_pip_command()\n\n pip_args = []\n\n if sources:\n pip_args.extend(['-i', sources[0]['url']])\n\n pip_options, _ = pip_command.parse_args(pip_args)\n\n pypi = PyPIRepository(pip_options=pip_options, session=requests)\n\n if verbose:\n logging.log.verbose = True\n\n resolver = Resolver(constraints=constraints, repository=pypi)\n results = []\n\n # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages\n resolved_tree = resolver.resolve()\n\n for result in resolved_tree:\n name = pep423_name(result.name)\n version = clean_pkg_version(result.specifier)\n\n collected_hashes = []\n\n try:\n # Grab the hashes from the new warehouse API.\n r = requests.get('https://pypi.org/pypi/{0}/json'.format(name))\n api_releases = r.json()['releases']\n\n cleaned_releases = {}\n for api_version, api_info in api_releases.items():\n cleaned_releases[clean_pkg_version(api_version)] = api_info\n\n for release in cleaned_releases[version]:\n collected_hashes.append(release['digests']['sha256'])\n\n collected_hashes = ['sha256:' + s for s in collected_hashes]\n\n # Collect un-collectable hashes.\n if not collected_hashes:\n collected_hashes = list(list(resolver.resolve_hashes([result]).items())[0][1])\n\n except (ValueError, KeyError):\n pass\n\n results.append({'name': name, 'version': version, 'hashes': collected_hashes})\n\n return results\n\n\ndef format_toml(data):\n \"\"\"Pretty-formats a given toml string.\"\"\"\n\n data = data.split('\\n')\n for i, line in enumerate(data):\n if i > 0:\n if line.startswith('['):\n data[i] = '\\n{0}'.format(line)\n\n return '\\n'.join(data)\n\n\ndef multi_split(s, split):\n \"\"\"Splits on multiple given separators.\"\"\"\n\n for r in split:\n s = s.replace(r, '|')\n\n return [i for i in s.split('|') if len(i) > 0]\n\n\ndef convert_deps_from_pip(dep):\n \"\"\"\"Converts a pip-formatted dependency to a Pipfile-formatted one.\"\"\"\n\n dependency = {}\n\n import requirements\n req = [r for r in requirements.parse(dep)][0]\n\n # File installs.\n if req.uri and not req.vcs:\n\n # Assign a package name to the file, last 7 of it's sha256 hex digest.\n req.name = 
hashlib.sha256(req.uri.encode('utf-8')).hexdigest()\n req.name = req.name[len(req.name) - 7:]\n\n # {file: uri} TOML (spec 3 I guess...)\n dependency[req.name] = {'file': req.uri}\n\n # VCS Installs.\n if req.vcs:\n if req.name is None:\n raise ValueError('pipenv requires an #egg fragment for version controlled '\n 'dependencies. Please install remote dependency '\n 'in the form {0}#egg=<package-name>.'.format(req.uri))\n\n # Crop off the git+, etc part.\n dependency[req.name] = {req.vcs: req.uri[len(req.vcs) + 1:]}\n\n # Add --editable, if it's there.\n if req.editable:\n dependency[req.name].update({'editable': True})\n\n # Add the specifier, if it was provided.\n if req.revision:\n dependency[req.name].update({'ref': req.revision})\n\n elif req.specs or req.extras:\n\n specs = None\n # Comparison operators: e.g. Django>1.10\n if req.specs:\n r = multi_split(dep, '=<>')\n specs = dep[len(r[0]):]\n dependency[req.name] = specs\n\n # Extras: e.g. requests[socks]\n if req.extras:\n dependency[req.name] = {'extras': req.extras}\n\n if specs:\n dependency[req.name].update({'version': specs})\n\n # Bare dependencies: e.g. requests\n else:\n dependency[dep] = '*'\n\n return dependency\n\n\ndef convert_deps_to_pip(deps, r=True):\n \"\"\"\"Converts a Pipfile-formatted dependency to a pip-formatted one.\"\"\"\n\n dependencies = []\n\n for dep in deps.keys():\n\n # Default (e.g. '>1.10').\n extra = deps[dep] if isinstance(deps[dep], six.string_types) else ''\n version = ''\n\n # Get rid of '*'.\n if deps[dep] == '*' or str(extra) == '{}':\n extra = ''\n\n hash = ''\n # Support for single hash (spec 1).\n if 'hash' in deps[dep]:\n hash = ' --hash={0}'.format(deps[dep]['hash'])\n\n # Support for multiple hashes (spec 2).\n if 'hashes' in deps[dep]:\n hash = '{0} '.format(''.join([' --hash={0} '.format(h) for h in deps[dep]['hashes']]))\n\n # Support for extras (e.g. 
requests[socks])\n if 'extras' in deps[dep]:\n extra = '[{0}]'.format(deps[dep]['extras'][0])\n\n if 'version' in deps[dep]:\n version = deps[dep]['version']\n\n # Support for version control\n maybe_vcs = [vcs for vcs in VCS_LIST if vcs in deps[dep]]\n vcs = maybe_vcs[0] if maybe_vcs else None\n\n # Support for files.\n if 'file' in deps[dep]:\n dep = deps[dep]['file']\n\n if vcs:\n extra = '{0}+{1}'.format(vcs, deps[dep][vcs])\n\n # Support for @refs.\n if 'ref' in deps[dep]:\n extra += '@{0}'.format(deps[dep]['ref'])\n\n extra += '#egg={0}'.format(dep)\n\n # Support for editable.\n if 'editable' in deps[dep]:\n # Support for --egg.\n dep = '-e '\n else:\n dep = ''\n\n dependencies.append('{0}{1}{2}{3}'.format(dep, extra, version, hash))\n\n if not r:\n return dependencies\n\n # Write requirements.txt to tmp directory.\n f = tempfile.NamedTemporaryFile(suffix='-requirements.txt', delete=False)\n f.write('\\n'.join(dependencies).encode('utf-8'))\n return f.name\n\n\ndef mkdir_p(newdir):\n \"\"\"works the way a good mkdir should :)\n - already exists, silently complete\n - regular file in the way, raise an exception\n - parent directory(ies) does not exist, make them as well\n From: http://code.activestate.com/recipes/82465-a-friendly-mkdir/\n \"\"\"\n\n if os.path.isdir(newdir):\n pass\n elif os.path.isfile(newdir):\n raise OSError(\"a file with the same name as the desired dir, '{0}', already exists.\".format(newdir))\n else:\n head, tail = os.path.split(newdir)\n if head and not os.path.isdir(head):\n mkdir_p(head)\n if tail:\n os.mkdir(newdir)\n\n\ndef is_required_version(version, specified_version):\n \"\"\"Check to see if there's a hard requirement for version\n number provided in the Pipfile.\n \"\"\"\n\n # Certain packages may be defined with multiple values.\n if isinstance(specified_version, dict):\n specified_version = specified_version.get('version', '')\n if specified_version.startswith('=='):\n return version.strip() == specified_version.split('==')[1].strip()\n return True\n\n\ndef is_vcs(pipfile_entry):\n \"\"\"Determine if dictionary entry from Pipfile is for a vcs dependency.\"\"\"\n\n if isinstance(pipfile_entry, dict):\n return any(key for key in pipfile_entry.keys() if key in VCS_LIST)\n return False\n\n\ndef is_file(package):\n \"\"\"Determine if a package name is for a File dependency.\"\"\"\n for start in FILE_LIST:\n if package.startswith(start):\n return True\n\n return False\n\n\ndef pep440_version(version):\n \"\"\"Normalize version to PEP 440 standards\"\"\"\n\n # Use pip built-in version parser.\n return str(pip.index.parse_version(version))\n\n\ndef pep423_name(name):\n \"\"\"Normalize package name to PEP 423 style standard.\"\"\"\n\n return name.lower().replace('_', '-')\n\n\ndef proper_case(package_name):\n \"\"\"Properly case project name from pypi.org.\"\"\"\n\n # Hit the simple API.\n r = requests.get('https://pypi.org/pypi/{0}/json'.format(package_name), timeout=0.3, stream=True)\n if not r.ok:\n raise IOError('Unable to find package {0} in PyPI repository.'.format(package_name))\n\n r = parse.parse('https://pypi.org/pypi/{name}/json', r.url)\n good_name = r['name']\n\n return good_name\n\n\ndef split_vcs(split_file):\n \"\"\"Split VCS dependencies out from file.\"\"\"\n\n if 'packages' in split_file or 'dev-packages' in split_file:\n sections = ('packages', 'dev-packages')\n elif 'default' in split_file or 'develop' in split_file:\n sections = ('default', 'develop')\n\n # For each vcs entry in a given section, move it to section-vcs.\n for section in 
sections:\n entries = split_file.get(section, {})\n vcs_dict = dict((k, entries.pop(k)) for k in list(entries.keys()) if is_vcs(entries[k]))\n split_file[section + '-vcs'] = vcs_dict\n\n return split_file\n\n\ndef recase_file(file_dict):\n \"\"\"Recase file before writing to output.\"\"\"\n\n if 'packages' in file_dict or 'dev-packages' in file_dict:\n sections = ('packages', 'dev-packages')\n elif 'default' in file_dict or 'develop' in file_dict:\n sections = ('default', 'develop')\n\n for section in sections:\n file_section = file_dict.get(section, {})\n\n # Try to properly case each key if we can.\n for key in list(file_section.keys()):\n try:\n cased_key = proper_case(key)\n except IOError:\n cased_key = key\n file_section[cased_key] = file_section.pop(key)\n\n return file_dict\n\n\ndef walk_up(bottom):\n \"\"\"Mimic os.walk, but walk 'up' instead of down the directory tree.\n From: https://gist.github.com/zdavkeos/1098474\n \"\"\"\n\n bottom = os.path.realpath(bottom)\n\n # Get files in current dir.\n try:\n names = os.listdir(bottom)\n except Exception:\n return\n\n dirs, nondirs = [], []\n for name in names:\n if os.path.isdir(os.path.join(bottom, name)):\n dirs.append(name)\n else:\n nondirs.append(name)\n\n yield bottom, dirs, nondirs\n\n new_path = os.path.realpath(os.path.join(bottom, '..'))\n\n # See if we are at the top.\n if new_path == bottom:\n return\n\n for x in walk_up(new_path):\n yield x\n\n\ndef find_requirements(max_depth=3):\n \"\"\"Returns the path of a Pipfile in parent directories.\"\"\"\n\n i = 0\n for c, d, f in walk_up(os.getcwd()):\n i += 1\n\n if i < max_depth:\n if 'requirements.txt':\n r = os.path.join(c, 'requirements.txt')\n if os.path.isfile(r):\n return r\n raise RuntimeError('No requirements.txt found!')\n", "path": "pipenv/utils.py" } ]
[ { "content": "# -*- coding: utf-8 -*-\nimport os\nimport hashlib\nimport tempfile\n\nfrom piptools.resolver import Resolver\nfrom piptools.repositories.pypi import PyPIRepository\nfrom piptools.scripts.compile import get_pip_command\nfrom piptools import logging\n\nimport requests\nimport parse\nimport pip\nimport six\n\n# List of version control systems we support.\nVCS_LIST = ('git', 'svn', 'hg', 'bzr')\nFILE_LIST = ('http://', 'https://', 'ftp://', 'file:///')\n\nrequests = requests.session()\n\n\nclass PipCommand(pip.basecommand.Command):\n \"\"\"Needed for pip-tools.\"\"\"\n name = 'PipCommand'\n\n\ndef shellquote(s):\n \"\"\"Prepares a string for the shell (on Windows too!)\"\"\"\n return '\"' + s.replace(\"'\", \"'\\\\''\") + '\"'\n\n\ndef clean_pkg_version(version):\n \"\"\"Uses pip to prepare a package version string, from our internal version.\"\"\"\n return six.u(pep440_version(str(version).replace('==', '')))\n\n\ndef resolve_deps(deps, sources=None, verbose=False):\n \"\"\"Given a list of dependencies, return a resolved list of dependencies,\n using pip-tools -- and their hashes, using the warehouse API / pip.\n \"\"\"\n\n constraints = []\n\n for dep in deps:\n if dep.startswith('-e '):\n constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])\n else:\n constraint = pip.req.InstallRequirement.from_line(dep)\n constraints.append(constraint)\n\n pip_command = get_pip_command()\n\n pip_args = []\n\n if sources:\n pip_args.extend(['-i', sources[0]['url']])\n\n pip_options, _ = pip_command.parse_args(pip_args)\n\n pypi = PyPIRepository(pip_options=pip_options, session=requests)\n\n if verbose:\n logging.log.verbose = True\n\n resolver = Resolver(constraints=constraints, repository=pypi)\n results = []\n\n # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages\n resolved_tree = resolver.resolve()\n\n for result in resolved_tree:\n name = pep423_name(result.name)\n version = clean_pkg_version(result.specifier)\n\n collected_hashes = []\n\n try:\n # Grab the hashes from the new warehouse API.\n r = requests.get('https://pypi.org/pypi/{0}/json'.format(name))\n api_releases = r.json()['releases']\n\n cleaned_releases = {}\n for api_version, api_info in api_releases.items():\n cleaned_releases[clean_pkg_version(api_version)] = api_info\n\n for release in cleaned_releases[version]:\n collected_hashes.append(release['digests']['sha256'])\n\n collected_hashes = ['sha256:' + s for s in collected_hashes]\n\n # Collect un-collectable hashes.\n if not collected_hashes:\n collected_hashes = list(list(resolver.resolve_hashes([result]).items())[0][1])\n\n except (ValueError, KeyError):\n pass\n\n results.append({'name': name, 'version': version, 'hashes': collected_hashes})\n\n return results\n\n\ndef format_toml(data):\n \"\"\"Pretty-formats a given toml string.\"\"\"\n\n data = data.split('\\n')\n for i, line in enumerate(data):\n if i > 0:\n if line.startswith('['):\n data[i] = '\\n{0}'.format(line)\n\n return '\\n'.join(data)\n\n\ndef multi_split(s, split):\n \"\"\"Splits on multiple given separators.\"\"\"\n\n for r in split:\n s = s.replace(r, '|')\n\n return [i for i in s.split('|') if len(i) > 0]\n\n\ndef convert_deps_from_pip(dep):\n \"\"\"\"Converts a pip-formatted dependency to a Pipfile-formatted one.\"\"\"\n\n dependency = {}\n\n import requirements\n req = [r for r in requirements.parse(dep)][0]\n\n # File installs.\n if req.uri and not req.vcs:\n\n # Assign a package name to the file, last 7 of it's sha256 hex digest.\n req.name = 
hashlib.sha256(req.uri.encode('utf-8')).hexdigest()\n req.name = req.name[len(req.name) - 7:]\n\n # {file: uri} TOML (spec 3 I guess...)\n dependency[req.name] = {'file': req.uri}\n\n # VCS Installs.\n if req.vcs:\n if req.name is None:\n raise ValueError('pipenv requires an #egg fragment for version controlled '\n 'dependencies. Please install remote dependency '\n 'in the form {0}#egg=<package-name>.'.format(req.uri))\n\n # Crop off the git+, etc part.\n dependency[req.name] = {req.vcs: req.uri[len(req.vcs) + 1:]}\n\n # Add --editable, if it's there.\n if req.editable:\n dependency[req.name].update({'editable': True})\n\n # Add the specifier, if it was provided.\n if req.revision:\n dependency[req.name].update({'ref': req.revision})\n\n elif req.specs or req.extras:\n\n specs = None\n # Comparison operators: e.g. Django>1.10\n if req.specs:\n r = multi_split(dep, '=<>')\n specs = dep[len(r[0]):]\n dependency[req.name] = specs\n\n # Extras: e.g. requests[socks]\n if req.extras:\n dependency[req.name] = {'extras': req.extras}\n\n if specs:\n dependency[req.name].update({'version': specs})\n\n # Bare dependencies: e.g. requests\n else:\n dependency[dep] = '*'\n\n return dependency\n\n\ndef convert_deps_to_pip(deps, r=True):\n \"\"\"\"Converts a Pipfile-formatted dependency to a pip-formatted one.\"\"\"\n\n dependencies = []\n\n for dep in deps.keys():\n\n # Default (e.g. '>1.10').\n extra = deps[dep] if isinstance(deps[dep], six.string_types) else ''\n version = ''\n\n # Get rid of '*'.\n if deps[dep] == '*' or str(extra) == '{}':\n extra = ''\n\n hash = ''\n # Support for single hash (spec 1).\n if 'hash' in deps[dep]:\n hash = ' --hash={0}'.format(deps[dep]['hash'])\n\n # Support for multiple hashes (spec 2).\n if 'hashes' in deps[dep]:\n hash = '{0} '.format(''.join([' --hash={0} '.format(h) for h in deps[dep]['hashes']]))\n\n # Support for extras (e.g. 
requests[socks])\n if 'extras' in deps[dep]:\n extra = '[{0}]'.format(deps[dep]['extras'][0])\n\n if 'version' in deps[dep]:\n version = deps[dep]['version']\n\n # Support for version control\n maybe_vcs = [vcs for vcs in VCS_LIST if vcs in deps[dep]]\n vcs = maybe_vcs[0] if maybe_vcs else None\n\n # Support for files.\n if 'file' in deps[dep]:\n dep = deps[dep]['file']\n\n if vcs:\n extra = '{0}+{1}'.format(vcs, deps[dep][vcs])\n\n # Support for @refs.\n if 'ref' in deps[dep]:\n extra += '@{0}'.format(deps[dep]['ref'])\n\n extra += '#egg={0}'.format(dep)\n\n # Support for editable.\n if 'editable' in deps[dep]:\n # Support for --egg.\n dep = '-e '\n else:\n dep = ''\n\n dependencies.append('{0}{1}{2}{3}'.format(dep, extra, version, hash))\n\n if not r:\n return dependencies\n\n # Write requirements.txt to tmp directory.\n f = tempfile.NamedTemporaryFile(suffix='-requirements.txt', delete=False)\n f.write('\\n'.join(dependencies).encode('utf-8'))\n return f.name\n\n\ndef mkdir_p(newdir):\n \"\"\"works the way a good mkdir should :)\n - already exists, silently complete\n - regular file in the way, raise an exception\n - parent directory(ies) does not exist, make them as well\n From: http://code.activestate.com/recipes/82465-a-friendly-mkdir/\n \"\"\"\n\n if os.path.isdir(newdir):\n pass\n elif os.path.isfile(newdir):\n raise OSError(\"a file with the same name as the desired dir, '{0}', already exists.\".format(newdir))\n else:\n head, tail = os.path.split(newdir)\n if head and not os.path.isdir(head):\n mkdir_p(head)\n if tail:\n os.mkdir(newdir)\n\n\ndef is_required_version(version, specified_version):\n \"\"\"Check to see if there's a hard requirement for version\n number provided in the Pipfile.\n \"\"\"\n\n # Certain packages may be defined with multiple values.\n if isinstance(specified_version, dict):\n specified_version = specified_version.get('version', '')\n if specified_version.startswith('=='):\n return version.strip() == specified_version.split('==')[1].strip()\n return True\n\n\ndef is_vcs(pipfile_entry):\n \"\"\"Determine if dictionary entry from Pipfile is for a vcs dependency.\"\"\"\n\n if isinstance(pipfile_entry, dict):\n return any(key for key in pipfile_entry.keys() if key in VCS_LIST)\n return False\n\n\ndef is_file(package):\n \"\"\"Determine if a package name is for a File dependency.\"\"\"\n for start in FILE_LIST:\n if package.startswith(start):\n return True\n\n return False\n\n\ndef pep440_version(version):\n \"\"\"Normalize version to PEP 440 standards\"\"\"\n\n # Use pip built-in version parser.\n return str(pip.index.parse_version(version))\n\n\ndef pep423_name(name):\n \"\"\"Normalize package name to PEP 423 style standard.\"\"\"\n\n return name.lower().replace('_', '-')\n\n\ndef proper_case(package_name):\n \"\"\"Properly case project name from pypi.org.\"\"\"\n\n # Hit the simple API.\n r = requests.get('https://pypi.org/pypi/{0}/json'.format(package_name), timeout=0.3, stream=True)\n if not r.ok:\n raise IOError('Unable to find package {0} in PyPI repository.'.format(package_name))\n\n r = parse.parse('https://pypi.org/pypi/{name}/json', r.url)\n good_name = r['name']\n\n return good_name\n\n\ndef split_vcs(split_file):\n \"\"\"Split VCS dependencies out from file.\"\"\"\n\n if 'packages' in split_file or 'dev-packages' in split_file:\n sections = ('packages', 'dev-packages')\n elif 'default' in split_file or 'develop' in split_file:\n sections = ('default', 'develop')\n\n # For each vcs entry in a given section, move it to section-vcs.\n for section in 
sections:\n entries = split_file.get(section, {})\n vcs_dict = dict((k, entries.pop(k)) for k in list(entries.keys()) if is_vcs(entries[k]))\n split_file[section + '-vcs'] = vcs_dict\n\n return split_file\n\n\ndef recase_file(file_dict):\n \"\"\"Recase file before writing to output.\"\"\"\n\n if 'packages' in file_dict or 'dev-packages' in file_dict:\n sections = ('packages', 'dev-packages')\n elif 'default' in file_dict or 'develop' in file_dict:\n sections = ('default', 'develop')\n\n for section in sections:\n file_section = file_dict.get(section, {})\n\n # Try to properly case each key if we can.\n for key in list(file_section.keys()):\n try:\n cased_key = proper_case(key)\n except IOError:\n cased_key = key\n file_section[cased_key] = file_section.pop(key)\n\n return file_dict\n\n\ndef walk_up(bottom):\n \"\"\"Mimic os.walk, but walk 'up' instead of down the directory tree.\n From: https://gist.github.com/zdavkeos/1098474\n \"\"\"\n\n bottom = os.path.realpath(bottom)\n\n # Get files in current dir.\n try:\n names = os.listdir(bottom)\n except Exception:\n return\n\n dirs, nondirs = [], []\n for name in names:\n if os.path.isdir(os.path.join(bottom, name)):\n dirs.append(name)\n else:\n nondirs.append(name)\n\n yield bottom, dirs, nondirs\n\n new_path = os.path.realpath(os.path.join(bottom, '..'))\n\n # See if we are at the top.\n if new_path == bottom:\n return\n\n for x in walk_up(new_path):\n yield x\n\n\ndef find_requirements(max_depth=3):\n \"\"\"Returns the path of a Pipfile in parent directories.\"\"\"\n\n i = 0\n for c, d, f in walk_up(os.getcwd()):\n i += 1\n\n if i < max_depth:\n if 'requirements.txt':\n r = os.path.join(c, 'requirements.txt')\n if os.path.isfile(r):\n return r\n raise RuntimeError('No requirements.txt found!')\n", "path": "pipenv/utils.py" } ]
diff --git a/pipenv/utils.py b/pipenv/utils.py
index b81e35d0a4..59423a3a03 100644
--- a/pipenv/utils.py
+++ b/pipenv/utils.py
@@ -15,7 +15,7 @@
 
 # List of version control systems we support.
 VCS_LIST = ('git', 'svn', 'hg', 'bzr')
-FILE_LIST = ('http://', 'https://', 'ftp://')
+FILE_LIST = ('http://', 'https://', 'ftp://', 'file:///')
 
 requests = requests.session()
 
Gallopsled__pwntools-244
`pwnlib.tubes.tube.recvrepeat()` and `pwnlib.tubes.tube.recvall()` should never raise `EOFError`
If the connection is closed while calling these functions, we should simply return the received data.
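The requested behavior can be sketched in a few lines: keep filling the receive buffer until a timeout or EOF, catch the `EOFError`, and return whatever was buffered instead of propagating the exception. The `FakeTube` class and its canned chunks below are invented purely for illustration; only the try/except around the fill loop reflects the pattern asked for here (the updated `recvrepeat()` in the after version of `pwnlib/tubes/tube.py` further down applies the same pattern):

```python
# Sketch only: a stand-in tube whose recvrepeat() never lets EOFError escape.
# FakeTube and its canned chunks are hypothetical; the real class lives in
# pwnlib/tubes/tube.py and uses a Buffer object instead of a plain string.
class FakeTube(object):
    def __init__(self, chunks):
        self.chunks = list(chunks)  # data the fake "connection" will yield
        self.buffer = ''            # stand-in for the tube's receive buffer

    def _fillbuffer(self, timeout=None):
        if not self.chunks:
            raise EOFError('connection closed')  # simulate a closed socket
        data = self.chunks.pop(0)
        self.buffer += data
        return data

    def recvrepeat(self, timeout=None):
        try:
            # Keep pulling until a timeout (falsy return) or EOF stops us.
            while self._fillbuffer(timeout=timeout):
                pass
        except EOFError:
            pass  # connection closed: fall through and return what we have
        data, self.buffer = self.buffer, ''
        return data


t = FakeTube(['Hello, ', 'world'])
print(repr(t.recvrepeat()))  # 'Hello, world' -- no EOFError reaches the caller
```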
[ { "content": "# -*- coding: utf-8 -*-\nfrom .buffer import Buffer\nfrom .timeout import Timeout\nfrom .. import context, term, atexit\nfrom ..util import misc, fiddling\nfrom ..context import context\nimport re, threading, sys, time, subprocess, logging, string\n\nlog = logging.getLogger(__name__)\n\nclass tube(Timeout):\n \"\"\"\n Container of all the tube functions common to sockets, TTYs and SSH connetions.\n \"\"\"\n\n #: Delimiter to use for :meth:`sendline`, :meth:`recvline`,\n #: and related functions.\n newline = '\\n'\n\n def __init__(self, timeout=None):\n # assert type(self) == tube\n\n # assert isinstance(self, tube), (id(type(self)), id(tube))\n super(tube, self).__init__(timeout)\n self.buffer = Buffer()\n atexit.register(self.close)\n\n # Functions based on functions from subclasses\n def recv(self, numb = 2**20, timeout = None):\n r\"\"\"recv(numb = 2**31, timeout = None) -> str\n\n Receives up to `numb` bytes of data from the tube, and returns\n as soon as any quantity of data is available.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Raises:\n exceptions.EOFError: The connection is closed\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n\n Examples:\n\n >>> t = tube()\n >>> # Fake a data source\n >>> t.recv_raw = lambda n: 'Hello, world'\n >>> t.recv() == 'Hello, world'\n True\n >>> t.unrecv('Woohoo')\n >>> t.recv() == 'Woohoo'\n True\n >>> context.log_level = 'debug'\n >>> _ = t.recv() # doctest: +ELLIPSIS\n [...] Received 0xc bytes:\n 'Hello, world'\n >>> context.clear()\n\n \"\"\"\n return self._recv(numb, timeout) or ''\n\n def unrecv(self, data):\n \"\"\"unrecv(data)\n\n Puts the specified data back at the beginning of the receive\n buffer.\n\n Examples:\n\n .. 
doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: 'hello'\n >>> t.recv()\n 'hello'\n >>> t.recv()\n 'hello'\n >>> t.unrecv('world')\n >>> t.recv()\n 'world'\n >>> t.recv()\n 'hello'\n \"\"\"\n self.buffer.unget(data)\n\n def _fillbuffer(self, timeout = None):\n \"\"\"_fillbuffer(timeout = None)\n\n Fills the internal buffer from the pipe, by calling\n :meth:`recv_raw` exactly once.\n\n Returns:\n\n The bytes of data received, or ``''`` if no data was received.\n\n Examples:\n\n >>> t = tube()\n >>> t.recv_raw = lambda *a: 'abc'\n >>> len(t.buffer)\n 0\n >>> t._fillbuffer()\n 'abc'\n >>> len(t.buffer)\n 3\n \"\"\"\n data = ''\n\n with self.countdown(timeout):\n data = self.recv_raw(2**20)\n\n if data and log.isEnabledFor(logging.DEBUG):\n log.debug('Received %#x bytes:' % len(data))\n\n if all(c in string.printable for c in data):\n for line in data.splitlines(True):\n log.indented(repr(line), level=logging.DEBUG)\n else:\n log.indented(fiddling.hexdump(data))\n\n if data:\n self.buffer.add(data)\n\n return data\n\n\n def _recv(self, numb = 2**20, timeout = None):\n \"\"\"_recv(numb = 2**20, timeout = None) -> str\n\n Recieves one chunk of from the internal buffer or from the OS if the\n buffer is empty.\n \"\"\"\n data = ''\n\n # No buffered data, could not put anything in the buffer\n # before timeout.\n if not self.buffer and not self._fillbuffer(timeout):\n return ''\n\n return self.buffer.get(numb)\n\n def recvpred(self, pred, timeout = None):\n \"\"\"recvpred(pred, timeout = None) -> str\n\n Receives one byte at a time from the tube, until ``pred(bytes)``\n evaluates to True.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n pred(callable): Function to call, with the currently-accumulated data.\n timeout(int): Timeout for the operation\n\n Raises:\n exceptions.EOFError: The connection is closed\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n \"\"\"\n\n data = ''\n\n with self.countdown(timeout):\n while not pred(data):\n try:\n res = self.recv(1)\n except:\n self.unrecv(data)\n return ''\n\n if res:\n data += res\n else:\n self.unrecv(data)\n return ''\n\n return data\n\n def recvn(self, numb, timeout = None):\n \"\"\"recvn(numb, timeout = None) -> str\n\n Recieves exactly `n` bytes.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Raises:\n exceptions.EOFError: The connection closed before the request could be satisfied\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n\n Examples:\n\n .. 
doctest::\n\n >>> t = tube()\n >>> data = 'hello world'\n >>> t.recv_raw = lambda *a: data\n >>> t.recvn(len(data)) == data\n True\n >>> t.recvn(len(data)+1) == data + data[0]\n True\n >>> t.recv_raw = lambda *a: None\n >>> # The remaining data is buffered\n >>> t.recv() == data[1:]\n True\n \"\"\"\n\n # Keep track of how much data has been received\n # It will be pasted together at the end if a\n # timeout does not occur, or put into the tube buffer.\n with self.countdown(timeout):\n while self.timeout and len(self.buffer) < numb:\n self._fillbuffer()\n\n return self.buffer.get(numb)\n\n def recvuntil(self, delims, drop=False, timeout = None):\n \"\"\"recvuntil(delims, timeout = None) -> str\n\n Recieve data until one of `delims` is encountered.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n arguments:\n delims(str,tuple): String of delimiters characters, or list of delimiter strings.\n drop(bool): Drop the ending. If ``True`` it is removed from the end of the return value.\n\n Raises:\n exceptions.EOFError: The connection closed before the request could be satisfied\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n\n Examples:\n\n .. doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: \"Hello World!\"\n >>> t.recvuntil(' ')\n 'Hello '\n >>> t.clean(0)\n >>> # Matches on 'o' in 'Hello'\n >>> t.recvuntil(tuple(' Wor'))\n 'Hello'\n >>> t.clean(0)\n >>> # Matches expressly full string\n >>> t.recvuntil(' Wor')\n 'Hello Wor'\n >>> t.clean(0)\n >>> # Matches on full string, drops match\n >>> t.recvuntil(' Wor', drop=True)\n 'Hello'\n\n >>> # Try with regex special characters\n >>> t = tube()\n >>> t.recv_raw = lambda n: \"Hello|World\"\n >>> t.recvuntil('|', drop=True)\n 'Hello'\n\n \"\"\"\n # Convert string into list of characters\n if not hasattr(delims, '__iter__'):\n delims = (delims,)\n\n def escape_regex_special(sz):\n specials = '\\\\/.*+?|()[]{}^$'\n for s in specials:\n sz = sz.replace(s, '\\\\' + s)\n return sz\n\n delims = map(escape_regex_special, delims)\n expr = re.compile('(%s)' % '|'.join(delims))\n data = ''\n\n with self.countdown(timeout):\n while self.timeout:\n try:\n res = self.recv()\n except:\n self.unrecv(data)\n raise\n\n if res:\n data += res\n if not res:\n self.unrecv(data)\n return ''\n\n match = expr.search(data)\n if match:\n # Re-queue evrything after the match\n self.unrecv(data[match.end():])\n\n # If we're dropping the match, return everything up to start\n if drop:\n return data[:match.start()]\n return data[:match.end()]\n\n return ''\n\n def recvlines(self, numlines, keep = False, timeout = None):\n r\"\"\"recvlines(numlines, keep = False, timeout = None) -> str list\n\n Recieve up to ``numlines`` lines.\n\n A \"line\" is any sequence of bytes terminated by the byte sequence\n set by :attr:`newline`, which defaults to ``'\\n'``.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n numlines(int): Maximum number of lines to receive\n keep(bool): Keep newlines at the end of each line (``False``).\n timeout(int): Maximum timeout\n\n Raises:\n exceptions.EOFError: The connection closed before the request could be satisfied\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n\n Examples:\n\n .. 
doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: '\\n'\n >>> t.recvlines(3)\n ['', '', '']\n >>> t.recv_raw = lambda n: 'Foo\\nBar\\nBaz\\n'\n >>> t.recvlines(3)\n ['Foo', 'Bar', 'Baz']\n >>> t.recvlines(3, True)\n ['Foo\\n', 'Bar\\n', 'Baz\\n']\n \"\"\"\n lines = []\n with self.countdown(timeout):\n for _ in xrange(numlines):\n try:\n # We must set 'keep' to True here so that we can\n # restore the original, unmodified data to the buffer\n # in the event of a timeout.\n res = self.recvline(keep=True, timeout=timeout)\n except:\n self.unrecv(''.join(lines))\n raise\n\n if res:\n lines.append(res)\n else:\n break\n\n if not keep:\n lines = [line.rstrip('\\n') for line in lines]\n\n return lines\n\n def recvline(self, keep = True, timeout = None):\n r\"\"\"recvline(keep = True) -> str\n\n Receive a single line from the tube.\n\n A \"line\" is any sequence of bytes terminated by the byte sequence\n set in :attr:`newline`, which defaults to ``'\\n'``.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n keep(bool): Keep the line ending (``True``).\n timeout(int): Timeout\n\n Return:\n All bytes received over the tube until the first\n newline ``'\\n'`` is received. Optionally retains\n the ending.\n\n Examples:\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: 'Foo\\nBar\\r\\nBaz\\n'\n >>> t.recvline()\n 'Foo\\n'\n >>> t.recvline()\n 'Bar\\r\\n'\n >>> t.recvline(keep = False)\n 'Baz'\n >>> t.newline = '\\r\\n'\n >>> t.recvline(keep = False)\n 'Foo\\nBar'\n \"\"\"\n return self.recvuntil(self.newline, drop = not keep, timeout = timeout)\n\n def recvline_pred(self, pred, keep = False, timeout = None):\n r\"\"\"recvline_pred(pred, keep = False) -> str\n\n Receive data until ``pred(line)`` returns a truthy value.\n Drop all other data.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n pred(callable): Function to call. Returns the line for which\n this function returns ``True``.\n\n Examples:\n\n .. doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: \"Foo\\nBar\\nBaz\\n\"\n >>> t.recvline_pred(lambda line: line == \"Bar\\n\")\n 'Bar'\n >>> t.recvline_pred(lambda line: line == \"Bar\\n\", keep=True)\n 'Bar\\n'\n >>> t.recvline_pred(lambda line: line == 'Nope!', timeout=0.1)\n ''\n \"\"\"\n\n tmpbuf = Buffer()\n line = ''\n with self.countdown(timeout):\n while self.timeout:\n try:\n line = self.recvline(keep=True)\n except:\n self.buffer.add(tmpbuf)\n raise\n\n if not line:\n self.buffer.add(tmpbuf)\n return ''\n\n if pred(line):\n if not keep:\n line = line[:-len(self.newline)]\n return line\n else:\n tmpbuf.add(line)\n\n return ''\n\n def recvline_startswith(self, delims, keep = False, timeout = None):\n r\"\"\"recvline_startswith(delims, keep = False, timeout = None) -> str\n\n Keep recieving lines until one is found that starts with one of\n `delims`. Returns the last line recieved.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n delims(str,tuple): List of strings to search for, or string of single characters\n keep(bool): Return lines with newlines if ``True``\n timeout(int): Timeout, in seconds\n\n Returns:\n The first line received which starts with a delimiter in ``delims``.\n\n Examples:\n\n .. 
doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: \"Hello\\nWorld\\nXylophone\\n\"\n >>> t.recvline_startswith(tuple('WXYZ'))\n 'World'\n >>> t.recvline_startswith(tuple('WXYZ'), True)\n 'Xylophone\\n'\n >>> t.recvline_startswith('Wo')\n 'World'\n \"\"\"\n if not hasattr(delims, '__iter__'):\n delims = (delims,)\n\n return self.recvline_pred(lambda line: any(map(line.startswith, delims)),\n keep=keep,\n timeout=timeout)\n\n def recvline_endswith(self, delims, keep = False, timeout = None):\n r\"\"\"recvline_endswith(delims, keep = False, timeout = None) -> str\n\n Keep recieving lines until one is found that starts with one of\n `delims`. Returns the last line recieved.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n See :meth:`recvline_startswith` for more details.\n\n Examples:\n\n .. doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: 'Foo\\nBar\\nBaz\\nKaboodle\\n'\n >>> t.recvline_endswith('r')\n 'Bar'\n >>> t.recvline_endswith(tuple('abcde'), True)\n 'Kaboodle\\n'\n >>> t.recvline_endswith('oodle')\n 'Kaboodle'\n \"\"\"\n if not hasattr(delims, '__iter__'):\n delims = (delims,)\n\n delims = tuple(delim + self.newline for delim in delims)\n\n return self.recvline_pred(lambda line: any(map(line.endswith, delims)),\n keep=keep,\n timeout=timeout)\n\n def recvregex(self, regex, exact = False, timeout = None):\n \"\"\"recvregex(regex, exact = False, timeout = None) -> str\n\n Wrapper around :func:`recvpred`, which will return when a regex\n matches the string in the buffer.\n\n By default :func:`re.RegexObject.search` is used, but if `exact` is\n set to True, then :func:`re.RegexObject.match` will be used instead.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n \"\"\"\n\n if isinstance(regex, (str, unicode)):\n regex = re.compile(regex)\n\n if exact:\n pred = regex.match\n else:\n pred = regex.search\n\n return self.recvpred(pred, timeout = timeout)\n\n def recvline_regex(self, regex, exact = False, keep = False, timeout = None):\n \"\"\"recvregex(regex, exact = False, keep = False,\n timeout = None) -> str\n\n Wrapper around :func:`recvline_pred`, which will return when a regex\n matches a line.\n\n By default :func:`re.RegexObject.search` is used, but if `exact` is\n set to True, then :func:`re.RegexObject.match` will be used instead.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n \"\"\"\n\n if isinstance(regex, (str, unicode)):\n regex = re.compile(regex)\n\n if exact:\n pred = regex.match\n else:\n pred = regex.search\n\n return self.recvline_pred(pred, keep = keep, timeout = timeout)\n\n def recvrepeat(self, timeout = None):\n \"\"\"recvrepeat()\n\n Receives data until a timeout or EOF is reached.\n\n Examples:\n\n >>> data = [\n ... 'd',\n ... '', # simulate timeout\n ... 'c',\n ... 'b',\n ... 'a',\n ... ]\n >>> def delayrecv(n, data=data):\n ... 
return data.pop()\n >>> t = tube()\n >>> t.recv_raw = delayrecv\n >>> t.recvrepeat(0.2)\n 'abc'\n >>> t.recv()\n 'd'\n \"\"\"\n\n while self._fillbuffer(timeout=timeout):\n pass\n\n return self.buffer.get()\n\n def recvall(self):\n \"\"\"recvall() -> str\n\n Receives data until EOF is reached.\n \"\"\"\n\n with log.waitfor('Recieving all data') as h:\n l = len(self.buffer)\n with self.local('inf'):\n data = 'yay truthy strings'\n\n try:\n while self._fillbuffer():\n h.status(misc.size(len(self.buffer)))\n except EOFError:\n pass\n\n h.success(\"Done (%s)\" % misc.size(l))\n self.close()\n\n return self.buffer.get()\n\n def send(self, data):\n \"\"\"send(data)\n\n Sends data.\n\n If log level ``DEBUG`` is enabled, also prints out the data\n received.\n\n If it is not possible to send anymore because of a closed\n connection, it raises ``exceptions.EOFError``\n\n Examples:\n\n >>> def p(x): print repr(x)\n >>> t = tube()\n >>> t.send_raw = p\n >>> t.send('hello')\n 'hello'\n \"\"\"\n\n if log.isEnabledFor(logging.DEBUG):\n log.debug('Sent %#x bytes:' % len(data))\n if all(c in string.printable for c in data):\n for line in data.splitlines(True):\n log.indented(repr(line), level=logging.DEBUG)\n else:\n log.indented(fiddling.hexdump(data))\n self.send_raw(data)\n\n def sendline(self, line):\n r\"\"\"sendline(data)\n\n Shorthand for ``t.send(data + t.newline)``.\n\n Examples:\n\n >>> def p(x): print repr(x)\n >>> t = tube()\n >>> t.send_raw = p\n >>> t.sendline('hello')\n 'hello\\n'\n >>> t.newline = '\\r\\n'\n >>> t.sendline('hello')\n 'hello\\r\\n'\n \"\"\"\n\n self.send(line + self.newline)\n\n def sendafter(self, delim, data, timeout = None):\n \"\"\"sendafter(delim, data, timeout = None) -> str\n\n A combination of ``recvuntil(delim, timeout)`` and ``send(data)``.\n \"\"\"\n\n res = self.recvuntil(delim, timeout)\n self.send(data)\n return res\n\n def sendlineafter(self, delim, data, timeout = None):\n \"\"\"sendlineafter(delim, data, timeout = None) -> str\n\n A combination of ``recvuntil(delim, timeout)`` and ``sendline(data)``.\"\"\"\n\n res = self.recvuntil(delim, timeout)\n self.sendline(data)\n return res\n\n def sendthen(self, delim, data, timeout = None):\n \"\"\"sendthen(delim, data, timeout = None) -> str\n\n A combination of ``send(data)`` and ``recvuntil(delim, timeout)``.\"\"\"\n\n self.send(data)\n return self.recvuntil(delim, timeout)\n\n def sendlinethen(self, delim, data, timeout = None):\n \"\"\"sendlinethen(delim, data, timeout = None) -> str\n\n A combination of ``sendline(data)`` and ``recvuntil(delim, timeout)``.\"\"\"\n\n self.send(data + self.newline)\n return self.recvuntil(delim, timeout)\n\n def interactive(self, prompt = term.text.bold_red('$') + ' '):\n \"\"\"interactive(prompt = pwnlib.term.text.bold_red('$') + ' ')\n\n Does simultaneous reading and writing to the tube. 
In principle this just\n connects the tube to standard in and standard out, but in practice this\n is much more usable, since we are using :mod:`pwnlib.term` to print a\n floating prompt.\n\n Thus it only works in while in :data:`pwnlib.term.term_mode`.\n \"\"\"\n\n log.info('Switching to interactive mode')\n\n go = threading.Event()\n def recv_thread():\n while not go.isSet():\n try:\n cur = self.recv(timeout = 0.05)\n if cur:\n sys.stdout.write(cur)\n sys.stdout.flush()\n except EOFError:\n log.info('Got EOF while reading in interactive')\n break\n\n t = context.thread(target = recv_thread)\n t.daemon = True\n t.start()\n\n try:\n while not go.isSet():\n if term.term_mode:\n data = term.readline.readline(prompt = prompt, float = True)\n else:\n data = sys.stdin.read(1)\n\n if data:\n try:\n self.send(data)\n except EOFError:\n go.set()\n log.info('Got EOF while sending in interactive')\n else:\n go.set()\n except KeyboardInterrupt:\n log.info('Interrupted')\n go.set()\n\n while t.is_alive():\n t.join(timeout = 0.1)\n\n def clean(self, timeout = 0.05):\n \"\"\"clean(timeout = 0.05)\n\n Removes all the buffered data from a tube by calling\n :meth:`pwnlib.tubes.tube.tube.recv` with a low timeout until it fails.\n\n If ``timeout`` is zero, only cached data will be cleared.\n\n Note: If timeout is set to zero, the underlying network is\n not actually polled; only the internal buffer is cleared.\n\n Examples:\n\n >>> t = tube()\n >>> t.unrecv('clean me up')\n >>> t.clean(0)\n >>> len(t.buffer)\n 0\n \"\"\"\n\n # Clear the internal buffer early, so that _recv()\n # does not loop over it and concatenate unnecessarily.\n self.buffer.get()\n\n data = 'demo'\n while timeout and data:\n data = self.recv(timeout = timeout)\n\n def clean_and_log(self, timeout = 0.05):\n \"\"\"clean_and_log(timeout = 0.05)\n\n Works exactly as :meth:`pwnlib.tubes.tube.tube.clean`, but logs recieved\n data with :meth:`pwnlib.log.info`.\n\n Examples:\n\n >>> def recv(n, data=['', 'hooray_data']):\n ... while data: return data.pop()\n >>> context.log_level = 'info'\n >>> t = tube()\n >>> t.recv_raw = recv\n >>> t.connected_raw = lambda d: True\n >>> t.fileno = lambda: 1234\n >>> t.clean_and_log() #doctest: +ELLIPSIS\n [...] Cleaning tube (fileno = 1234):\n hooray_data\n >>> context.clear()\n \"\"\"\n\n if self.connected():\n log.info('Cleaning tube (fileno = %d):' % self.fileno())\n log.indented(self.recvrepeat(timeout = timeout))\n\n def connect_input(self, other):\n \"\"\"connect_input(other)\n\n Connects the input of this tube to the output of another tube object.\n\n\n Examples:\n\n >>> def p(x): print x\n >>> def recvone(n, data=['data']):\n ... while data: return data.pop()\n ... 
raise EOFError\n >>> a = tube()\n >>> b = tube()\n >>> a.recv_raw = recvone\n >>> b.send_raw = p\n >>> a.connected_raw = lambda d: True\n >>> b.connected_raw = lambda d: True\n >>> a.shutdown = lambda d: True\n >>> b.shutdown = lambda d: True\n >>> import time\n >>> _=(b.connect_input(a), time.sleep(0.1))\n data\n \"\"\"\n\n def pump():\n import sys as _sys\n while self.timeout:\n if not (self.connected('send') and other.connected('recv')):\n break\n\n try:\n data = other.recv(timeout = 0.05)\n except EOFError:\n break\n\n if not _sys:\n return\n\n if not data:\n continue\n\n try:\n self.send(data)\n except EOFError:\n break\n\n if not _sys:\n return\n\n self.shutdown('send')\n other.shutdown('recv')\n\n t = context.thread(target = pump)\n t.daemon = True\n t.start()\n\n def connect_output(self, other):\n \"\"\"connect_output(other)\n\n Connects the output of this tube to the input of another tube object.\n\n Examples:\n\n >>> def p(x): print x\n >>> def recvone(n, data=['data']):\n ... while data: return data.pop()\n ... raise EOFError\n >>> a = tube()\n >>> b = tube()\n >>> a.recv_raw = recvone\n >>> b.send_raw = p\n >>> a.connected_raw = lambda d: True\n >>> b.connected_raw = lambda d: True\n >>> a.shutdown = lambda d: True\n >>> b.shutdown = lambda d: True\n >>> _=(a.connect_output(b), time.sleep(0.1))\n data\n \"\"\"\n\n other.connect_input(self)\n\n def connect_both(self, other):\n \"\"\"connect_both(other)\n\n Connects the both ends of this tube object with another tube object.\"\"\"\n\n self.connect_input(other)\n self.connect_output(other)\n\n def spawn_process(self, *args, **kwargs):\n \"\"\"Spawns a new process having this tube as stdin, stdout and stderr.\n\n Takes the same arguments as :class:`subprocess.Popen`.\"\"\"\n\n return subprocess.Popen(\n *args,\n stdin = self.fileno(),\n stdout = self.fileno(),\n stderr = self.fileno(),\n **kwargs\n )\n\n def __lshift__(self, other):\n \"\"\"\n Shorthand for connecting multiple tubes.\n\n See :meth:`connect_input` for more information.\n\n Examples:\n\n The following are equivalent ::\n\n tube_a >> tube.b\n tube_a.connect_input(tube_b)\n\n This is useful when chaining multiple tubes ::\n\n tube_a >> tube_b >> tube_a\n tube_a.connect_input(tube_b)\n tube_b.connect_input(tube_a)\n \"\"\"\n self.connect_input(other)\n return other\n\n def __rshift__(self, other):\n \"\"\"\n Inverse of the ``<<`` operator. See :meth:`__lshift__`.\n\n See :meth:`connect_input` for more information.\n \"\"\"\n self.connect_output(other)\n return other\n\n def __ne__(self, other):\n \"\"\"\n Shorthand for connecting tubes to eachother.\n\n The following are equivalent ::\n\n a >> b >> a\n a <> b\n\n See :meth:`connect_input` for more information.\n \"\"\"\n self << other << self\n\n def wait_for_close(self):\n \"\"\"Waits until the tube is closed.\"\"\"\n\n while self.connected():\n time.sleep(0.05)\n\n def can_recv(self, timeout = 0):\n \"\"\"can_recv(timeout = 0) -> bool\n\n Returns True, if there is data available within `timeout` seconds.\n\n Examples:\n\n >>> import time\n >>> t = tube()\n >>> t.can_recv_raw = lambda *a: False\n >>> t.can_recv()\n False\n >>> _=t.unrecv('data')\n >>> t.can_recv()\n True\n >>> _=t.recv()\n >>> t.can_recv()\n False\n \"\"\"\n\n return bool(self.buffer or self.can_recv_raw(timeout))\n\n def settimeout(self, timeout):\n \"\"\"settimeout(timeout)\n\n Set the timeout for receiving operations. If the string \"default\"\n is given, then :data:`context.timeout` will be used. 
If None is given,\n then there will be no timeout.\n\n Examples:\n\n >>> t = tube()\n >>> t.settimeout_raw = lambda t: None\n >>> t.settimeout(3)\n >>> t.timeout == 3\n True\n \"\"\"\n\n self.timeout = timeout\n self.settimeout_raw(self.timeout)\n\n\n shutdown_directions = {\n 'in': 'recv',\n 'read': 'recv',\n 'recv': 'recv',\n 'out': 'send',\n 'write': 'send',\n 'send': 'send',\n }\n\n connected_directions = shutdown_directions.copy()\n connected_directions['any'] = 'any'\n\n def shutdown(self, direction = \"send\"):\n \"\"\"shutdown(direction = \"send\")\n\n Closes the tube for futher reading or writing depending on `direction`.\n\n Args:\n direction(str): Which direction to close; \"in\", \"read\" or \"recv\"\n closes the tube in the ingoing direction, \"out\", \"write\" or \"send\"\n closes it in the outgoing direction.\n\n Returns:\n :const:`None`\n\n Examples:\n\n >>> def p(x): print x\n >>> t = tube()\n >>> t.shutdown_raw = p\n >>> _=map(t.shutdown, ('in', 'read', 'recv', 'out', 'write', 'send'))\n recv\n recv\n recv\n send\n send\n send\n >>> t.shutdown('bad_value') #doctest: +ELLIPSIS\n Traceback (most recent call last):\n ...\n KeyError: \"direction must be in ['in', 'out', 'read', 'recv', 'send', 'write']\"\n \"\"\"\n try:\n direction = self.shutdown_directions[direction]\n except KeyError:\n raise KeyError('direction must be in %r' % sorted(self.shutdown_directions))\n else:\n self.shutdown_raw(self.shutdown_directions[direction])\n\n def connected(self, direction = 'any'):\n \"\"\"connected(direction = 'any') -> bool\n\n Returns True if the tube is connected in the specified direction.\n\n Args:\n direction(str): Can be the string 'any', 'in', 'read', 'recv',\n 'out', 'write', 'send'.\n\n Doctest:\n\n >>> def p(x): print x\n >>> t = tube()\n >>> t.connected_raw = p\n >>> _=map(t.connected, ('any', 'in', 'read', 'recv', 'out', 'write', 'send'))\n any\n recv\n recv\n recv\n send\n send\n send\n >>> t.connected('bad_value') #doctest: +ELLIPSIS\n Traceback (most recent call last):\n ...\n KeyError: \"direction must be in ['any', 'in', 'out', 'read', 'recv', 'send', 'write']\"\n \"\"\"\n try:\n direction = self.connected_directions[direction]\n except KeyError:\n raise KeyError('direction must be in %r' % sorted(self.connected_directions))\n else:\n return self.connected_raw(direction)\n\n def __enter__(self):\n \"\"\"Permit use of 'with' to control scoping and closing sessions.\n\n Examples:\n\n .. doctest::\n\n >>> t = tube()\n >>> def p(x): print x\n >>> t.close = lambda: p(\"Closed!\")\n >>> with t: pass\n Closed!\n \"\"\"\n return self\n\n def __exit__(self, type, value, traceback):\n \"\"\"Handles closing for 'with' statement\n\n See :meth:`__enter__`\n \"\"\"\n self.close()\n\n # The minimal interface to be implemented by a child\n def recv_raw(self, numb):\n \"\"\"recv_raw(numb) -> str\n\n Should not be called directly. Receives data without using the buffer\n on the object.\n\n Unless there is a timeout or closed connection, this should always\n return data. In case of a timeout, it should return None, in case\n of a closed connection it should raise an ``exceptions.EOFError``.\n \"\"\"\n\n raise EOFError('Not implemented')\n\n def send_raw(self, data):\n \"\"\"send_raw(data)\n\n Should not be called directly. 
Sends data to the tube.\n\n Should return ``exceptions.EOFError``, if it is unable to send any\n more, because of a close tube.\n \"\"\"\n\n raise EOFError('Not implemented')\n\n def settimeout_raw(self, timeout):\n \"\"\"settimeout_raw(timeout)\n\n Should not be called directly. Sets the timeout for\n the tube.\n \"\"\"\n\n raise NotImplementedError()\n\n def timeout_change(self):\n \"\"\"\n Informs the raw layer of the tube that the timeout has changed.\n\n Should not be called directly.\n\n Inherited from :class:`Timeout`.\n \"\"\"\n try:\n self.settimeout_raw(self.timeout)\n except NotImplementedError:\n pass\n\n def can_recv_raw(self, timeout):\n \"\"\"can_recv_raw(timeout) -> bool\n\n Should not be called directly. Returns True, if\n there is data available within the timeout, but\n ignores the buffer on the object.\n \"\"\"\n\n raise NotImplementedError()\n\n def connected_raw(self, direction):\n \"\"\"connected(direction = 'any') -> bool\n\n Should not be called directly. Returns True iff the\n tube is connected in the given direction.\n \"\"\"\n\n raise NotImplementedError()\n\n def close(self):\n \"\"\"close()\n\n Closes the tube.\n \"\"\"\n pass\n # Ideally we could:\n # raise NotImplementedError()\n # But this causes issues with the unit tests.\n\n def fileno(self):\n \"\"\"fileno() -> int\n\n Returns the file number used for reading.\n \"\"\"\n\n raise NotImplementedError()\n\n def shutdown_raw(self, direction):\n \"\"\"shutdown_raw(direction)\n\n Should not be called directly. Closes the tube for further reading or\n writing.\n \"\"\"\n\n raise NotImplementedError()\n", "path": "pwnlib/tubes/tube.py" } ]
[ { "content": "# -*- coding: utf-8 -*-\nfrom .buffer import Buffer\nfrom .timeout import Timeout\nfrom .. import context, term, atexit\nfrom ..util import misc, fiddling\nfrom ..context import context\nimport re, threading, sys, time, subprocess, logging, string\n\nlog = logging.getLogger(__name__)\n\nclass tube(Timeout):\n \"\"\"\n Container of all the tube functions common to sockets, TTYs and SSH connetions.\n \"\"\"\n\n #: Delimiter to use for :meth:`sendline`, :meth:`recvline`,\n #: and related functions.\n newline = '\\n'\n\n def __init__(self, timeout=None):\n # assert type(self) == tube\n\n # assert isinstance(self, tube), (id(type(self)), id(tube))\n super(tube, self).__init__(timeout)\n self.buffer = Buffer()\n atexit.register(self.close)\n\n # Functions based on functions from subclasses\n def recv(self, numb = 2**20, timeout = None):\n r\"\"\"recv(numb = 2**31, timeout = None) -> str\n\n Receives up to `numb` bytes of data from the tube, and returns\n as soon as any quantity of data is available.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Raises:\n exceptions.EOFError: The connection is closed\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n\n Examples:\n\n >>> t = tube()\n >>> # Fake a data source\n >>> t.recv_raw = lambda n: 'Hello, world'\n >>> t.recv() == 'Hello, world'\n True\n >>> t.unrecv('Woohoo')\n >>> t.recv() == 'Woohoo'\n True\n >>> context.log_level = 'debug'\n >>> _ = t.recv() # doctest: +ELLIPSIS\n [...] Received 0xc bytes:\n 'Hello, world'\n >>> context.clear()\n\n \"\"\"\n return self._recv(numb, timeout) or ''\n\n def unrecv(self, data):\n \"\"\"unrecv(data)\n\n Puts the specified data back at the beginning of the receive\n buffer.\n\n Examples:\n\n .. 
doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: 'hello'\n >>> t.recv()\n 'hello'\n >>> t.recv()\n 'hello'\n >>> t.unrecv('world')\n >>> t.recv()\n 'world'\n >>> t.recv()\n 'hello'\n \"\"\"\n self.buffer.unget(data)\n\n def _fillbuffer(self, timeout = None):\n \"\"\"_fillbuffer(timeout = None)\n\n Fills the internal buffer from the pipe, by calling\n :meth:`recv_raw` exactly once.\n\n Returns:\n\n The bytes of data received, or ``''`` if no data was received.\n\n Examples:\n\n >>> t = tube()\n >>> t.recv_raw = lambda *a: 'abc'\n >>> len(t.buffer)\n 0\n >>> t._fillbuffer()\n 'abc'\n >>> len(t.buffer)\n 3\n \"\"\"\n data = ''\n\n with self.countdown(timeout):\n data = self.recv_raw(2**20)\n\n if data and log.isEnabledFor(logging.DEBUG):\n log.debug('Received %#x bytes:' % len(data))\n\n if all(c in string.printable for c in data):\n for line in data.splitlines(True):\n log.indented(repr(line), level=logging.DEBUG)\n else:\n log.indented(fiddling.hexdump(data))\n\n if data:\n self.buffer.add(data)\n\n return data\n\n\n def _recv(self, numb = 2**20, timeout = None):\n \"\"\"_recv(numb = 2**20, timeout = None) -> str\n\n Recieves one chunk of from the internal buffer or from the OS if the\n buffer is empty.\n \"\"\"\n data = ''\n\n # No buffered data, could not put anything in the buffer\n # before timeout.\n if not self.buffer and not self._fillbuffer(timeout):\n return ''\n\n return self.buffer.get(numb)\n\n def recvpred(self, pred, timeout = None):\n \"\"\"recvpred(pred, timeout = None) -> str\n\n Receives one byte at a time from the tube, until ``pred(bytes)``\n evaluates to True.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n pred(callable): Function to call, with the currently-accumulated data.\n timeout(int): Timeout for the operation\n\n Raises:\n exceptions.EOFError: The connection is closed\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n \"\"\"\n\n data = ''\n\n with self.countdown(timeout):\n while not pred(data):\n try:\n res = self.recv(1)\n except:\n self.unrecv(data)\n return ''\n\n if res:\n data += res\n else:\n self.unrecv(data)\n return ''\n\n return data\n\n def recvn(self, numb, timeout = None):\n \"\"\"recvn(numb, timeout = None) -> str\n\n Recieves exactly `n` bytes.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Raises:\n exceptions.EOFError: The connection closed before the request could be satisfied\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n\n Examples:\n\n .. 
doctest::\n\n >>> t = tube()\n >>> data = 'hello world'\n >>> t.recv_raw = lambda *a: data\n >>> t.recvn(len(data)) == data\n True\n >>> t.recvn(len(data)+1) == data + data[0]\n True\n >>> t.recv_raw = lambda *a: None\n >>> # The remaining data is buffered\n >>> t.recv() == data[1:]\n True\n \"\"\"\n\n # Keep track of how much data has been received\n # It will be pasted together at the end if a\n # timeout does not occur, or put into the tube buffer.\n with self.countdown(timeout):\n while self.timeout and len(self.buffer) < numb:\n self._fillbuffer()\n\n return self.buffer.get(numb)\n\n def recvuntil(self, delims, drop=False, timeout = None):\n \"\"\"recvuntil(delims, timeout = None) -> str\n\n Recieve data until one of `delims` is encountered.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n arguments:\n delims(str,tuple): String of delimiters characters, or list of delimiter strings.\n drop(bool): Drop the ending. If ``True`` it is removed from the end of the return value.\n\n Raises:\n exceptions.EOFError: The connection closed before the request could be satisfied\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n\n Examples:\n\n .. doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: \"Hello World!\"\n >>> t.recvuntil(' ')\n 'Hello '\n >>> t.clean(0)\n >>> # Matches on 'o' in 'Hello'\n >>> t.recvuntil(tuple(' Wor'))\n 'Hello'\n >>> t.clean(0)\n >>> # Matches expressly full string\n >>> t.recvuntil(' Wor')\n 'Hello Wor'\n >>> t.clean(0)\n >>> # Matches on full string, drops match\n >>> t.recvuntil(' Wor', drop=True)\n 'Hello'\n\n >>> # Try with regex special characters\n >>> t = tube()\n >>> t.recv_raw = lambda n: \"Hello|World\"\n >>> t.recvuntil('|', drop=True)\n 'Hello'\n\n \"\"\"\n # Convert string into list of characters\n if not hasattr(delims, '__iter__'):\n delims = (delims,)\n\n def escape_regex_special(sz):\n specials = '\\\\/.*+?|()[]{}'\n for s in specials:\n sz = sz.replace(s, '\\\\' + s)\n return sz\n\n delims = map(escape_regex_special, delims)\n expr = re.compile('(%s)' % '|'.join(delims))\n data = ''\n\n with self.countdown(timeout):\n while self.timeout:\n try:\n res = self.recv()\n except:\n self.unrecv(data)\n raise\n\n if res:\n data += res\n if not res:\n self.unrecv(data)\n return ''\n\n match = expr.search(data)\n if match:\n # Re-queue evrything after the match\n self.unrecv(data[match.end():])\n\n # If we're dropping the match, return everything up to start\n if drop:\n return data[:match.start()]\n return data[:match.end()]\n\n return ''\n\n def recvlines(self, numlines, keep = False, timeout = None):\n r\"\"\"recvlines(numlines, keep = False, timeout = None) -> str list\n\n Recieve up to ``numlines`` lines.\n\n A \"line\" is any sequence of bytes terminated by the byte sequence\n set by :attr:`newline`, which defaults to ``'\\n'``.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n numlines(int): Maximum number of lines to receive\n keep(bool): Keep newlines at the end of each line (``False``).\n timeout(int): Maximum timeout\n\n Raises:\n exceptions.EOFError: The connection closed before the request could be satisfied\n\n Returns:\n A string containing bytes received from the socket,\n or ``''`` if a timeout occurred while waiting.\n\n Examples:\n\n .. 
doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: '\\n'\n >>> t.recvlines(3)\n ['', '', '']\n >>> t.recv_raw = lambda n: 'Foo\\nBar\\nBaz\\n'\n >>> t.recvlines(3)\n ['Foo', 'Bar', 'Baz']\n >>> t.recvlines(3, True)\n ['Foo\\n', 'Bar\\n', 'Baz\\n']\n \"\"\"\n lines = []\n with self.countdown(timeout):\n for _ in xrange(numlines):\n try:\n # We must set 'keep' to True here so that we can\n # restore the original, unmodified data to the buffer\n # in the event of a timeout.\n res = self.recvline(keep=True, timeout=timeout)\n except:\n self.unrecv(''.join(lines))\n raise\n\n if res:\n lines.append(res)\n else:\n break\n\n if not keep:\n lines = [line.rstrip('\\n') for line in lines]\n\n return lines\n\n def recvline(self, keep = True, timeout = None):\n r\"\"\"recvline(keep = True) -> str\n\n Receive a single line from the tube.\n\n A \"line\" is any sequence of bytes terminated by the byte sequence\n set in :attr:`newline`, which defaults to ``'\\n'``.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n keep(bool): Keep the line ending (``True``).\n timeout(int): Timeout\n\n Return:\n All bytes received over the tube until the first\n newline ``'\\n'`` is received. Optionally retains\n the ending.\n\n Examples:\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: 'Foo\\nBar\\r\\nBaz\\n'\n >>> t.recvline()\n 'Foo\\n'\n >>> t.recvline()\n 'Bar\\r\\n'\n >>> t.recvline(keep = False)\n 'Baz'\n >>> t.newline = '\\r\\n'\n >>> t.recvline(keep = False)\n 'Foo\\nBar'\n \"\"\"\n return self.recvuntil(self.newline, drop = not keep, timeout = timeout)\n\n def recvline_pred(self, pred, keep = False, timeout = None):\n r\"\"\"recvline_pred(pred, keep = False) -> str\n\n Receive data until ``pred(line)`` returns a truthy value.\n Drop all other data.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n pred(callable): Function to call. Returns the line for which\n this function returns ``True``.\n\n Examples:\n\n .. doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: \"Foo\\nBar\\nBaz\\n\"\n >>> t.recvline_pred(lambda line: line == \"Bar\\n\")\n 'Bar'\n >>> t.recvline_pred(lambda line: line == \"Bar\\n\", keep=True)\n 'Bar\\n'\n >>> t.recvline_pred(lambda line: line == 'Nope!', timeout=0.1)\n ''\n \"\"\"\n\n tmpbuf = Buffer()\n line = ''\n with self.countdown(timeout):\n while self.timeout:\n try:\n line = self.recvline(keep=True)\n except:\n self.buffer.add(tmpbuf)\n raise\n\n if not line:\n self.buffer.add(tmpbuf)\n return ''\n\n if pred(line):\n if not keep:\n line = line[:-len(self.newline)]\n return line\n else:\n tmpbuf.add(line)\n\n return ''\n\n def recvline_startswith(self, delims, keep = False, timeout = None):\n r\"\"\"recvline_startswith(delims, keep = False, timeout = None) -> str\n\n Keep recieving lines until one is found that starts with one of\n `delims`. Returns the last line recieved.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n Arguments:\n delims(str,tuple): List of strings to search for, or string of single characters\n keep(bool): Return lines with newlines if ``True``\n timeout(int): Timeout, in seconds\n\n Returns:\n The first line received which starts with a delimiter in ``delims``.\n\n Examples:\n\n .. 
doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: \"Hello\\nWorld\\nXylophone\\n\"\n >>> t.recvline_startswith(tuple('WXYZ'))\n 'World'\n >>> t.recvline_startswith(tuple('WXYZ'), True)\n 'Xylophone\\n'\n >>> t.recvline_startswith('Wo')\n 'World'\n \"\"\"\n if not hasattr(delims, '__iter__'):\n delims = (delims,)\n\n return self.recvline_pred(lambda line: any(map(line.startswith, delims)),\n keep=keep,\n timeout=timeout)\n\n def recvline_endswith(self, delims, keep = False, timeout = None):\n r\"\"\"recvline_endswith(delims, keep = False, timeout = None) -> str\n\n Keep recieving lines until one is found that starts with one of\n `delims`. Returns the last line recieved.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n\n See :meth:`recvline_startswith` for more details.\n\n Examples:\n\n .. doctest::\n\n >>> t = tube()\n >>> t.recv_raw = lambda n: 'Foo\\nBar\\nBaz\\nKaboodle\\n'\n >>> t.recvline_endswith('r')\n 'Bar'\n >>> t.recvline_endswith(tuple('abcde'), True)\n 'Kaboodle\\n'\n >>> t.recvline_endswith('oodle')\n 'Kaboodle'\n \"\"\"\n if not hasattr(delims, '__iter__'):\n delims = (delims,)\n\n delims = tuple(delim + self.newline for delim in delims)\n\n return self.recvline_pred(lambda line: any(map(line.endswith, delims)),\n keep=keep,\n timeout=timeout)\n\n def recvregex(self, regex, exact = False, timeout = None):\n \"\"\"recvregex(regex, exact = False, timeout = None) -> str\n\n Wrapper around :func:`recvpred`, which will return when a regex\n matches the string in the buffer.\n\n By default :func:`re.RegexObject.search` is used, but if `exact` is\n set to True, then :func:`re.RegexObject.match` will be used instead.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n \"\"\"\n\n if isinstance(regex, (str, unicode)):\n regex = re.compile(regex)\n\n if exact:\n pred = regex.match\n else:\n pred = regex.search\n\n return self.recvpred(pred, timeout = timeout)\n\n def recvline_regex(self, regex, exact = False, keep = False, timeout = None):\n \"\"\"recvregex(regex, exact = False, keep = False,\n timeout = None) -> str\n\n Wrapper around :func:`recvline_pred`, which will return when a regex\n matches a line.\n\n By default :func:`re.RegexObject.search` is used, but if `exact` is\n set to True, then :func:`re.RegexObject.match` will be used instead.\n\n If the request is not satisfied before ``timeout`` seconds pass,\n all data is buffered and an empty string (``''``) is returned.\n \"\"\"\n\n if isinstance(regex, (str, unicode)):\n regex = re.compile(regex)\n\n if exact:\n pred = regex.match\n else:\n pred = regex.search\n\n return self.recvline_pred(pred, keep = keep, timeout = timeout)\n\n def recvrepeat(self, timeout = None):\n \"\"\"recvrepeat()\n\n Receives data until a timeout or EOF is reached.\n\n Examples:\n\n >>> data = [\n ... 'd',\n ... '', # simulate timeout\n ... 'c',\n ... 'b',\n ... 'a',\n ... ]\n >>> def delayrecv(n, data=data):\n ... 
return data.pop()\n >>> t = tube()\n >>> t.recv_raw = delayrecv\n >>> t.recvrepeat(0.2)\n 'abc'\n >>> t.recv()\n 'd'\n \"\"\"\n\n try:\n while self._fillbuffer(timeout=timeout):\n pass\n except EOFError:\n pass\n\n return self.buffer.get()\n\n def recvall(self):\n \"\"\"recvall() -> str\n\n Receives data until EOF is reached.\n \"\"\"\n\n with log.waitfor('Recieving all data') as h:\n l = len(self.buffer)\n with self.local('inf'):\n data = 'yay truthy strings'\n\n try:\n while self._fillbuffer():\n h.status(misc.size(len(self.buffer)))\n except EOFError:\n pass\n\n h.success(\"Done (%s)\" % misc.size(l))\n self.close()\n\n return self.buffer.get()\n\n def send(self, data):\n \"\"\"send(data)\n\n Sends data.\n\n If log level ``DEBUG`` is enabled, also prints out the data\n received.\n\n If it is not possible to send anymore because of a closed\n connection, it raises ``exceptions.EOFError``\n\n Examples:\n\n >>> def p(x): print repr(x)\n >>> t = tube()\n >>> t.send_raw = p\n >>> t.send('hello')\n 'hello'\n \"\"\"\n\n if log.isEnabledFor(logging.DEBUG):\n log.debug('Sent %#x bytes:' % len(data))\n if all(c in string.printable for c in data):\n for line in data.splitlines(True):\n log.indented(repr(line), level=logging.DEBUG)\n else:\n log.indented(fiddling.hexdump(data))\n self.send_raw(data)\n\n def sendline(self, line):\n r\"\"\"sendline(data)\n\n Shorthand for ``t.send(data + t.newline)``.\n\n Examples:\n\n >>> def p(x): print repr(x)\n >>> t = tube()\n >>> t.send_raw = p\n >>> t.sendline('hello')\n 'hello\\n'\n >>> t.newline = '\\r\\n'\n >>> t.sendline('hello')\n 'hello\\r\\n'\n \"\"\"\n\n self.send(line + self.newline)\n\n def sendafter(self, delim, data, timeout = None):\n \"\"\"sendafter(delim, data, timeout = None) -> str\n\n A combination of ``recvuntil(delim, timeout)`` and ``send(data)``.\n \"\"\"\n\n res = self.recvuntil(delim, timeout)\n self.send(data)\n return res\n\n def sendlineafter(self, delim, data, timeout = None):\n \"\"\"sendlineafter(delim, data, timeout = None) -> str\n\n A combination of ``recvuntil(delim, timeout)`` and ``sendline(data)``.\"\"\"\n\n res = self.recvuntil(delim, timeout)\n self.sendline(data)\n return res\n\n def sendthen(self, delim, data, timeout = None):\n \"\"\"sendthen(delim, data, timeout = None) -> str\n\n A combination of ``send(data)`` and ``recvuntil(delim, timeout)``.\"\"\"\n\n self.send(data)\n return self.recvuntil(delim, timeout)\n\n def sendlinethen(self, delim, data, timeout = None):\n \"\"\"sendlinethen(delim, data, timeout = None) -> str\n\n A combination of ``sendline(data)`` and ``recvuntil(delim, timeout)``.\"\"\"\n\n self.send(data + self.newline)\n return self.recvuntil(delim, timeout)\n\n def interactive(self, prompt = term.text.bold_red('$') + ' '):\n \"\"\"interactive(prompt = pwnlib.term.text.bold_red('$') + ' ')\n\n Does simultaneous reading and writing to the tube. 
In principle this just\n connects the tube to standard in and standard out, but in practice this\n is much more usable, since we are using :mod:`pwnlib.term` to print a\n floating prompt.\n\n Thus it only works in while in :data:`pwnlib.term.term_mode`.\n \"\"\"\n\n log.info('Switching to interactive mode')\n\n go = threading.Event()\n def recv_thread():\n while not go.isSet():\n try:\n cur = self.recv(timeout = 0.05)\n if cur:\n sys.stdout.write(cur)\n sys.stdout.flush()\n except EOFError:\n log.info('Got EOF while reading in interactive')\n break\n\n t = context.thread(target = recv_thread)\n t.daemon = True\n t.start()\n\n try:\n while not go.isSet():\n if term.term_mode:\n data = term.readline.readline(prompt = prompt, float = True)\n else:\n data = sys.stdin.read(1)\n\n if data:\n try:\n self.send(data)\n except EOFError:\n go.set()\n log.info('Got EOF while sending in interactive')\n else:\n go.set()\n except KeyboardInterrupt:\n log.info('Interrupted')\n go.set()\n\n while t.is_alive():\n t.join(timeout = 0.1)\n\n def clean(self, timeout = 0.05):\n \"\"\"clean(timeout = 0.05)\n\n Removes all the buffered data from a tube by calling\n :meth:`pwnlib.tubes.tube.tube.recv` with a low timeout until it fails.\n\n If ``timeout`` is zero, only cached data will be cleared.\n\n Note: If timeout is set to zero, the underlying network is\n not actually polled; only the internal buffer is cleared.\n\n Examples:\n\n >>> t = tube()\n >>> t.unrecv('clean me up')\n >>> t.clean(0)\n >>> len(t.buffer)\n 0\n \"\"\"\n\n # Clear the internal buffer early, so that _recv()\n # does not loop over it and concatenate unnecessarily.\n self.buffer.get()\n\n data = 'demo'\n while timeout and data:\n data = self.recv(timeout = timeout)\n\n def clean_and_log(self, timeout = 0.05):\n \"\"\"clean_and_log(timeout = 0.05)\n\n Works exactly as :meth:`pwnlib.tubes.tube.tube.clean`, but logs recieved\n data with :meth:`pwnlib.log.info`.\n\n Examples:\n\n >>> def recv(n, data=['', 'hooray_data']):\n ... while data: return data.pop()\n >>> context.log_level = 'info'\n >>> t = tube()\n >>> t.recv_raw = recv\n >>> t.connected_raw = lambda d: True\n >>> t.fileno = lambda: 1234\n >>> t.clean_and_log() #doctest: +ELLIPSIS\n [...] Cleaning tube (fileno = 1234):\n hooray_data\n >>> context.clear()\n \"\"\"\n\n if self.connected():\n log.info('Cleaning tube (fileno = %d):' % self.fileno())\n log.indented(self.recvrepeat(timeout = timeout))\n\n def connect_input(self, other):\n \"\"\"connect_input(other)\n\n Connects the input of this tube to the output of another tube object.\n\n\n Examples:\n\n >>> def p(x): print x\n >>> def recvone(n, data=['data']):\n ... while data: return data.pop()\n ... 
raise EOFError\n >>> a = tube()\n >>> b = tube()\n >>> a.recv_raw = recvone\n >>> b.send_raw = p\n >>> a.connected_raw = lambda d: True\n >>> b.connected_raw = lambda d: True\n >>> a.shutdown = lambda d: True\n >>> b.shutdown = lambda d: True\n >>> import time\n >>> _=(b.connect_input(a), time.sleep(0.1))\n data\n \"\"\"\n\n def pump():\n import sys as _sys\n while self.timeout:\n if not (self.connected('send') and other.connected('recv')):\n break\n\n try:\n data = other.recv(timeout = 0.05)\n except EOFError:\n break\n\n if not _sys:\n return\n\n if not data:\n continue\n\n try:\n self.send(data)\n except EOFError:\n break\n\n if not _sys:\n return\n\n self.shutdown('send')\n other.shutdown('recv')\n\n t = context.thread(target = pump)\n t.daemon = True\n t.start()\n\n def connect_output(self, other):\n \"\"\"connect_output(other)\n\n Connects the output of this tube to the input of another tube object.\n\n Examples:\n\n >>> def p(x): print x\n >>> def recvone(n, data=['data']):\n ... while data: return data.pop()\n ... raise EOFError\n >>> a = tube()\n >>> b = tube()\n >>> a.recv_raw = recvone\n >>> b.send_raw = p\n >>> a.connected_raw = lambda d: True\n >>> b.connected_raw = lambda d: True\n >>> a.shutdown = lambda d: True\n >>> b.shutdown = lambda d: True\n >>> _=(a.connect_output(b), time.sleep(0.1))\n data\n \"\"\"\n\n other.connect_input(self)\n\n def connect_both(self, other):\n \"\"\"connect_both(other)\n\n Connects the both ends of this tube object with another tube object.\"\"\"\n\n self.connect_input(other)\n self.connect_output(other)\n\n def spawn_process(self, *args, **kwargs):\n \"\"\"Spawns a new process having this tube as stdin, stdout and stderr.\n\n Takes the same arguments as :class:`subprocess.Popen`.\"\"\"\n\n return subprocess.Popen(\n *args,\n stdin = self.fileno(),\n stdout = self.fileno(),\n stderr = self.fileno(),\n **kwargs\n )\n\n def __lshift__(self, other):\n \"\"\"\n Shorthand for connecting multiple tubes.\n\n See :meth:`connect_input` for more information.\n\n Examples:\n\n The following are equivalent ::\n\n tube_a >> tube.b\n tube_a.connect_input(tube_b)\n\n This is useful when chaining multiple tubes ::\n\n tube_a >> tube_b >> tube_a\n tube_a.connect_input(tube_b)\n tube_b.connect_input(tube_a)\n \"\"\"\n self.connect_input(other)\n return other\n\n def __rshift__(self, other):\n \"\"\"\n Inverse of the ``<<`` operator. See :meth:`__lshift__`.\n\n See :meth:`connect_input` for more information.\n \"\"\"\n self.connect_output(other)\n return other\n\n def __ne__(self, other):\n \"\"\"\n Shorthand for connecting tubes to eachother.\n\n The following are equivalent ::\n\n a >> b >> a\n a <> b\n\n See :meth:`connect_input` for more information.\n \"\"\"\n self << other << self\n\n def wait_for_close(self):\n \"\"\"Waits until the tube is closed.\"\"\"\n\n while self.connected():\n time.sleep(0.05)\n\n def can_recv(self, timeout = 0):\n \"\"\"can_recv(timeout = 0) -> bool\n\n Returns True, if there is data available within `timeout` seconds.\n\n Examples:\n\n >>> import time\n >>> t = tube()\n >>> t.can_recv_raw = lambda *a: False\n >>> t.can_recv()\n False\n >>> _=t.unrecv('data')\n >>> t.can_recv()\n True\n >>> _=t.recv()\n >>> t.can_recv()\n False\n \"\"\"\n\n return bool(self.buffer or self.can_recv_raw(timeout))\n\n def settimeout(self, timeout):\n \"\"\"settimeout(timeout)\n\n Set the timeout for receiving operations. If the string \"default\"\n is given, then :data:`context.timeout` will be used. 
If None is given,\n then there will be no timeout.\n\n Examples:\n\n >>> t = tube()\n >>> t.settimeout_raw = lambda t: None\n >>> t.settimeout(3)\n >>> t.timeout == 3\n True\n \"\"\"\n\n self.timeout = timeout\n self.settimeout_raw(self.timeout)\n\n\n shutdown_directions = {\n 'in': 'recv',\n 'read': 'recv',\n 'recv': 'recv',\n 'out': 'send',\n 'write': 'send',\n 'send': 'send',\n }\n\n connected_directions = shutdown_directions.copy()\n connected_directions['any'] = 'any'\n\n def shutdown(self, direction = \"send\"):\n \"\"\"shutdown(direction = \"send\")\n\n Closes the tube for futher reading or writing depending on `direction`.\n\n Args:\n direction(str): Which direction to close; \"in\", \"read\" or \"recv\"\n closes the tube in the ingoing direction, \"out\", \"write\" or \"send\"\n closes it in the outgoing direction.\n\n Returns:\n :const:`None`\n\n Examples:\n\n >>> def p(x): print x\n >>> t = tube()\n >>> t.shutdown_raw = p\n >>> _=map(t.shutdown, ('in', 'read', 'recv', 'out', 'write', 'send'))\n recv\n recv\n recv\n send\n send\n send\n >>> t.shutdown('bad_value') #doctest: +ELLIPSIS\n Traceback (most recent call last):\n ...\n KeyError: \"direction must be in ['in', 'out', 'read', 'recv', 'send', 'write']\"\n \"\"\"\n try:\n direction = self.shutdown_directions[direction]\n except KeyError:\n raise KeyError('direction must be in %r' % sorted(self.shutdown_directions))\n else:\n self.shutdown_raw(self.shutdown_directions[direction])\n\n def connected(self, direction = 'any'):\n \"\"\"connected(direction = 'any') -> bool\n\n Returns True if the tube is connected in the specified direction.\n\n Args:\n direction(str): Can be the string 'any', 'in', 'read', 'recv',\n 'out', 'write', 'send'.\n\n Doctest:\n\n >>> def p(x): print x\n >>> t = tube()\n >>> t.connected_raw = p\n >>> _=map(t.connected, ('any', 'in', 'read', 'recv', 'out', 'write', 'send'))\n any\n recv\n recv\n recv\n send\n send\n send\n >>> t.connected('bad_value') #doctest: +ELLIPSIS\n Traceback (most recent call last):\n ...\n KeyError: \"direction must be in ['any', 'in', 'out', 'read', 'recv', 'send', 'write']\"\n \"\"\"\n try:\n direction = self.connected_directions[direction]\n except KeyError:\n raise KeyError('direction must be in %r' % sorted(self.connected_directions))\n else:\n return self.connected_raw(direction)\n\n def __enter__(self):\n \"\"\"Permit use of 'with' to control scoping and closing sessions.\n\n Examples:\n\n .. doctest::\n\n >>> t = tube()\n >>> def p(x): print x\n >>> t.close = lambda: p(\"Closed!\")\n >>> with t: pass\n Closed!\n \"\"\"\n return self\n\n def __exit__(self, type, value, traceback):\n \"\"\"Handles closing for 'with' statement\n\n See :meth:`__enter__`\n \"\"\"\n self.close()\n\n # The minimal interface to be implemented by a child\n def recv_raw(self, numb):\n \"\"\"recv_raw(numb) -> str\n\n Should not be called directly. Receives data without using the buffer\n on the object.\n\n Unless there is a timeout or closed connection, this should always\n return data. In case of a timeout, it should return None, in case\n of a closed connection it should raise an ``exceptions.EOFError``.\n \"\"\"\n\n raise EOFError('Not implemented')\n\n def send_raw(self, data):\n \"\"\"send_raw(data)\n\n Should not be called directly. 
Sends data to the tube.\n\n Should return ``exceptions.EOFError``, if it is unable to send any\n more, because of a close tube.\n \"\"\"\n\n raise EOFError('Not implemented')\n\n def settimeout_raw(self, timeout):\n \"\"\"settimeout_raw(timeout)\n\n Should not be called directly. Sets the timeout for\n the tube.\n \"\"\"\n\n raise NotImplementedError()\n\n def timeout_change(self):\n \"\"\"\n Informs the raw layer of the tube that the timeout has changed.\n\n Should not be called directly.\n\n Inherited from :class:`Timeout`.\n \"\"\"\n try:\n self.settimeout_raw(self.timeout)\n except NotImplementedError:\n pass\n\n def can_recv_raw(self, timeout):\n \"\"\"can_recv_raw(timeout) -> bool\n\n Should not be called directly. Returns True, if\n there is data available within the timeout, but\n ignores the buffer on the object.\n \"\"\"\n\n raise NotImplementedError()\n\n def connected_raw(self, direction):\n \"\"\"connected(direction = 'any') -> bool\n\n Should not be called directly. Returns True iff the\n tube is connected in the given direction.\n \"\"\"\n\n raise NotImplementedError()\n\n def close(self):\n \"\"\"close()\n\n Closes the tube.\n \"\"\"\n pass\n # Ideally we could:\n # raise NotImplementedError()\n # But this causes issues with the unit tests.\n\n def fileno(self):\n \"\"\"fileno() -> int\n\n Returns the file number used for reading.\n \"\"\"\n\n raise NotImplementedError()\n\n def shutdown_raw(self, direction):\n \"\"\"shutdown_raw(direction)\n\n Should not be called directly. Closes the tube for further reading or\n writing.\n \"\"\"\n\n raise NotImplementedError()\n", "path": "pwnlib/tubes/tube.py" } ]
diff --git a/pwnlib/tubes/tube.py b/pwnlib/tubes/tube.py index c5b597049..9affd498a 100644 --- a/pwnlib/tubes/tube.py +++ b/pwnlib/tubes/tube.py @@ -594,7 +594,10 @@ def recvrepeat(self, timeout = None): 'd' """ - while self._fillbuffer(timeout=timeout): + try: + while self._fillbuffer(timeout=timeout): + pass + except EOFError: pass return self.buffer.get()
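The diff above makes `recvrepeat` swallow the `EOFError` raised by `_fillbuffer`, so whatever has already been buffered is still returned when the peer closes the connection mid-read. Below is a minimal standalone sketch of that pattern; the `Drainer` class and its chunk list are invented for illustration and are not part of pwntools.

```python
# Standalone illustration of the recvrepeat fix: drain a source until it
# reports EOF, but still return whatever was collected before the EOF.
class Drainer(object):
    def __init__(self, chunks):
        # 'chunks' stands in for data arriving on a tube; once it runs out
        # we simulate a closed connection by raising EOFError.
        self._chunks = list(chunks)
        self.buffer = []

    def _fillbuffer(self):
        if not self._chunks:
            raise EOFError('connection closed')
        self.buffer.append(self._chunks.pop(0))
        return True

    def recvrepeat(self):
        # Same shape as the patch: the try/except wraps the whole fill loop,
        # so an EOF stops the draining instead of discarding the partial data.
        try:
            while self._fillbuffer():
                pass
        except EOFError:
            pass
        data, self.buffer = ''.join(self.buffer), []
        return data


if __name__ == '__main__':
    d = Drainer(['a', 'b', 'c', 'd'])
    print(d.recvrepeat())  # prints "abcd" even though the source hit EOF
```

The key point is that the `try`/`except` sits around the whole fill loop, so an EOF simply ends the draining instead of losing the partial result.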
conan-io__conan-6333
[bug] New warning in python 3.8 makes some tests fail (line buffering isn't supported in binary mode) <!-- Please don't forget to update the issue title. Include all applicable information to help us reproduce your problem. To help us debug your issue please explain: --> ### Environment Details (include every applicable attribute) * Operating System+version: Arch Linux * Compiler+version: - * Conan version: develop * Python version: python 3.8 ### Steps to reproduce (Include if Applicable) Run some unit tests, for example: ``` nosetests conans.test.functional.settings ``` ### Logs (Executed commands with output) (Include/Attach if Applicable) ``` $ nosetests conans.test.functional.settings ............F............F.F............. ====================================================================== FAIL: test_only_cppstd (conan.conans.test.functional.settings.cppstd.compiler_cppstd_test.UseCompilerCppStdSettingTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/siu/src/extern/conan/conans/test/functional/settings/cppstd/compiler_cppstd_test.py", line 140, in test_only_cppstd self.t.run("info . -s cppstd=14") File "/usr/lib/python3.8/contextlib.py", line 120, in __exit__ next(self.gen) File "/home/siu/src/extern/conan/conans/test/utils/deprecation.py", line 13, in catch_deprecation_warning test_suite.assertEqual(len(w), n) AssertionError: 2 != 1 ====================================================================== FAIL: gcc_8_std_20_test (conan.conans.test.functional.settings.cppstd_test.StdCppTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/siu/src/extern/conan/conans/test/functional/settings/cppstd_test.py", line 47, in gcc_8_std_20_test client.run('create . user/testing -s compiler="gcc" ' File "/usr/lib/python3.8/contextlib.py", line 120, in __exit__ next(self.gen) File "/home/siu/src/extern/conan/conans/test/utils/deprecation.py", line 13, in catch_deprecation_warning test_suite.assertEqual(len(w), n) AssertionError: 2 != 1 ====================================================================== FAIL: use_wrong_setting_for_compiler_test (conan.conans.test.functional.settings.cppstd_test.StdCppTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/siu/src/extern/conan/conans/test/functional/settings/cppstd_test.py", line 23, in use_wrong_setting_for_compiler_test client.run('create . user/testing -s compiler="gcc" ' File "/usr/lib/python3.8/contextlib.py", line 120, in __exit__ next(self.gen) File "/home/siu/src/extern/conan/conans/test/utils/deprecation.py", line 13, in catch_deprecation_warning test_suite.assertEqual(len(w), n) AssertionError: 2 != 1 ---------------------------------------------------------------------- Ran 41 tests in 4.690s FAILED (failures=3) ``` <!-- Your log content should be related to the bug description, it can be: - Conan command output - Server output (Artifactory, conan_server) -->
[ { "content": "import os\nimport platform\nimport re\nfrom subprocess import PIPE, Popen, STDOUT\n\nfrom conans.client.output import Color\nfrom conans.client.tools import detected_os, OSInfo\nfrom conans.client.tools.win import latest_visual_studio_version_installed\nfrom conans.model.version import Version\n\n\ndef _execute(command):\n proc = Popen(command, shell=True, bufsize=1, stdout=PIPE, stderr=STDOUT)\n\n output_buffer = []\n while True:\n line = proc.stdout.readline()\n if not line:\n break\n # output.write(line)\n output_buffer.append(str(line))\n\n proc.communicate()\n return proc.returncode, \"\".join(output_buffer)\n\n\ndef _gcc_compiler(output, compiler_exe=\"gcc\"):\n\n try:\n if platform.system() == \"Darwin\":\n # In Mac OS X check if gcc is a fronted using apple-clang\n _, out = _execute(\"%s --version\" % compiler_exe)\n out = out.lower()\n if \"clang\" in out:\n return None\n\n ret, out = _execute('%s -dumpversion' % compiler_exe)\n if ret != 0:\n return None\n compiler = \"gcc\"\n installed_version = re.search(\"([0-9](\\.[0-9])?)\", out).group()\n # Since GCC 7.1, -dumpversion return the major version number\n # only (\"7\"). We must use -dumpfullversion to get the full version\n # number (\"7.1.1\").\n if installed_version:\n output.success(\"Found %s %s\" % (compiler, installed_version))\n return compiler, installed_version\n except Exception:\n return None\n\n\ndef _clang_compiler(output, compiler_exe=\"clang\"):\n try:\n ret, out = _execute('%s --version' % compiler_exe)\n if ret != 0:\n return None\n if \"Apple\" in out:\n compiler = \"apple-clang\"\n elif \"clang version\" in out:\n compiler = \"clang\"\n installed_version = re.search(\"([0-9]+\\.[0-9])\", out).group()\n if installed_version:\n output.success(\"Found %s %s\" % (compiler, installed_version))\n return compiler, installed_version\n except Exception:\n return None\n\n\ndef _sun_cc_compiler(output, compiler_exe=\"cc\"):\n try:\n _, out = _execute('%s -V' % compiler_exe)\n compiler = \"sun-cc\"\n installed_version = re.search(\"([0-9]+\\.[0-9]+)\", out).group()\n if installed_version:\n output.success(\"Found %s %s\" % (compiler, installed_version))\n return compiler, installed_version\n except Exception:\n return None\n\n\ndef _get_default_compiler(output):\n cc = os.environ.get(\"CC\", \"\")\n cxx = os.environ.get(\"CXX\", \"\")\n if cc or cxx: # Env defined, use them\n output.info(\"CC and CXX: %s, %s \" % (cc or \"None\", cxx or \"None\"))\n command = cc or cxx\n if \"gcc\" in command:\n gcc = _gcc_compiler(output, command)\n if platform.system() == \"Darwin\" and gcc is None:\n output.error(\n \"%s detected as a frontend using apple-clang. 
Compiler not supported\" % command\n )\n return gcc\n if \"clang\" in command.lower():\n return _clang_compiler(output, command)\n if platform.system() == \"SunOS\" and command.lower() == \"cc\":\n return _sun_cc_compiler(output, command)\n # I am not able to find its version\n output.error(\"Not able to automatically detect '%s' version\" % command)\n return None\n\n if detected_os() == \"Windows\":\n version = latest_visual_studio_version_installed(output)\n vs = ('Visual Studio', version) if version else None\n gcc = _gcc_compiler(output)\n clang = _clang_compiler(output)\n if platform.system() == \"SunOS\":\n sun_cc = _sun_cc_compiler(output)\n\n if detected_os() == \"Windows\":\n return vs or gcc or clang\n elif platform.system() == \"Darwin\":\n return clang or gcc\n elif platform.system() == \"SunOS\":\n return sun_cc or gcc or clang\n else:\n return gcc or clang\n\n\ndef _get_profile_compiler_version(compiler, version, output):\n major = version.split(\".\")[0]\n if compiler == \"clang\" and int(major) >= 8:\n output.info(\"clang>=8, using the major as version\")\n return major\n elif compiler == \"gcc\" and int(major) >= 5:\n output.info(\"gcc>=5, using the major as version\")\n return major\n return version\n\n\ndef _detect_compiler_version(result, output, profile_path):\n try:\n compiler, version = _get_default_compiler(output)\n except Exception:\n compiler, version = None, None\n if not compiler or not version:\n output.error(\"Unable to find a working compiler\")\n else:\n result.append((\"compiler\", compiler))\n result.append((\"compiler.version\",\n _get_profile_compiler_version(compiler, version, output)))\n if compiler == \"apple-clang\":\n result.append((\"compiler.libcxx\", \"libc++\"))\n elif compiler == \"gcc\":\n result.append((\"compiler.libcxx\", \"libstdc++\"))\n if Version(version) >= Version(\"5.1\"):\n profile_name = os.path.basename(profile_path)\n msg = \"\"\"\nConan detected a GCC version > 5 but has adjusted the 'compiler.libcxx' setting to\n'libstdc++' for backwards compatibility.\nYour compiler is likely using the new CXX11 ABI by default (libstdc++11).\n\nIf you want Conan to use the new ABI for the {profile} profile, run:\n\n $ conan profile update settings.compiler.libcxx=libstdc++11 {profile}\n\nOr edit '{profile_path}' and set compiler.libcxx=libstdc++11\n\"\"\".format(profile=profile_name, profile_path=profile_path)\n output.writeln(\"\\n************************* WARNING: GCC OLD ABI COMPATIBILITY \"\n \"***********************\\n %s\\n************************************\"\n \"************************************************\\n\\n\\n\" % msg,\n Color.BRIGHT_RED)\n elif compiler == \"cc\":\n if platform.system() == \"SunOS\":\n result.append((\"compiler.libstdcxx\", \"libstdcxx4\"))\n elif compiler == \"clang\":\n if platform.system() == \"FreeBSD\":\n result.append((\"compiler.libcxx\", \"libc++\"))\n else:\n result.append((\"compiler.libcxx\", \"libstdc++\"))\n elif compiler == \"sun-cc\":\n result.append((\"compiler.libcxx\", \"libCstd\"))\n\n\ndef _detect_os_arch(result, output):\n architectures = {'i386': 'x86',\n 'i686': 'x86',\n 'i86pc': 'x86',\n 'amd64': 'x86_64',\n 'aarch64': 'armv8',\n 'sun4v': 'sparc'}\n the_os = detected_os()\n result.append((\"os\", the_os))\n result.append((\"os_build\", the_os))\n\n platform_machine = platform.machine().lower()\n if platform_machine:\n arch = architectures.get(platform_machine, platform_machine)\n if arch.startswith('arm'):\n for a in (\"armv6\", \"armv7hf\", \"armv7\", \"armv8\"):\n if 
arch.startswith(a):\n arch = a\n break\n else:\n output.error(\"Your ARM '%s' architecture is probably not defined in settings.yml\\n\"\n \"Please check your conan.conf and settings.yml files\" % arch)\n elif OSInfo().is_aix:\n arch = OSInfo.get_aix_architecture() or arch\n\n result.append((\"arch\", arch))\n result.append((\"arch_build\", arch))\n\n\ndef detect_defaults_settings(output, profile_path):\n \"\"\" try to deduce current machine values without any constraints at all\n :param output: Conan Output instance\n :param profile_path: Conan profile file path\n :return: A list with default settings\n \"\"\"\n result = []\n _detect_os_arch(result, output)\n _detect_compiler_version(result, output, profile_path)\n result.append((\"build_type\", \"Release\"))\n\n return result\n", "path": "conans/client/conf/detect.py" } ]
[ { "content": "import os\nimport platform\nimport re\nfrom subprocess import PIPE, Popen, STDOUT\n\nfrom conans.client.output import Color\nfrom conans.client.tools import detected_os, OSInfo\nfrom conans.client.tools.win import latest_visual_studio_version_installed\nfrom conans.model.version import Version\n\n\ndef _execute(command):\n proc = Popen(command, shell=True, bufsize=1, universal_newlines=True, stdout=PIPE,\n stderr=STDOUT)\n\n output_buffer = []\n while True:\n line = proc.stdout.readline()\n if not line:\n break\n # output.write(line)\n output_buffer.append(str(line))\n\n proc.communicate()\n return proc.returncode, \"\".join(output_buffer)\n\n\ndef _gcc_compiler(output, compiler_exe=\"gcc\"):\n\n try:\n if platform.system() == \"Darwin\":\n # In Mac OS X check if gcc is a fronted using apple-clang\n _, out = _execute(\"%s --version\" % compiler_exe)\n out = out.lower()\n if \"clang\" in out:\n return None\n\n ret, out = _execute('%s -dumpversion' % compiler_exe)\n if ret != 0:\n return None\n compiler = \"gcc\"\n installed_version = re.search(\"([0-9](\\.[0-9])?)\", out).group()\n # Since GCC 7.1, -dumpversion return the major version number\n # only (\"7\"). We must use -dumpfullversion to get the full version\n # number (\"7.1.1\").\n if installed_version:\n output.success(\"Found %s %s\" % (compiler, installed_version))\n return compiler, installed_version\n except Exception:\n return None\n\n\ndef _clang_compiler(output, compiler_exe=\"clang\"):\n try:\n ret, out = _execute('%s --version' % compiler_exe)\n if ret != 0:\n return None\n if \"Apple\" in out:\n compiler = \"apple-clang\"\n elif \"clang version\" in out:\n compiler = \"clang\"\n installed_version = re.search(\"([0-9]+\\.[0-9])\", out).group()\n if installed_version:\n output.success(\"Found %s %s\" % (compiler, installed_version))\n return compiler, installed_version\n except Exception:\n return None\n\n\ndef _sun_cc_compiler(output, compiler_exe=\"cc\"):\n try:\n _, out = _execute('%s -V' % compiler_exe)\n compiler = \"sun-cc\"\n installed_version = re.search(\"([0-9]+\\.[0-9]+)\", out).group()\n if installed_version:\n output.success(\"Found %s %s\" % (compiler, installed_version))\n return compiler, installed_version\n except Exception:\n return None\n\n\ndef _get_default_compiler(output):\n cc = os.environ.get(\"CC\", \"\")\n cxx = os.environ.get(\"CXX\", \"\")\n if cc or cxx: # Env defined, use them\n output.info(\"CC and CXX: %s, %s \" % (cc or \"None\", cxx or \"None\"))\n command = cc or cxx\n if \"gcc\" in command:\n gcc = _gcc_compiler(output, command)\n if platform.system() == \"Darwin\" and gcc is None:\n output.error(\n \"%s detected as a frontend using apple-clang. 
Compiler not supported\" % command\n )\n return gcc\n if \"clang\" in command.lower():\n return _clang_compiler(output, command)\n if platform.system() == \"SunOS\" and command.lower() == \"cc\":\n return _sun_cc_compiler(output, command)\n # I am not able to find its version\n output.error(\"Not able to automatically detect '%s' version\" % command)\n return None\n\n if detected_os() == \"Windows\":\n version = latest_visual_studio_version_installed(output)\n vs = ('Visual Studio', version) if version else None\n gcc = _gcc_compiler(output)\n clang = _clang_compiler(output)\n if platform.system() == \"SunOS\":\n sun_cc = _sun_cc_compiler(output)\n\n if detected_os() == \"Windows\":\n return vs or gcc or clang\n elif platform.system() == \"Darwin\":\n return clang or gcc\n elif platform.system() == \"SunOS\":\n return sun_cc or gcc or clang\n else:\n return gcc or clang\n\n\ndef _get_profile_compiler_version(compiler, version, output):\n major = version.split(\".\")[0]\n if compiler == \"clang\" and int(major) >= 8:\n output.info(\"clang>=8, using the major as version\")\n return major\n elif compiler == \"gcc\" and int(major) >= 5:\n output.info(\"gcc>=5, using the major as version\")\n return major\n return version\n\n\ndef _detect_compiler_version(result, output, profile_path):\n try:\n compiler, version = _get_default_compiler(output)\n except Exception:\n compiler, version = None, None\n if not compiler or not version:\n output.error(\"Unable to find a working compiler\")\n else:\n result.append((\"compiler\", compiler))\n result.append((\"compiler.version\",\n _get_profile_compiler_version(compiler, version, output)))\n if compiler == \"apple-clang\":\n result.append((\"compiler.libcxx\", \"libc++\"))\n elif compiler == \"gcc\":\n result.append((\"compiler.libcxx\", \"libstdc++\"))\n if Version(version) >= Version(\"5.1\"):\n profile_name = os.path.basename(profile_path)\n msg = \"\"\"\nConan detected a GCC version > 5 but has adjusted the 'compiler.libcxx' setting to\n'libstdc++' for backwards compatibility.\nYour compiler is likely using the new CXX11 ABI by default (libstdc++11).\n\nIf you want Conan to use the new ABI for the {profile} profile, run:\n\n $ conan profile update settings.compiler.libcxx=libstdc++11 {profile}\n\nOr edit '{profile_path}' and set compiler.libcxx=libstdc++11\n\"\"\".format(profile=profile_name, profile_path=profile_path)\n output.writeln(\"\\n************************* WARNING: GCC OLD ABI COMPATIBILITY \"\n \"***********************\\n %s\\n************************************\"\n \"************************************************\\n\\n\\n\" % msg,\n Color.BRIGHT_RED)\n elif compiler == \"cc\":\n if platform.system() == \"SunOS\":\n result.append((\"compiler.libstdcxx\", \"libstdcxx4\"))\n elif compiler == \"clang\":\n if platform.system() == \"FreeBSD\":\n result.append((\"compiler.libcxx\", \"libc++\"))\n else:\n result.append((\"compiler.libcxx\", \"libstdc++\"))\n elif compiler == \"sun-cc\":\n result.append((\"compiler.libcxx\", \"libCstd\"))\n\n\ndef _detect_os_arch(result, output):\n architectures = {'i386': 'x86',\n 'i686': 'x86',\n 'i86pc': 'x86',\n 'amd64': 'x86_64',\n 'aarch64': 'armv8',\n 'sun4v': 'sparc'}\n the_os = detected_os()\n result.append((\"os\", the_os))\n result.append((\"os_build\", the_os))\n\n platform_machine = platform.machine().lower()\n if platform_machine:\n arch = architectures.get(platform_machine, platform_machine)\n if arch.startswith('arm'):\n for a in (\"armv6\", \"armv7hf\", \"armv7\", \"armv8\"):\n if 
arch.startswith(a):\n arch = a\n break\n else:\n output.error(\"Your ARM '%s' architecture is probably not defined in settings.yml\\n\"\n \"Please check your conan.conf and settings.yml files\" % arch)\n elif OSInfo().is_aix:\n arch = OSInfo.get_aix_architecture() or arch\n\n result.append((\"arch\", arch))\n result.append((\"arch_build\", arch))\n\n\ndef detect_defaults_settings(output, profile_path):\n \"\"\" try to deduce current machine values without any constraints at all\n :param output: Conan Output instance\n :param profile_path: Conan profile file path\n :return: A list with default settings\n \"\"\"\n result = []\n _detect_os_arch(result, output)\n _detect_compiler_version(result, output, profile_path)\n result.append((\"build_type\", \"Release\"))\n\n return result\n", "path": "conans/client/conf/detect.py" } ]
diff --git a/conans/client/conf/detect.py b/conans/client/conf/detect.py index 1b3310d6e2f..de0b8601f72 100644 --- a/conans/client/conf/detect.py +++ b/conans/client/conf/detect.py @@ -10,7 +10,8 @@ def _execute(command): - proc = Popen(command, shell=True, bufsize=1, stdout=PIPE, stderr=STDOUT) + proc = Popen(command, shell=True, bufsize=1, universal_newlines=True, stdout=PIPE, + stderr=STDOUT) output_buffer = [] while True:
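The warning behind this record comes from passing `bufsize=1` to a `Popen` whose pipes are still in binary mode: since Python 3.8 line buffering is only honoured for text-mode streams, so the interpreter emits a `RuntimeWarning` ("line buffering isn't supported in binary mode"), which the test helper then counts as an extra warning. The patch opens the pipe in text mode. A minimal sketch of the same call, assuming a throwaway `run` helper and an `echo` command purely for illustration:

```python
# Minimal sketch of the pattern used in the patch: open the child's output
# in text mode so bufsize=1 (line buffering) is valid on Python 3.8+.
# The 'run' helper and the echo command are just for illustration.
from subprocess import PIPE, Popen, STDOUT


def run(command):
    proc = Popen(command, shell=True, bufsize=1, universal_newlines=True,
                 stdout=PIPE, stderr=STDOUT)
    lines = []
    for line in proc.stdout:   # text mode, so each line is already a str
        lines.append(line)
    proc.communicate()
    return proc.returncode, ''.join(lines)


if __name__ == '__main__':
    code, output = run('echo hello')
    print(code, output)
```

Passing `universal_newlines=True` (or `text=True` on Python 3.7+) makes the pipe a text stream, so `bufsize=1` is interpreted as genuine line buffering and no warning is raised.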
dotkom__onlineweb4-973
Add AppKom to the list of committees in the dashboard view

AppKom is missing as a committee in the dashboard view, so users can't add a position in that committee.

![screen shot 2014-09-30 at 02 33 48](https://cloud.githubusercontent.com/assets/582580/4451680/c20b1420-4839-11e4-8dfc-b52969d8481c.png)
[ { "content": "# -*- coding: utf-8 -*-\n\nimport datetime\nimport socket\nimport urllib\nimport hashlib\n\nfrom django.conf import settings\nfrom django.contrib.auth.models import AbstractUser\nfrom django.db import models\nfrom django.utils.translation import ugettext as _\nfrom django.utils import timezone\nfrom django.utils.html import strip_tags\n\nimport watson\nimport reversion\n\n# If this list is changed, remember to check that the year property on\n# OnlineUser is still correct!\nFIELD_OF_STUDY_CHOICES = [\n (0, _(u'Gjest')),\n (1, _(u'Bachelor i Informatikk (BIT)')),\n # master degrees take up the interval [10,30]\n (10, _(u'Software (SW)')),\n (11, _(u'Informasjonsforvaltning (DIF)')),\n (12, _(u'Komplekse Datasystemer (KDS)')),\n (13, _(u'Spillteknologi (SPT)')),\n (14, _(u'Intelligente Systemer (IRS)')),\n (15, _(u'Helseinformatikk (MSMEDTEK)')),\n (30, _(u'Annen mastergrad')),\n (80, _(u'PhD')),\n (90, _(u'International')),\n (100, _(u'Annet Onlinemedlem')),\n]\n\nGENDER_CHOICES = [\n (\"male\", _(u\"mann\")),\n (\"female\", _(u\"kvinne\")),\n]\n\nCOMMITTEES = [\n ('hs', _(u'Hovedstyret')),\n ('arrkom', _(u'Arrangementskomiteen')),\n ('bankom', _(u'Bank- og økonomikomiteen')),\n ('bedkom', _(u'Bedriftskomiteen')),\n ('dotkom', _(u'Drifts- og utviklingskomiteen')),\n ('ekskom', _(u'Ekskursjonskomiteen')),\n ('fagkom', _(u'Fag- og kurskomiteen')),\n ('jubkom', _(u'Jubileumskomiteen')),\n ('pangkom', _(u'Pensjonistkomiteen')),\n ('prokom', _(u'Profil-og aviskomiteen')),\n ('trikom', _(u'Trivselskomiteen')),\n ('velkom', _(u'Velkomstkomiteen')),\n]\n\nPOSITIONS = [\n ('medlem', _(u'Medlem')),\n ('leder', _(u'Leder')),\n ('nestleder', _(u'Nestleder')),\n ('okoans', _(u'Økonomiansvarlig')),\n]\n\nclass OnlineUser(AbstractUser):\n\n IMAGE_FOLDER = \"images/profiles\"\n IMAGE_EXTENSIONS = ['.jpg', '.jpeg', '.gif', '.png']\n \n # Online related fields\n field_of_study = models.SmallIntegerField(_(u\"studieretning\"), choices=FIELD_OF_STUDY_CHOICES, default=0)\n started_date = models.DateField(_(u\"startet studie\"), default=timezone.now().date())\n compiled = models.BooleanField(_(u\"kompilert\"), default=False)\n\n # Infomail\n infomail = models.BooleanField(_(u\"vil ha infomail\"), default=False)\n\n # Address\n phone_number = models.CharField(_(u\"telefonnummer\"), max_length=20, blank=True, null=True)\n address = models.CharField(_(u\"adresse\"), max_length=100, blank=True, null=True)\n zip_code = models.CharField(_(u\"postnummer\"), max_length=4, blank=True, null=True)\n\n # Other\n allergies = models.TextField(_(u\"allergier\"), blank=True, null=True)\n mark_rules = models.BooleanField(_(u\"godtatt prikkeregler\"), default=False)\n rfid = models.CharField(_(u\"RFID\"), max_length=50, blank=True, null=True)\n nickname = models.CharField(_(u\"nickname\"), max_length=50, blank=True, null=True)\n website = models.URLField(_(u\"hjemmeside\"), blank=True, null=True)\n gender = models.CharField(_(u\"kjønn\"), max_length=10, choices=GENDER_CHOICES, default=\"male\")\n\n # NTNU credentials\n ntnu_username = models.CharField(_(u\"NTNU-brukernavn\"), max_length=10, blank=True, null=True, unique=True)\n\n # TODO checkbox for forwarding of @online.ntnu.no mail\n\n @property\n def is_member(self):\n \"\"\"\n Returns true if the User object is associated with Online.\n \"\"\"\n if self.ntnu_username:\n if AllowedUsername.objects.filter(username=self.ntnu_username.lower()).filter(expiration_date__gte=timezone.now()).count() > 0:\n return True\n return False\n\n def get_full_name(self):\n 
\"\"\"\n Returns the first_name plus the last_name, with a space in between.\n \"\"\"\n full_name = u'%s %s' % (self.first_name, self.last_name)\n return full_name.strip()\n\n def get_email(self):\n email = self.get_emails().filter(primary = True)\n if email:\n return email[0]\n return None\n\n def get_emails(self):\n return Email.objects.all().filter(user = self)\n\n def in_group(self, group_name):\n return reduce(lambda x,y: x or y.name == group_name, self.groups.all(), False)\n\n @property\n def year(self):\n today = timezone.now().date()\n started = self.started_date\n\n # We say that a year is 360 days incase we are a bit slower to\n # add users one year.\n year = ((today - started).days / 360) + 1\n\n if self.field_of_study == 0 or self.field_of_study == 100: # others\n return 0\n # dont return a bachelor student as 4th or 5th grade\n elif self.field_of_study == 1: # bachelor\n if year > 3:\n return 3\n return year\n elif 10 <= self.field_of_study <= 30: # 10-29 is considered master\n if year >= 2:\n return 5\n return 4\n elif self.field_of_study == 80: # phd\n return year + 5\n elif self.field_of_study == 90: # international\n if year == 1:\n return 1\n return 4\n # If user's field of study is not matched by any of these tests, return -1\n else:\n return -1\n\n @models.permalink\n def get_absolute_url(self):\n return ('profiles_view', None, {'username': self.username})\n\n def __unicode__(self):\n return self.get_full_name()\n\n def save(self, *args, **kwargs):\n if self.ntnu_username == \"\":\n self.ntnu_username = None\n self.username = self.username.lower()\n super(OnlineUser, self).save(*args, **kwargs)\n\n def serializable_object(self):\n if self.privacy.expose_phone_number:\n phone = self.phone_number\n else:\n phone = \"Ikke tilgjengelig\"\n\n return {\n 'id': self.id,\n 'phone': strip_tags(phone),\n 'username': strip_tags(self.username),\n 'value': strip_tags(self.get_full_name()), # typeahead\n 'name': strip_tags(self.get_full_name()),\n 'image': self.get_image_url(75),\n }\n\n def get_image_url(self, size=50):\n default = \"%s%s_%s.png\" % (settings.BASE_URL,\n settings.DEFAULT_PROFILE_PICTURE_PREFIX, self.gender)\n\n gravatar_url = \"https://www.gravatar.com/avatar/\" + hashlib.md5(self.email).hexdigest() + \"?\"\n gravatar_url += urllib.urlencode({'d': default, 's':str(size)})\n return gravatar_url\n\n class Meta:\n ordering = ['first_name', 'last_name']\n verbose_name = _(u\"brukerprofil\")\n verbose_name_plural = _(u\"brukerprofiler\")\n\n\nreversion.register(OnlineUser)\n\n\nclass Email(models.Model):\n user = models.ForeignKey(OnlineUser, related_name=\"email_user\")\n email = models.EmailField(_(u\"epostadresse\"), unique=True)\n primary = models.BooleanField(_(u\"primær\"), default=False)\n verified = models.BooleanField(_(u\"verifisert\"), default=False, editable=False)\n\n def save(self, *args, **kwargs):\n primary_email = self.user.get_email()\n if not primary_email:\n self.primary = True\n elif primary_email.email != self.email:\n self.primary = False\n self.email = self.email.lower()\n if self.primary:\n self.user.email = self.email\n self.user.save()\n super(Email, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return self.email\n\n class Meta:\n verbose_name = _(u\"epostadresse\")\n verbose_name_plural = _(u\"epostadresser\")\n\n\nreversion.register(Email)\n\n\nclass RegisterToken(models.Model):\n user = models.ForeignKey(OnlineUser, related_name=\"register_user\")\n email = models.EmailField(_(u\"epost\"), max_length=254)\n token = 
models.CharField(_(u\"token\"), max_length=32)\n created = models.DateTimeField(_(u\"opprettet dato\"), editable=False, auto_now_add=True)\n\n @property\n def is_valid(self):\n valid_period = datetime.timedelta(days=1)\n now = timezone.now()\n return now < self.created + valid_period \n\n\nreversion.register(RegisterToken)\n\n\nclass AllowedUsername(models.Model):\n \"\"\"\n Holds usernames that are considered valid members of Online and the time they expire.\n \"\"\"\n username = models.CharField(_(u\"NTNU-brukernavn\"), max_length=10, unique=True)\n registered = models.DateField(_(u\"registrert\"))\n note = models.CharField(_(u\"notat\"), max_length=100)\n description = models.TextField(_(u\"beskrivelse\"), blank=True, null=True)\n expiration_date = models.DateField(_(u\"utløpsdato\"))\n\n @property\n def is_active(self):\n return timezone.now().date() < self.expiration_date\n\n def save(self, *args, **kwargs):\n self.username = self.username.lower()\n super(AllowedUsername, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return self.username\n\n class Meta:\n verbose_name = _(u\"medlem\")\n verbose_name_plural = _(u\"medlemsregister\")\n ordering = (u\"username\",)\n\n\nreversion.register(AllowedUsername)\n\n\nclass Position(models.Model):\n \"\"\"\n Contains a users position in the organization from a given year\n \"\"\"\n period = models.CharField(_(u'periode'), max_length=9, default=\"2013-2014\", blank=False)\n committee = models.CharField(_(u\"komite\"), max_length=10, choices=COMMITTEES, default=\"hs\")\n position = models.CharField(_(u\"stilling\"), max_length=10, choices=POSITIONS, default=\"medlem\")\n user = models.ForeignKey(OnlineUser, related_name='positions', blank=False)\n\n @property\n def print_string(self):\n return '%s: %s(%s)' % (self.period, self.committee, self.position)\n\n def __unicode__(self):\n return self.print_string\n\n class Meta:\n verbose_name = _(u'posisjon')\n verbose_name_plural = _(u'posisjoner')\n ordering = ('user', 'period', )\n\n\nreversion.register(Position)\n\n\nclass SpecialPosition(models.Model):\n \"\"\"\n Special object to represent special positions that typically lasts for life.\n \"\"\"\n position = models.CharField(_(u'Posisjon'), max_length=50, blank=False)\n since_year = models.IntegerField(_(u'Medlem siden'), max_length=4, blank=False)\n user = models.ForeignKey(OnlineUser, related_name='special_positions', blank=False)\n\n def __unicode__(self):\n return '%s, %s' % (self.user.get_full_name(), self.position)\n\n class Meta:\n verbose_name = _(u'spesialposisjon')\n verbose_name_plural = _(u'spesialposisjoner')\n ordering = ('user', 'since_year',)\n\n\nreversion.register(SpecialPosition)\n\n\n# Register OnlineUser in watson index for searching\nwatson.register(OnlineUser, fields=('first_name', 'last_name', 'ntnu_username', 'nickname'))\n", "path": "apps/authentication/models.py" } ]
[ { "content": "# -*- coding: utf-8 -*-\n\nimport datetime\nimport socket\nimport urllib\nimport hashlib\n\nfrom django.conf import settings\nfrom django.contrib.auth.models import AbstractUser\nfrom django.db import models\nfrom django.utils.translation import ugettext as _\nfrom django.utils import timezone\nfrom django.utils.html import strip_tags\n\nimport watson\n\n\n# If this list is changed, remember to check that the year property on\n# OnlineUser is still correct!\nFIELD_OF_STUDY_CHOICES = [\n (0, _(u'Gjest')),\n (1, _(u'Bachelor i Informatikk (BIT)')),\n # master degrees take up the interval [10,30]\n (10, _(u'Software (SW)')),\n (11, _(u'Informasjonsforvaltning (DIF)')),\n (12, _(u'Komplekse Datasystemer (KDS)')),\n (13, _(u'Spillteknologi (SPT)')),\n (14, _(u'Intelligente Systemer (IRS)')),\n (15, _(u'Helseinformatikk (MSMEDTEK)')),\n (30, _(u'Annen mastergrad')),\n (80, _(u'PhD')),\n (90, _(u'International')),\n (100, _(u'Annet Onlinemedlem')),\n]\n\nGENDER_CHOICES = [\n (\"male\", _(u\"mann\")),\n (\"female\", _(u\"kvinne\")),\n]\n\nCOMMITTEES = [\n ('hs', _(u'Hovedstyret')),\n ('arrkom', _(u'Arrangementskomiteen')),\n ('bankom', _(u'Bank- og økonomikomiteen')),\n ('bedkom', _(u'Bedriftskomiteen')),\n ('dotkom', _(u'Drifts- og utviklingskomiteen')),\n ('ekskom', _(u'Ekskursjonskomiteen')),\n ('fagkom', _(u'Fag- og kurskomiteen')),\n ('jubkom', _(u'Jubileumskomiteen')),\n ('pangkom', _(u'Pensjonistkomiteen')),\n ('prokom', _(u'Profil-og aviskomiteen')),\n ('trikom', _(u'Trivselskomiteen')),\n ('velkom', _(u'Velkomstkomiteen')),\n ('appkom', _(u'Applikasjonskomiteen')),\n]\n\nPOSITIONS = [\n ('medlem', _(u'Medlem')),\n ('leder', _(u'Leder')),\n ('nestleder', _(u'Nestleder')),\n ('okoans', _(u'Økonomiansvarlig')),\n]\n\nclass OnlineUser(AbstractUser):\n\n IMAGE_FOLDER = \"images/profiles\"\n IMAGE_EXTENSIONS = ['.jpg', '.jpeg', '.gif', '.png']\n \n # Online related fields\n field_of_study = models.SmallIntegerField(_(u\"studieretning\"), choices=FIELD_OF_STUDY_CHOICES, default=0)\n started_date = models.DateField(_(u\"startet studie\"), default=timezone.now().date())\n compiled = models.BooleanField(_(u\"kompilert\"), default=False)\n\n # Email\n infomail = models.BooleanField(_(u\"vil ha infomail\"), default=True)\n\n # Address\n phone_number = models.CharField(_(u\"telefonnummer\"), max_length=20, blank=True, null=True)\n address = models.CharField(_(u\"adresse\"), max_length=100, blank=True, null=True)\n zip_code = models.CharField(_(u\"postnummer\"), max_length=4, blank=True, null=True)\n\n # Other\n allergies = models.TextField(_(u\"allergier\"), blank=True, null=True)\n mark_rules = models.BooleanField(_(u\"godtatt prikkeregler\"), default=False)\n rfid = models.CharField(_(u\"RFID\"), max_length=50, blank=True, null=True)\n nickname = models.CharField(_(u\"nickname\"), max_length=50, blank=True, null=True)\n website = models.URLField(_(u\"hjemmeside\"), blank=True, null=True)\n gender = models.CharField(_(u\"kjønn\"), max_length=10, choices=GENDER_CHOICES, default=\"male\")\n\n # NTNU credentials\n ntnu_username = models.CharField(_(u\"NTNU-brukernavn\"), max_length=10, blank=True, null=True, unique=True)\n\n # TODO checkbox for forwarding of @online.ntnu.no mail\n\n @property\n def is_member(self):\n \"\"\"\n Returns true if the User object is associated with Online.\n \"\"\"\n if self.ntnu_username:\n if AllowedUsername.objects.filter(username=self.ntnu_username.lower()).filter(expiration_date__gte=timezone.now()).count() > 0:\n return True\n return False\n\n def 
get_full_name(self):\n \"\"\"\n Returns the first_name plus the last_name, with a space in between.\n \"\"\"\n full_name = u'%s %s' % (self.first_name, self.last_name)\n return full_name.strip()\n\n def get_email(self):\n email = self.get_emails().filter(primary = True)\n if email:\n return email[0]\n return None\n\n def get_emails(self):\n return Email.objects.all().filter(user = self)\n\n def in_group(self, group_name):\n return reduce(lambda x,y: x or y.name == group_name, self.groups.all(), False)\n\n @property\n def year(self):\n today = timezone.now().date()\n started = self.started_date\n\n # We say that a year is 360 days incase we are a bit slower to\n # add users one year.\n year = ((today - started).days / 360) + 1\n\n if self.field_of_study == 0 or self.field_of_study == 100: # others\n return 0\n # dont return a bachelor student as 4th or 5th grade\n elif self.field_of_study == 1: # bachelor\n if year > 3:\n return 3\n return year\n elif 10 <= self.field_of_study <= 30: # 10-29 is considered master\n if year >= 2:\n return 5\n return 4\n elif self.field_of_study == 80: # phd\n return year + 5\n elif self.field_of_study == 90: # international\n if year == 1:\n return 1\n return 4\n # If user's field of study is not matched by any of these tests, return -1\n else:\n return -1\n\n @models.permalink\n def get_absolute_url(self):\n return ('profiles_view', None, {'username': self.username})\n\n def __unicode__(self):\n return self.get_full_name()\n\n def save(self, *args, **kwargs):\n if self.ntnu_username == \"\":\n self.ntnu_username = None\n super(OnlineUser, self).save(*args, **kwargs)\n\n def serializable_object(self):\n if self.privacy.expose_phone_number:\n phone = self.phone_number\n else:\n phone = \"Ikke tilgjengelig\"\n\n return {\n 'id': self.id,\n 'phone': strip_tags(phone),\n 'username': strip_tags(self.username),\n 'value': strip_tags(self.get_full_name()), # typeahead\n 'name': strip_tags(self.get_full_name()),\n 'image': self.get_image_url(75),\n }\n\n def get_image_url(self, size=50):\n default = \"%s%s_%s.png\" % (settings.BASE_URL,\n settings.DEFAULT_PROFILE_PICTURE_PREFIX, self.gender)\n\n gravatar_url = \"https://www.gravatar.com/avatar/\" + hashlib.md5(self.email).hexdigest() + \"?\"\n gravatar_url += urllib.urlencode({'d': default, 's':str(size)})\n return gravatar_url\n\n class Meta:\n ordering = ['first_name', 'last_name']\n verbose_name = _(u\"brukerprofil\")\n verbose_name_plural = _(u\"brukerprofiler\")\n\n\nclass Email(models.Model):\n user = models.ForeignKey(OnlineUser, related_name=\"email_user\")\n email = models.EmailField(_(u\"epostadresse\"), unique=True)\n primary = models.BooleanField(_(u\"primær\"), default=False)\n verified = models.BooleanField(_(u\"verifisert\"), default=False, editable=False)\n\n def save(self, *args, **kwargs):\n primary_email = self.user.get_email()\n if not primary_email:\n self.primary = True\n elif primary_email.email != self.email:\n self.primary = False\n self.email = self.email.lower()\n if self.primary:\n self.user.email = self.email\n self.user.save()\n super(Email, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return self.email\n\n class Meta:\n verbose_name = _(u\"epostadresse\")\n verbose_name_plural = _(u\"epostadresser\")\n\n\nclass RegisterToken(models.Model):\n user = models.ForeignKey(OnlineUser, related_name=\"register_user\")\n email = models.EmailField(_(u\"epost\"), max_length=254)\n token = models.CharField(_(u\"token\"), max_length=32)\n created = models.DateTimeField(_(u\"opprettet 
dato\"), editable=False, auto_now_add=True)\n\n @property\n def is_valid(self):\n valid_period = datetime.timedelta(days=1)\n now = timezone.now()\n return now < self.created + valid_period \n\n\nclass AllowedUsername(models.Model):\n \"\"\"\n Holds usernames that are considered valid members of Online and the time they expire.\n \"\"\"\n username = models.CharField(_(u\"NTNU-brukernavn\"), max_length=10, unique=True)\n registered = models.DateField(_(u\"registrert\"))\n note = models.CharField(_(u\"notat\"), max_length=100)\n description = models.TextField(_(u\"beskrivelse\"), blank=True, null=True)\n expiration_date = models.DateField(_(u\"utløpsdato\"))\n\n @property\n def is_active(self):\n return timezone.now().date() < self.expiration_date\n\n def save(self, *args, **kwargs):\n self.username = self.username.lower()\n super(AllowedUsername, self).save(*args, **kwargs)\n\n def __unicode__(self):\n return self.username\n\n class Meta:\n verbose_name = _(u\"medlem\")\n verbose_name_plural = _(u\"medlemsregister\")\n ordering = (u\"username\",)\n\n\nclass Position(models.Model):\n \"\"\"\n Contains a users position in the organization from a given year\n \"\"\"\n period = models.CharField(_(u'periode'), max_length=9, default=\"2013-2014\", blank=False)\n committee = models.CharField(_(u\"komite\"), max_length=10, choices=COMMITTEES, default=\"hs\")\n position = models.CharField(_(u\"stilling\"), max_length=10, choices=POSITIONS, default=\"medlem\")\n user = models.ForeignKey(OnlineUser, related_name='positions', blank=False)\n\n @property\n def print_string(self):\n return '%s: %s(%s)' % (self.period, self.committee, self.position)\n\n def __unicode__(self):\n return self.print_string\n\n class Meta:\n verbose_name = _(u'posisjon')\n verbose_name_plural = _(u'posisjoner')\n ordering = ('user', 'period', )\n\n\nclass SpecialPosition(models.Model):\n \"\"\"\n Special object to represent special positions that typically lasts for life.\n \"\"\"\n position = models.CharField(_(u'Posisjon'), max_length=50, blank=False)\n since_year = models.IntegerField(_(u'Medlem siden'), max_length=4, blank=False)\n user = models.ForeignKey(OnlineUser, related_name='special_positions', blank=False)\n\n def __unicode__(self):\n return '%s, %s' % (self.user.get_full_name(), self.position)\n\n class Meta:\n verbose_name = _(u'spesialposisjon')\n verbose_name_plural = _(u'spesialposisjoner')\n ordering = ('user', 'since_year',)\n\n\n\n# Register OnlineUser in watson index for searching\nwatson.register(OnlineUser, fields=('first_name', 'last_name', 'ntnu_username', 'nickname'))\n", "path": "apps/authentication/models.py" } ]
diff --git a/apps/authentication/models.py b/apps/authentication/models.py index 101fa474f..f407da014 100644 --- a/apps/authentication/models.py +++ b/apps/authentication/models.py @@ -51,6 +51,7 @@ ('prokom', _(u'Profil-og aviskomiteen')), ('trikom', _(u'Trivselskomiteen')), ('velkom', _(u'Velkomstkomiteen')), + ('appkom', _(u'Applikasjonskomiteen')), ] POSITIONS = [ diff --git a/templates/frontpage.html b/templates/frontpage.html index 11dbf0e04..1dc1f4def 100755 --- a/templates/frontpage.html +++ b/templates/frontpage.html @@ -274,6 +274,11 @@ <h2 id="about-heading">OM ONLINE</h2> <div class="tab-pane" id="trikom"> {% filter markdown %} {% chunk 'om_trikom' %} +{% endfilter %} + </div> + <div class="tab-pane" id="appkom"> +{% filter markdown %} +{% chunk 'om_appkom' %} {% endfilter %} </div> <div class="tab-pane" id="ekskom"> @@ -324,6 +329,7 @@ <h2 id="about-heading">OM ONLINE</h2> <li><a href="#prokom">ProKom</a></li> <li><a href="#trikom">TriKom</a></li> <li class="nav-header">Nodekomiteer</li> + <li><a href="#appkom">AppKom</a></li> <li><a href="#ekskom">EksKom</a></li> <li><a href="#jubkom">JubKom</a></li> <li><a href="#velkom">VelKom</a></li>
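The fix adds `('appkom', _(u'Applikasjonskomiteen'))` to the `COMMITTEES` choices (plus a matching tab on the front page). With Django-style choices, a value that is not in the list is rejected at validation time, which is why no AppKom position could be saved. Below is a plain-Python sketch of that mechanism, not the actual Django validation code; the `validate_committee` helper is invented for illustration.

```python
# Plain-Python sketch (no Django) of why a missing tuple in a "choices"
# list blocks selection: validation only accepts the listed keys.
COMMITTEES = [
    ('hs', u'Hovedstyret'),
    ('dotkom', u'Drifts- og utviklingskomiteen'),
    # ('appkom', u'Applikasjonskomiteen'),  # the entry the patch adds
]


def validate_committee(value):
    valid = set(key for key, _label in COMMITTEES)
    if value not in valid:
        raise ValueError('%r is not a valid committee choice' % value)
    return value


validate_committee('dotkom')      # accepted
try:
    validate_committee('appkom')  # rejected until the choice is added
except ValueError as exc:
    print(exc)
```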
getmoto__moto-431
SQS MD5 Hashing Issues I've started using Moto as a standalone server to aid testing a PHP stack. I've discovered that once I create a message which contains encapsulated (escaped) JSON - it starts to fail with the AWS PHP SDK, although it works fine with Boto2. The issue appears to be in and around the calculation of the MD5 sum. I suspect Boto must be ignoring the MD5, otherwise I'd think it would be reporting the same problem. The exception I get from PHP SDK: ``` PHP Warning: Uncaught Aws\Sqs\Exception\SqsException: AWS Error Code: , Status Code: , AWS Request ID: , AWS Error Type: , AWS Error Message: Body MD5 mismatch for array ( 'MessageId' => '97f171c9-b7a5-b764-f3e0-4234555f509f', 'ReceiptHandle' => 'nntoxkpevzvvbvbvylufszndstdeplilaxnckhsceeztjvmdqtzpxptfoeyndfgscncydyntjilbppbgsrwlldsjpksxklybpayijnoewirfexullvcdtmbvuablunaykrqomudptfmnznriseoegwopnaxidtwwsmoikjndpaxilaicgcbpisdpt', 'MD5OfBody' => '08ab38f810e137a6cce4990c3952be77', 'Body' => '{ ``` Trying to reproduce that MD5 of the body using the same body contents from a json file: PHP: ``` php > $body = file_get_contents(__DIR__ . '/test.json'); php > echo md5($body); 6d8dc937d72f4cdfad4b76be545dda6b ``` Python: ``` >>> import hashlib >>> with open('git_src/api/data/sqs/ses/temp_bounce.json') as myfile: ... data=myfile.read() >>> hashlib.md5(data).hexdigest() '6d8dc937d72f4cdfad4b76be545dda6b' >>> from xml.sax.saxutils import escape >>> hashlib.md5(escape(data).encode('utf-8')).hexdigest() '08ab38f810e137a6cce4990c3952be77' ``` So it seems the XML escaping is causing the problem. Before I put together a PR I'll confirm how the real AWS SQS service calculates this MD5.
[ { "content": "from __future__ import unicode_literals\n\nimport hashlib\nimport time\nimport re\nfrom xml.sax.saxutils import escape\n\nimport boto.sqs\n\nfrom moto.core import BaseBackend\nfrom moto.core.utils import camelcase_to_underscores, get_random_message_id\nfrom .utils import generate_receipt_handle, unix_time_millis\nfrom .exceptions import (\n ReceiptHandleIsInvalid,\n MessageNotInflight\n)\n\nDEFAULT_ACCOUNT_ID = 123456789012\n\n\nclass Message(object):\n def __init__(self, message_id, body):\n self.id = message_id\n self._body = body\n self.message_attributes = {}\n self.receipt_handle = None\n self.sender_id = DEFAULT_ACCOUNT_ID\n self.sent_timestamp = None\n self.approximate_first_receive_timestamp = None\n self.approximate_receive_count = 0\n self.visible_at = 0\n self.delayed_until = 0\n\n @property\n def md5(self):\n body_md5 = hashlib.md5()\n body_md5.update(self.body.encode('utf-8'))\n return body_md5.hexdigest()\n\n @property\n def body(self):\n return escape(self._body)\n\n def mark_sent(self, delay_seconds=None):\n self.sent_timestamp = unix_time_millis()\n if delay_seconds:\n self.delay(delay_seconds=delay_seconds)\n\n def mark_received(self, visibility_timeout=None):\n \"\"\"\n When a message is received we will set the first receive timestamp,\n tap the ``approximate_receive_count`` and the ``visible_at`` time.\n \"\"\"\n if visibility_timeout:\n visibility_timeout = int(visibility_timeout)\n else:\n visibility_timeout = 0\n\n if not self.approximate_first_receive_timestamp:\n self.approximate_first_receive_timestamp = unix_time_millis()\n\n self.approximate_receive_count += 1\n\n # Make message visible again in the future unless its\n # destroyed.\n if visibility_timeout:\n self.change_visibility(visibility_timeout)\n\n self.receipt_handle = generate_receipt_handle()\n\n def change_visibility(self, visibility_timeout):\n # We're dealing with milliseconds internally\n visibility_timeout_msec = int(visibility_timeout) * 1000\n self.visible_at = unix_time_millis() + visibility_timeout_msec\n\n def delay(self, delay_seconds):\n delay_msec = int(delay_seconds) * 1000\n self.delayed_until = unix_time_millis() + delay_msec\n\n @property\n def visible(self):\n current_time = unix_time_millis()\n if current_time > self.visible_at:\n return True\n return False\n\n @property\n def delayed(self):\n current_time = unix_time_millis()\n if current_time < self.delayed_until:\n return True\n return False\n\n\nclass Queue(object):\n camelcase_attributes = ['ApproximateNumberOfMessages',\n 'ApproximateNumberOfMessagesDelayed',\n 'ApproximateNumberOfMessagesNotVisible',\n 'CreatedTimestamp',\n 'DelaySeconds',\n 'LastModifiedTimestamp',\n 'MaximumMessageSize',\n 'MessageRetentionPeriod',\n 'QueueArn',\n 'ReceiveMessageWaitTimeSeconds',\n 'VisibilityTimeout',\n 'WaitTimeSeconds']\n\n def __init__(self, name, visibility_timeout, wait_time_seconds, region):\n self.name = name\n self.visibility_timeout = visibility_timeout or 30\n self.region = region\n\n # wait_time_seconds will be set to immediate return messages\n self.wait_time_seconds = wait_time_seconds or 0\n self._messages = []\n\n now = time.time()\n\n self.created_timestamp = now\n self.delay_seconds = 0\n self.last_modified_timestamp = now\n self.maximum_message_size = 64 << 10\n self.message_retention_period = 86400 * 4 # four days\n self.queue_arn = 'arn:aws:sqs:sqs.us-east-1:123456789012:%s' % self.name\n self.receive_message_wait_time_seconds = 0\n\n @classmethod\n def create_from_cloudformation_json(cls, resource_name, 
cloudformation_json, region_name):\n properties = cloudformation_json['Properties']\n\n sqs_backend = sqs_backends[region_name]\n return sqs_backend.create_queue(\n name=properties['QueueName'],\n visibility_timeout=properties.get('VisibilityTimeout'),\n wait_time_seconds=properties.get('WaitTimeSeconds')\n )\n\n @classmethod\n def update_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):\n properties = cloudformation_json['Properties']\n queue_name = properties['QueueName']\n\n sqs_backend = sqs_backends[region_name]\n queue = sqs_backend.get_queue(queue_name)\n if 'VisibilityTimeout' in properties:\n queue.visibility_timeout = int(properties['VisibilityTimeout'])\n\n if 'WaitTimeSeconds' in properties:\n queue.wait_time_seconds = int(properties['WaitTimeSeconds'])\n return queue\n\n @classmethod\n def delete_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):\n properties = cloudformation_json['Properties']\n queue_name = properties['QueueName']\n sqs_backend = sqs_backends[region_name]\n sqs_backend.delete_queue(queue_name)\n\n @property\n def approximate_number_of_messages_delayed(self):\n return len([m for m in self._messages if m.delayed])\n\n @property\n def approximate_number_of_messages_not_visible(self):\n return len([m for m in self._messages if not m.visible])\n\n @property\n def approximate_number_of_messages(self):\n return len(self.messages)\n\n @property\n def physical_resource_id(self):\n return self.name\n\n @property\n def attributes(self):\n result = {}\n for attribute in self.camelcase_attributes:\n result[attribute] = getattr(self, camelcase_to_underscores(attribute))\n return result\n\n @property\n def url(self):\n return \"http://sqs.{0}.amazonaws.com/123456789012/{1}\".format(self.region, self.name)\n\n @property\n def messages(self):\n return [message for message in self._messages if message.visible and not message.delayed]\n\n def add_message(self, message):\n self._messages.append(message)\n\n def get_cfn_attribute(self, attribute_name):\n from moto.cloudformation.exceptions import UnformattedGetAttTemplateException\n if attribute_name == 'Arn':\n return self.queue_arn\n elif attribute_name == 'QueueName':\n return self.name\n raise UnformattedGetAttTemplateException()\n\n\nclass SQSBackend(BaseBackend):\n def __init__(self, region_name):\n self.region_name = region_name\n self.queues = {}\n super(SQSBackend, self).__init__()\n\n def reset(self):\n region_name = self.region_name\n self.__dict__ = {}\n self.__init__(region_name)\n\n def create_queue(self, name, visibility_timeout, wait_time_seconds):\n queue = self.queues.get(name)\n if queue is None:\n queue = Queue(name, visibility_timeout, wait_time_seconds, self.region_name)\n self.queues[name] = queue\n return queue\n\n def list_queues(self, queue_name_prefix):\n re_str = '.*'\n if queue_name_prefix:\n re_str = '^{0}.*'.format(queue_name_prefix)\n prefix_re = re.compile(re_str)\n qs = []\n for name, q in self.queues.items():\n if prefix_re.search(name):\n qs.append(q)\n return qs\n\n def get_queue(self, queue_name):\n return self.queues.get(queue_name, None)\n\n def delete_queue(self, queue_name):\n if queue_name in self.queues:\n return self.queues.pop(queue_name)\n return False\n\n def set_queue_attribute(self, queue_name, key, value):\n queue = self.get_queue(queue_name)\n setattr(queue, key, value)\n return queue\n\n def send_message(self, queue_name, message_body, message_attributes=None, delay_seconds=None):\n\n queue = 
self.get_queue(queue_name)\n\n if delay_seconds:\n delay_seconds = int(delay_seconds)\n else:\n delay_seconds = queue.delay_seconds\n\n message_id = get_random_message_id()\n message = Message(message_id, message_body)\n\n if message_attributes:\n message.message_attributes = message_attributes\n\n message.mark_sent(\n delay_seconds=delay_seconds\n )\n\n queue.add_message(message)\n\n return message\n\n def receive_messages(self, queue_name, count, wait_seconds_timeout):\n \"\"\"\n Attempt to retrieve visible messages from a queue.\n\n If a message was read by client and not deleted it is considered to be\n \"inflight\" and cannot be read. We make attempts to obtain ``count``\n messages but we may return less if messages are in-flight or there\n are simple not enough messages in the queue.\n\n :param string queue_name: The name of the queue to read from.\n :param int count: The maximum amount of messages to retrieve.\n \"\"\"\n queue = self.get_queue(queue_name)\n result = []\n\n polling_end = time.time() + wait_seconds_timeout\n\n # queue.messages only contains visible messages\n while True:\n for message in queue.messages:\n message.mark_received(\n visibility_timeout=queue.visibility_timeout\n )\n result.append(message)\n if len(result) >= count:\n break\n\n if result or time.time() > polling_end:\n break\n\n return result\n\n def delete_message(self, queue_name, receipt_handle):\n queue = self.get_queue(queue_name)\n new_messages = []\n for message in queue._messages:\n # Only delete message if it is not visible and the reciept_handle\n # matches.\n if message.receipt_handle == receipt_handle:\n continue\n new_messages.append(message)\n queue._messages = new_messages\n\n def change_message_visibility(self, queue_name, receipt_handle, visibility_timeout):\n queue = self.get_queue(queue_name)\n for message in queue._messages:\n if message.receipt_handle == receipt_handle:\n if message.visible:\n raise MessageNotInflight\n message.change_visibility(visibility_timeout)\n return\n raise ReceiptHandleIsInvalid\n\n def purge_queue(self, queue_name):\n queue = self.get_queue(queue_name)\n queue._messages = []\n\n\nsqs_backends = {}\nfor region in boto.sqs.regions():\n sqs_backends[region.name] = SQSBackend(region.name)\n", "path": "moto/sqs/models.py" } ]
[ { "content": "from __future__ import unicode_literals\n\nimport hashlib\nimport time\nimport re\nfrom xml.sax.saxutils import escape\n\nimport boto.sqs\n\nfrom moto.core import BaseBackend\nfrom moto.core.utils import camelcase_to_underscores, get_random_message_id\nfrom .utils import generate_receipt_handle, unix_time_millis\nfrom .exceptions import (\n ReceiptHandleIsInvalid,\n MessageNotInflight\n)\n\nDEFAULT_ACCOUNT_ID = 123456789012\n\n\nclass Message(object):\n def __init__(self, message_id, body):\n self.id = message_id\n self._body = body\n self.message_attributes = {}\n self.receipt_handle = None\n self.sender_id = DEFAULT_ACCOUNT_ID\n self.sent_timestamp = None\n self.approximate_first_receive_timestamp = None\n self.approximate_receive_count = 0\n self.visible_at = 0\n self.delayed_until = 0\n\n @property\n def md5(self):\n body_md5 = hashlib.md5()\n body_md5.update(self._body.encode('utf-8'))\n return body_md5.hexdigest()\n\n @property\n def body(self):\n return escape(self._body)\n\n def mark_sent(self, delay_seconds=None):\n self.sent_timestamp = unix_time_millis()\n if delay_seconds:\n self.delay(delay_seconds=delay_seconds)\n\n def mark_received(self, visibility_timeout=None):\n \"\"\"\n When a message is received we will set the first receive timestamp,\n tap the ``approximate_receive_count`` and the ``visible_at`` time.\n \"\"\"\n if visibility_timeout:\n visibility_timeout = int(visibility_timeout)\n else:\n visibility_timeout = 0\n\n if not self.approximate_first_receive_timestamp:\n self.approximate_first_receive_timestamp = unix_time_millis()\n\n self.approximate_receive_count += 1\n\n # Make message visible again in the future unless its\n # destroyed.\n if visibility_timeout:\n self.change_visibility(visibility_timeout)\n\n self.receipt_handle = generate_receipt_handle()\n\n def change_visibility(self, visibility_timeout):\n # We're dealing with milliseconds internally\n visibility_timeout_msec = int(visibility_timeout) * 1000\n self.visible_at = unix_time_millis() + visibility_timeout_msec\n\n def delay(self, delay_seconds):\n delay_msec = int(delay_seconds) * 1000\n self.delayed_until = unix_time_millis() + delay_msec\n\n @property\n def visible(self):\n current_time = unix_time_millis()\n if current_time > self.visible_at:\n return True\n return False\n\n @property\n def delayed(self):\n current_time = unix_time_millis()\n if current_time < self.delayed_until:\n return True\n return False\n\n\nclass Queue(object):\n camelcase_attributes = ['ApproximateNumberOfMessages',\n 'ApproximateNumberOfMessagesDelayed',\n 'ApproximateNumberOfMessagesNotVisible',\n 'CreatedTimestamp',\n 'DelaySeconds',\n 'LastModifiedTimestamp',\n 'MaximumMessageSize',\n 'MessageRetentionPeriod',\n 'QueueArn',\n 'ReceiveMessageWaitTimeSeconds',\n 'VisibilityTimeout',\n 'WaitTimeSeconds']\n\n def __init__(self, name, visibility_timeout, wait_time_seconds, region):\n self.name = name\n self.visibility_timeout = visibility_timeout or 30\n self.region = region\n\n # wait_time_seconds will be set to immediate return messages\n self.wait_time_seconds = wait_time_seconds or 0\n self._messages = []\n\n now = time.time()\n\n self.created_timestamp = now\n self.delay_seconds = 0\n self.last_modified_timestamp = now\n self.maximum_message_size = 64 << 10\n self.message_retention_period = 86400 * 4 # four days\n self.queue_arn = 'arn:aws:sqs:sqs.us-east-1:123456789012:%s' % self.name\n self.receive_message_wait_time_seconds = 0\n\n @classmethod\n def create_from_cloudformation_json(cls, resource_name, 
cloudformation_json, region_name):\n properties = cloudformation_json['Properties']\n\n sqs_backend = sqs_backends[region_name]\n return sqs_backend.create_queue(\n name=properties['QueueName'],\n visibility_timeout=properties.get('VisibilityTimeout'),\n wait_time_seconds=properties.get('WaitTimeSeconds')\n )\n\n @classmethod\n def update_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):\n properties = cloudformation_json['Properties']\n queue_name = properties['QueueName']\n\n sqs_backend = sqs_backends[region_name]\n queue = sqs_backend.get_queue(queue_name)\n if 'VisibilityTimeout' in properties:\n queue.visibility_timeout = int(properties['VisibilityTimeout'])\n\n if 'WaitTimeSeconds' in properties:\n queue.wait_time_seconds = int(properties['WaitTimeSeconds'])\n return queue\n\n @classmethod\n def delete_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):\n properties = cloudformation_json['Properties']\n queue_name = properties['QueueName']\n sqs_backend = sqs_backends[region_name]\n sqs_backend.delete_queue(queue_name)\n\n @property\n def approximate_number_of_messages_delayed(self):\n return len([m for m in self._messages if m.delayed])\n\n @property\n def approximate_number_of_messages_not_visible(self):\n return len([m for m in self._messages if not m.visible])\n\n @property\n def approximate_number_of_messages(self):\n return len(self.messages)\n\n @property\n def physical_resource_id(self):\n return self.name\n\n @property\n def attributes(self):\n result = {}\n for attribute in self.camelcase_attributes:\n result[attribute] = getattr(self, camelcase_to_underscores(attribute))\n return result\n\n @property\n def url(self):\n return \"http://sqs.{0}.amazonaws.com/123456789012/{1}\".format(self.region, self.name)\n\n @property\n def messages(self):\n return [message for message in self._messages if message.visible and not message.delayed]\n\n def add_message(self, message):\n self._messages.append(message)\n\n def get_cfn_attribute(self, attribute_name):\n from moto.cloudformation.exceptions import UnformattedGetAttTemplateException\n if attribute_name == 'Arn':\n return self.queue_arn\n elif attribute_name == 'QueueName':\n return self.name\n raise UnformattedGetAttTemplateException()\n\n\nclass SQSBackend(BaseBackend):\n def __init__(self, region_name):\n self.region_name = region_name\n self.queues = {}\n super(SQSBackend, self).__init__()\n\n def reset(self):\n region_name = self.region_name\n self.__dict__ = {}\n self.__init__(region_name)\n\n def create_queue(self, name, visibility_timeout, wait_time_seconds):\n queue = self.queues.get(name)\n if queue is None:\n queue = Queue(name, visibility_timeout, wait_time_seconds, self.region_name)\n self.queues[name] = queue\n return queue\n\n def list_queues(self, queue_name_prefix):\n re_str = '.*'\n if queue_name_prefix:\n re_str = '^{0}.*'.format(queue_name_prefix)\n prefix_re = re.compile(re_str)\n qs = []\n for name, q in self.queues.items():\n if prefix_re.search(name):\n qs.append(q)\n return qs\n\n def get_queue(self, queue_name):\n return self.queues.get(queue_name, None)\n\n def delete_queue(self, queue_name):\n if queue_name in self.queues:\n return self.queues.pop(queue_name)\n return False\n\n def set_queue_attribute(self, queue_name, key, value):\n queue = self.get_queue(queue_name)\n setattr(queue, key, value)\n return queue\n\n def send_message(self, queue_name, message_body, message_attributes=None, delay_seconds=None):\n\n queue = 
self.get_queue(queue_name)\n\n if delay_seconds:\n delay_seconds = int(delay_seconds)\n else:\n delay_seconds = queue.delay_seconds\n\n message_id = get_random_message_id()\n message = Message(message_id, message_body)\n\n if message_attributes:\n message.message_attributes = message_attributes\n\n message.mark_sent(\n delay_seconds=delay_seconds\n )\n\n queue.add_message(message)\n\n return message\n\n def receive_messages(self, queue_name, count, wait_seconds_timeout):\n \"\"\"\n Attempt to retrieve visible messages from a queue.\n\n If a message was read by client and not deleted it is considered to be\n \"inflight\" and cannot be read. We make attempts to obtain ``count``\n messages but we may return less if messages are in-flight or there\n are simple not enough messages in the queue.\n\n :param string queue_name: The name of the queue to read from.\n :param int count: The maximum amount of messages to retrieve.\n \"\"\"\n queue = self.get_queue(queue_name)\n result = []\n\n polling_end = time.time() + wait_seconds_timeout\n\n # queue.messages only contains visible messages\n while True:\n for message in queue.messages:\n message.mark_received(\n visibility_timeout=queue.visibility_timeout\n )\n result.append(message)\n if len(result) >= count:\n break\n\n if result or time.time() > polling_end:\n break\n\n return result\n\n def delete_message(self, queue_name, receipt_handle):\n queue = self.get_queue(queue_name)\n new_messages = []\n for message in queue._messages:\n # Only delete message if it is not visible and the reciept_handle\n # matches.\n if message.receipt_handle == receipt_handle:\n continue\n new_messages.append(message)\n queue._messages = new_messages\n\n def change_message_visibility(self, queue_name, receipt_handle, visibility_timeout):\n queue = self.get_queue(queue_name)\n for message in queue._messages:\n if message.receipt_handle == receipt_handle:\n if message.visible:\n raise MessageNotInflight\n message.change_visibility(visibility_timeout)\n return\n raise ReceiptHandleIsInvalid\n\n def purge_queue(self, queue_name):\n queue = self.get_queue(queue_name)\n queue._messages = []\n\n\nsqs_backends = {}\nfor region in boto.sqs.regions():\n sqs_backends[region.name] = SQSBackend(region.name)\n", "path": "moto/sqs/models.py" } ]
diff --git a/moto/sqs/models.py b/moto/sqs/models.py index bc0a5a4c610c..efb75dd9c40e 100644 --- a/moto/sqs/models.py +++ b/moto/sqs/models.py @@ -34,7 +34,7 @@ def __init__(self, message_id, body): @property def md5(self): body_md5 = hashlib.md5() - body_md5.update(self.body.encode('utf-8')) + body_md5.update(self._body.encode('utf-8')) return body_md5.hexdigest() @property
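The one-line change in this diff matters because the `body` property runs the message through `xml.sax.saxutils.escape()`, so hashing it produces a digest different from the one a client computes over the raw body it sent; hashing `_body` keeps the two in agreement. A minimal sketch of the difference, using a made-up message body:

```python
import hashlib
from xml.sax.saxutils import escape

body = "if a < b then a & b"  # hypothetical body containing XML-special characters

raw_md5 = hashlib.md5(body.encode("utf-8")).hexdigest()              # what a client computes
escaped_md5 = hashlib.md5(escape(body).encode("utf-8")).hexdigest()  # what hashing the escaped body gives

print(raw_md5 == escaped_md5)  # False: escape() rewrites '<' and '&', so the digests diverge
```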
incuna__django-pgcrypto-fields-78
EmailPGPPublicKeyField does not use the correct mixin As defined in https://github.com/incuna/django-pgcrypto-fields/blob/master/pgcrypto/fields.py#L41 `EmailPGPPublicKeyField` uses the `PGPSymmetricKeyFieldMixin` mixin instead of the `PGPPublicKeyFieldMixin` one.
[ { "content": "from django.db import models\n\nfrom pgcrypto import (\n DIGEST_SQL,\n HMAC_SQL,\n PGP_PUB_ENCRYPT_SQL_WITH_NULLIF,\n PGP_SYM_ENCRYPT_SQL_WITH_NULLIF,\n)\nfrom pgcrypto.lookups import (\n HashLookup,\n)\nfrom pgcrypto.mixins import (\n DecimalPGPFieldMixin,\n get_setting,\n HashMixin,\n PGPPublicKeyFieldMixin,\n PGPSymmetricKeyFieldMixin,\n)\n\n\nclass TextDigestField(HashMixin, models.TextField):\n \"\"\"Text digest field for postgres.\"\"\"\n encrypt_sql = DIGEST_SQL\n\n def get_encrypt_sql(self, connection):\n \"\"\"Get encrypt sql.\"\"\"\n return self.encrypt_sql.format(get_setting(connection, 'PGCRYPTO_KEY'))\n\n\nTextDigestField.register_lookup(HashLookup)\n\n\nclass TextHMACField(HashMixin, models.TextField):\n \"\"\"Text HMAC field for postgres.\"\"\"\n encrypt_sql = HMAC_SQL\n\n\nTextHMACField.register_lookup(HashLookup)\n\n\nclass EmailPGPPublicKeyField(PGPSymmetricKeyFieldMixin, models.EmailField):\n \"\"\"Email PGP public key encrypted field.\"\"\"\n\n\nclass IntegerPGPPublicKeyField(PGPPublicKeyFieldMixin, models.IntegerField):\n \"\"\"Integer PGP public key encrypted field.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'INT4'\n\n\nclass TextPGPPublicKeyField(PGPPublicKeyFieldMixin, models.TextField):\n \"\"\"Text PGP public key encrypted field.\"\"\"\n\n\nclass CharPGPPublicKeyField(PGPPublicKeyFieldMixin, models.CharField):\n \"\"\"Char PGP public key encrypted field.\"\"\"\n\n\nclass DatePGPPublicKeyField(PGPPublicKeyFieldMixin, models.DateField):\n \"\"\"Date PGP public key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'DATE'\n\n\nclass DateTimePGPPublicKeyField(PGPPublicKeyFieldMixin, models.DateTimeField):\n \"\"\"DateTime PGP public key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'TIMESTAMP'\n\n\nclass EmailPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.EmailField):\n \"\"\"Email PGP symmetric key encrypted field.\"\"\"\n\n\nclass IntegerPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.IntegerField):\n \"\"\"Integer PGP symmetric key encrypted field.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'INT4'\n\n\nclass TextPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.TextField):\n \"\"\"Text PGP symmetric key encrypted field for postgres.\"\"\"\n\n\nclass CharPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.CharField):\n \"\"\"Char PGP symmetric key encrypted field for postgres.\"\"\"\n\n\nclass DatePGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.DateField):\n \"\"\"Date PGP symmetric key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'DATE'\n\n\nclass DateTimePGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.DateTimeField):\n \"\"\"DateTime PGP symmetric key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'TIMESTAMP'\n\n\nclass DecimalPGPPublicKeyField(DecimalPGPFieldMixin,\n PGPPublicKeyFieldMixin, models.DecimalField):\n \"\"\"Decimal PGP public key encrypted field for postgres.\"\"\"\n\n\nclass DecimalPGPSymmetricKeyField(DecimalPGPFieldMixin,\n PGPSymmetricKeyFieldMixin, models.DecimalField):\n \"\"\"Decimal PGP symmetric key encrypted field for postgres.\"\"\"\n\n\nclass FloatPGPPublicKeyField(PGPPublicKeyFieldMixin, models.FloatField):\n \"\"\"Float PGP public key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 
'DOUBLE PRECISION'\n\n\nclass FloatPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.FloatField):\n \"\"\"Float PGP symmetric key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'DOUBLE PRECISION'\n\n\nclass TimePGPPublicKeyField(PGPPublicKeyFieldMixin, models.TimeField):\n \"\"\"Time PGP public key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'TIME'\n\n\nclass TimePGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.TimeField):\n \"\"\"Float PGP symmetric key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'TIME'\n", "path": "pgcrypto/fields.py" } ]
[ { "content": "from django.db import models\n\nfrom pgcrypto import (\n DIGEST_SQL,\n HMAC_SQL,\n PGP_PUB_ENCRYPT_SQL_WITH_NULLIF,\n PGP_SYM_ENCRYPT_SQL_WITH_NULLIF,\n)\nfrom pgcrypto.lookups import (\n HashLookup,\n)\nfrom pgcrypto.mixins import (\n DecimalPGPFieldMixin,\n get_setting,\n HashMixin,\n PGPPublicKeyFieldMixin,\n PGPSymmetricKeyFieldMixin,\n)\n\n\nclass TextDigestField(HashMixin, models.TextField):\n \"\"\"Text digest field for postgres.\"\"\"\n encrypt_sql = DIGEST_SQL\n\n def get_encrypt_sql(self, connection):\n \"\"\"Get encrypt sql.\"\"\"\n return self.encrypt_sql.format(get_setting(connection, 'PGCRYPTO_KEY'))\n\n\nTextDigestField.register_lookup(HashLookup)\n\n\nclass TextHMACField(HashMixin, models.TextField):\n \"\"\"Text HMAC field for postgres.\"\"\"\n encrypt_sql = HMAC_SQL\n\n\nTextHMACField.register_lookup(HashLookup)\n\n\nclass EmailPGPPublicKeyField(PGPPublicKeyFieldMixin, models.EmailField):\n \"\"\"Email PGP public key encrypted field.\"\"\"\n\n\nclass IntegerPGPPublicKeyField(PGPPublicKeyFieldMixin, models.IntegerField):\n \"\"\"Integer PGP public key encrypted field.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'INT4'\n\n\nclass TextPGPPublicKeyField(PGPPublicKeyFieldMixin, models.TextField):\n \"\"\"Text PGP public key encrypted field.\"\"\"\n\n\nclass CharPGPPublicKeyField(PGPPublicKeyFieldMixin, models.CharField):\n \"\"\"Char PGP public key encrypted field.\"\"\"\n\n\nclass DatePGPPublicKeyField(PGPPublicKeyFieldMixin, models.DateField):\n \"\"\"Date PGP public key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'DATE'\n\n\nclass DateTimePGPPublicKeyField(PGPPublicKeyFieldMixin, models.DateTimeField):\n \"\"\"DateTime PGP public key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'TIMESTAMP'\n\n\nclass EmailPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.EmailField):\n \"\"\"Email PGP symmetric key encrypted field.\"\"\"\n\n\nclass IntegerPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.IntegerField):\n \"\"\"Integer PGP symmetric key encrypted field.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'INT4'\n\n\nclass TextPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.TextField):\n \"\"\"Text PGP symmetric key encrypted field for postgres.\"\"\"\n\n\nclass CharPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.CharField):\n \"\"\"Char PGP symmetric key encrypted field for postgres.\"\"\"\n\n\nclass DatePGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.DateField):\n \"\"\"Date PGP symmetric key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'DATE'\n\n\nclass DateTimePGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.DateTimeField):\n \"\"\"DateTime PGP symmetric key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'TIMESTAMP'\n\n\nclass DecimalPGPPublicKeyField(DecimalPGPFieldMixin,\n PGPPublicKeyFieldMixin, models.DecimalField):\n \"\"\"Decimal PGP public key encrypted field for postgres.\"\"\"\n\n\nclass DecimalPGPSymmetricKeyField(DecimalPGPFieldMixin,\n PGPSymmetricKeyFieldMixin, models.DecimalField):\n \"\"\"Decimal PGP symmetric key encrypted field for postgres.\"\"\"\n\n\nclass FloatPGPPublicKeyField(PGPPublicKeyFieldMixin, models.FloatField):\n \"\"\"Float PGP public key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'DOUBLE 
PRECISION'\n\n\nclass FloatPGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.FloatField):\n \"\"\"Float PGP symmetric key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'DOUBLE PRECISION'\n\n\nclass TimePGPPublicKeyField(PGPPublicKeyFieldMixin, models.TimeField):\n \"\"\"Time PGP public key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_PUB_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'TIME'\n\n\nclass TimePGPSymmetricKeyField(PGPSymmetricKeyFieldMixin, models.TimeField):\n \"\"\"Float PGP symmetric key encrypted field for postgres.\"\"\"\n encrypt_sql = PGP_SYM_ENCRYPT_SQL_WITH_NULLIF\n cast_type = 'TIME'\n", "path": "pgcrypto/fields.py" } ]
diff --git a/pgcrypto/fields.py b/pgcrypto/fields.py index af9f201..a05d553 100644 --- a/pgcrypto/fields.py +++ b/pgcrypto/fields.py @@ -38,7 +38,7 @@ class TextHMACField(HashMixin, models.TextField): TextHMACField.register_lookup(HashLookup) -class EmailPGPPublicKeyField(PGPSymmetricKeyFieldMixin, models.EmailField): +class EmailPGPPublicKeyField(PGPPublicKeyFieldMixin, models.EmailField): """Email PGP public key encrypted field."""
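Since the regression is purely in the class bases, a tiny test would have caught it. A hypothetical pytest-style sketch (not part of the PR), assuming the package is importable in the test environment:

```python
# Hypothetical regression test: every *PGPPublicKeyField should build on the
# public-key mixin, not the symmetric-key one.
from pgcrypto import fields
from pgcrypto.mixins import PGPPublicKeyFieldMixin


def test_email_pgp_public_key_field_uses_public_key_mixin():
    assert issubclass(fields.EmailPGPPublicKeyField, PGPPublicKeyFieldMixin)
```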
Kinto__kinto-885
Crash when querystring contains null character ``` python Python 2.7.12 (default, Jul 1 2016, 15:12:24) [GCC 5.4.0 20160609] on linux2 Type "help", "copyright", "credits" or "license" for more information. >>> import requests >>> requests.get(u"http://localhost:8888/v1/buckets?_since=\u0000", auth=("user","pass")) <Response [500]> >>> ``` ``` File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 442, in rendered_view result = view(context, request) File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/pyramid/viewderivers.py", line 147, in _requestonly_view response = view(request) File "/home/mathieu/Code/Mozilla/kinto/.venv/local/lib/python2.7/site-packages/cornice/service.py", line 489, in wrapper response = view_() File "/home/mathieu/Code/Mozilla/kinto/kinto/core/resource/__init__.py", line 254, in collection_get File "/home/mathieu/Code/Mozilla/kinto/kinto/core/resource/__init__.py", line 1123, in _extract_filters # In base resource, PATCH only hit storage if no data has changed. File "/home/mathieu/Code/Mozilla/kinto/kinto/core/resource/__init__.py", line 913, in _extract_filters 'location': 'querystring', File "/home/mathieu/Code/Mozilla/kinto/kinto/core/utils.py", line 149, in native_value try: File "/usr/lib/python2.7/ast.py", line 49, in literal_eval node_or_string = parse(node_or_string, mode='eval') File "/usr/lib/python2.7/ast.py", line 37, in parse return compile(source, filename, mode, PyCF_ONLY_AST) TypeError: compile() expected string without null bytes lang=None uid=35e9c5ff6b7d9d89e0c52f8d1da20e2965747ddc812d01b895592a1e98dc1aad ``` Catching `TypeError` in `core.utils.native_value()` should be enough
[ { "content": "import ast\nimport hashlib\nimport hmac\nimport jsonpatch\nimport os\nimport re\nimport six\nimport threading\nimport time\nfrom base64 import b64decode, b64encode\nfrom binascii import hexlify\nfrom six.moves.urllib import parse as urlparse\nfrom enum import Enum\n\n# ujson is not installable with pypy\ntry: # pragma: no cover\n import ujson as json # NOQA\n\n def json_serializer(v, **kw):\n return json.dumps(v, escape_forward_slashes=False)\n\nexcept ImportError: # pragma: no cover\n import json # NOQA\n\n json_serializer = json.dumps\n\ntry:\n # Register psycopg2cffi as psycopg2\n from psycopg2cffi import compat\nexcept ImportError: # pragma: no cover\n pass\nelse: # pragma: no cover\n compat.register()\n\ntry:\n import sqlalchemy\nexcept ImportError: # pragma: no cover\n sqlalchemy = None\n\nfrom pyramid import httpexceptions\nfrom pyramid.interfaces import IRoutesMapper\nfrom pyramid.request import Request, apply_request_extensions\nfrom pyramid.settings import aslist\nfrom pyramid.view import render_view_to_response\nfrom cornice import cors\nfrom colander import null\n\n\ndef strip_whitespace(v):\n \"\"\"Remove whitespace, newlines, and tabs from the beginning/end\n of a string.\n\n :param str v: the string to strip.\n :rtype: str\n \"\"\"\n return v.strip(' \\t\\n\\r') if v is not null else v\n\n\ndef msec_time():\n \"\"\"Return current epoch time in milliseconds.\n\n :rtype: int\n \"\"\"\n return int(time.time() * 1000.0) # floor\n\n\ndef classname(obj):\n \"\"\"Get a classname from an object.\n\n :rtype: str\n \"\"\"\n return obj.__class__.__name__.lower()\n\n\ndef merge_dicts(a, b):\n \"\"\"Merge b into a recursively, without overwriting values.\n\n :param dict a: the dict that will be altered with values of `b`.\n \"\"\"\n for k, v in b.items():\n if isinstance(v, dict):\n merge_dicts(a.setdefault(k, {}), v)\n else:\n a.setdefault(k, v)\n\n\ndef recursive_update_dict(root, changes, ignores=()):\n \"\"\"Update recursively all the entries from a dict and it's children dicts.\n\n :param dict root: root dictionary\n :param dict changes: dictonary where changes should be made (default=root)\n :returns dict newd: dictionary with removed entries of val.\n \"\"\"\n if isinstance(changes, dict):\n for k, v in changes.items():\n if isinstance(v, dict):\n if k not in root:\n root[k] = {}\n recursive_update_dict(root[k], v, ignores)\n elif v in ignores:\n if k in root:\n root.pop(k)\n else:\n root[k] = v\n\n\ndef synchronized(method):\n \"\"\"Class method decorator to make sure two threads do not execute some code\n at the same time (c.f Java ``synchronized`` keyword).\n\n The decorator installs a mutex on the class instance.\n \"\"\"\n def decorated(self, *args, **kwargs):\n try:\n lock = getattr(self, '__lock__')\n except AttributeError:\n lock = threading.RLock()\n setattr(self, '__lock__', lock)\n\n lock.acquire()\n try:\n result = method(self, *args, **kwargs)\n finally:\n lock.release()\n return result\n return decorated\n\n\ndef random_bytes_hex(bytes_length):\n \"\"\"Return a hexstring of bytes_length cryptographic-friendly random bytes.\n\n :param int bytes_length: number of random bytes.\n :rtype: str\n \"\"\"\n return hexlify(os.urandom(bytes_length)).decode('utf-8')\n\n\ndef native_value(value):\n \"\"\"Convert string value to native python values.\n\n :param str value: value to interprete.\n :returns: the value coerced to python type\n \"\"\"\n if isinstance(value, six.string_types):\n if value.lower() in ['on', 'true', 'yes']:\n value = True\n elif 
value.lower() in ['off', 'false', 'no']:\n value = False\n try:\n return ast.literal_eval(value)\n except (ValueError, SyntaxError):\n pass\n return value\n\n\ndef read_env(key, value):\n \"\"\"Read the setting key from environment variables.\n\n :param key: the setting name\n :param value: default value if undefined in environment\n :returns: the value from environment, coerced to python type\n \"\"\"\n envkey = key.replace('.', '_').replace('-', '_').upper()\n return native_value(os.getenv(envkey, value))\n\n\ndef encode64(content, encoding='utf-8'):\n \"\"\"Encode some content in base64.\n\n :rtype: str\n \"\"\"\n return b64encode(content.encode(encoding)).decode(encoding)\n\n\ndef decode64(encoded_content, encoding='utf-8'):\n \"\"\"Decode some base64 encoded content.\n\n :rtype: str\n \"\"\"\n return b64decode(encoded_content.encode(encoding)).decode(encoding)\n\n\ndef hmac_digest(secret, message, encoding='utf-8'):\n \"\"\"Return hex digest of a message HMAC using secret\"\"\"\n if isinstance(secret, six.text_type):\n secret = secret.encode(encoding)\n return hmac.new(secret,\n message.encode(encoding),\n hashlib.sha256).hexdigest()\n\n\ndef dict_subset(d, keys):\n \"\"\"Return a dict with the specified keys\"\"\"\n result = {}\n\n for key in keys:\n if '.' in key:\n field, subfield = key.split('.', 1)\n if isinstance(d.get(field), dict):\n subvalue = dict_subset(d[field], [subfield])\n result.setdefault(field, {}).update(subvalue)\n elif field in d:\n result[field] = d[field]\n else:\n if key in d:\n result[key] = d[key]\n\n return result\n\n\nclass COMPARISON(Enum):\n LT = '<'\n MIN = '>='\n MAX = '<='\n NOT = '!='\n EQ = '=='\n GT = '>'\n IN = 'in'\n EXCLUDE = 'exclude'\n LIKE = 'like'\n\n\ndef reapply_cors(request, response):\n \"\"\"Reapply cors headers to the new response with regards to the request.\n\n We need to re-apply the CORS checks done by Cornice, in case we're\n recreating the response from scratch.\n\n \"\"\"\n service = request.current_service\n if service:\n request.info['cors_checked'] = False\n cors.apply_cors_post_request(service, request, response)\n response = cors.ensure_origin(service, request, response)\n else:\n # No existing service is concerned, and Cornice is not implied.\n origin = request.headers.get('Origin')\n if origin:\n settings = request.registry.settings\n allowed_origins = set(aslist(settings['cors_origins']))\n required_origins = {'*', decode_header(origin)}\n if allowed_origins.intersection(required_origins):\n origin = encode_header(origin)\n response.headers['Access-Control-Allow-Origin'] = origin\n\n # Import service here because kinto.core import utils\n from kinto.core import Service\n if Service.default_cors_headers:\n headers = ','.join(Service.default_cors_headers)\n response.headers['Access-Control-Expose-Headers'] = headers\n return response\n\n\ndef current_service(request):\n \"\"\"Return the Cornice service matching the specified request.\n\n :returns: the service or None if unmatched.\n :rtype: cornice.Service\n \"\"\"\n if request.matched_route:\n services = request.registry.cornice_services\n pattern = request.matched_route.pattern\n try:\n service = services[pattern]\n except KeyError:\n return None\n else:\n return service\n\n\ndef current_resource_name(request):\n \"\"\"Return the name used when the kinto.core resource was registered along its\n viewset.\n\n :returns: the resource identifier.\n :rtype: str\n \"\"\"\n service = current_service(request)\n resource_name = service.viewset.get_name(service.resource)\n return 
resource_name\n\n\ndef build_request(original, dict_obj):\n \"\"\"\n Transform a dict object into a :class:`pyramid.request.Request` object.\n\n It sets a ``parent`` attribute on the resulting request assigned with\n the `original` request specified.\n\n :param original: the original request.\n :param dict_obj: a dict object with the sub-request specifications.\n \"\"\"\n api_prefix = '/%s' % original.upath_info.split('/')[1]\n path = dict_obj['path']\n if not path.startswith(api_prefix):\n path = api_prefix + path\n\n path = path.encode('utf-8')\n\n method = dict_obj.get('method') or 'GET'\n\n headers = dict(original.headers)\n headers.update(**dict_obj.get('headers') or {})\n # Body can have different length, do not use original header.\n headers.pop('Content-Length', None)\n\n payload = dict_obj.get('body') or ''\n\n # Payload is always a dict (from ``BatchRequestSchema.body``).\n # Send it as JSON for subrequests.\n if isinstance(payload, dict):\n headers['Content-Type'] = encode_header(\n 'application/json; charset=utf-8')\n payload = json.dumps(payload)\n\n if six.PY3: # pragma: no cover\n path = path.decode('latin-1')\n\n request = Request.blank(path=path,\n headers=headers,\n POST=payload,\n method=method)\n request.registry = original.registry\n apply_request_extensions(request)\n\n # This is used to distinguish subrequests from direct incoming requests.\n # See :func:`kinto.core.initialization.setup_logging()`\n request.parent = original\n\n return request\n\n\ndef build_response(response, request):\n \"\"\"\n Transform a :class:`pyramid.response.Response` object into a serializable\n dict.\n\n :param response: a response object, returned by Pyramid.\n :param request: the request that was used to get the response.\n \"\"\"\n dict_obj = {}\n dict_obj['path'] = urlparse.unquote(request.path)\n dict_obj['status'] = response.status_code\n dict_obj['headers'] = dict(response.headers)\n\n body = ''\n if request.method != 'HEAD':\n # XXX : Pyramid should not have built response body for HEAD!\n try:\n body = response.json\n except ValueError:\n body = response.body\n dict_obj['body'] = body\n\n return dict_obj\n\n\ndef follow_subrequest(request, subrequest, **kwargs):\n \"\"\"Run a subrequest (e.g. 
batch), and follow the redirection if any.\n\n :rtype: tuple\n :returns: the reponse and the redirection request (or `subrequest`\n if no redirection happened.)\n \"\"\"\n try:\n try:\n return request.invoke_subrequest(subrequest, **kwargs), subrequest\n except Exception as e:\n resp = render_view_to_response(e, subrequest)\n if not resp or resp.status_code >= 500:\n raise e\n raise resp\n except httpexceptions.HTTPRedirection as e:\n new_location = e.headers['Location']\n new_request = Request.blank(path=new_location,\n headers=subrequest.headers,\n POST=subrequest.body,\n method=subrequest.method)\n new_request.bound_data = subrequest.bound_data\n new_request.parent = getattr(subrequest, 'parent', None)\n return request.invoke_subrequest(new_request, **kwargs), new_request\n\n\ndef encode_header(value, encoding='utf-8'):\n return _encoded(value, encoding)\n\n\ndef _encoded(value, encoding='utf-8'):\n \"\"\"Make sure the value is of type ``str`` in both PY2 and PY3.\"\"\"\n value_type = type(value)\n if value_type != str:\n # Test for Python3\n if value_type == six.binary_type: # pragma: no cover\n value = value.decode(encoding)\n # Test for Python2\n elif value_type == six.text_type: # pragma: no cover\n value = value.encode(encoding)\n return value\n\n\ndef decode_header(value, encoding='utf-8'):\n \"\"\"Make sure the header is an unicode string.\"\"\"\n if type(value) == six.binary_type:\n value = value.decode(encoding)\n return value\n\n\ndef strip_uri_prefix(path):\n \"\"\"\n Remove potential version prefix in URI.\n \"\"\"\n return re.sub(r'^(/v\\d+)?', '', six.text_type(path))\n\n\ndef view_lookup(request, uri):\n \"\"\"\n Look-up the specified `uri` and return the associated resource name\n along the match dict.\n\n :param request: the current request (used to obtain registry).\n :param uri: a plural or object endpoint URI.\n :rtype: tuple\n :returns: the resource name and the associated matchdict.\n \"\"\"\n api_prefix = '/%s' % request.upath_info.split('/')[1]\n # Path should be bytes in PY2, and unicode in PY3\n path = _encoded(api_prefix + uri)\n\n q = request.registry.queryUtility\n routes_mapper = q(IRoutesMapper)\n\n fakerequest = Request.blank(path=path)\n info = routes_mapper(fakerequest)\n matchdict, route = info['match'], info['route']\n if route is None:\n raise ValueError(\"URI has no route\")\n\n resource_name = route.name.replace('-record', '')\\\n .replace('-collection', '')\n return resource_name, matchdict\n\n\ndef instance_uri(request, resource_name, **params):\n \"\"\"Return the URI for the given resource.\"\"\"\n return strip_uri_prefix(request.route_path('%s-record' % resource_name,\n **params))\n\n\ndef parse_resource(resource):\n \"\"\"Extract the bucket_id and collection_id of the given resource (URI)\n\n :param str resource: a uri formatted /buckets/<bid>/collections/<cid> or <bid>/<cid>.\n :returns: a dictionary with the bucket_id and collection_id of the resource\n \"\"\"\n\n error_msg = \"Resources should be defined as \"\n \"'/buckets/<bid>/collections/<cid>' or '<bid>/<cid>'. 
\"\n \"with valid collection and bucket ids.\"\n\n from kinto.views import NameGenerator\n id_generator = NameGenerator()\n parts = resource.split('/')\n if len(parts) == 2:\n bucket, collection = parts\n elif len(parts) == 5:\n _, _, bucket, _, collection = parts\n else:\n raise ValueError(error_msg)\n if bucket == '' or collection == '':\n raise ValueError(error_msg)\n if not id_generator.match(bucket) or not id_generator.match(collection):\n raise ValueError(error_msg)\n return {\n 'bucket': bucket,\n 'collection': collection\n }\n\n\ndef apply_json_patch(record, ops):\n \"\"\"\n Apply JSON Patch operations using jsonpatch.\n\n :param record: base record where changes should be applied (not in-place).\n :param list changes: list of JSON patch operations.\n :param bool only_data: param to limit the scope of the patch only to 'data'.\n :returns dict data: patched record data.\n dict permissions: patched record permissions\n \"\"\"\n data = record.copy()\n\n # Permissions should always have read and write fields defined (to allow add)\n permissions = {'read': set(), 'write': set()}\n\n # Get permissions if available on the resource (using SharableResource)\n permissions.update(data.pop('__permissions__', {}))\n\n # Permissions should be mapped as a dict, since jsonpatch doesn't accept\n # sets and lists are mapped as JSON arrays (not indexed by value)\n permissions = {k: {i: i for i in v} for k, v in permissions.items()}\n\n resource = {'data': data, 'permissions': permissions}\n\n # Allow patch permissions without value since key and value are equal on sets\n for op in ops:\n if 'path' in op:\n if op['path'].startswith(('/permissions/read/',\n '/permissions/write/')):\n op['value'] = op['path'].split('/')[-1]\n\n try:\n result = jsonpatch.apply_patch(resource, ops)\n\n except (jsonpatch.JsonPatchException, jsonpatch.JsonPointerException) as e:\n raise ValueError(e)\n\n return result\n", "path": "kinto/core/utils.py" } ]
[ { "content": "import ast\nimport hashlib\nimport hmac\nimport jsonpatch\nimport os\nimport re\nimport six\nimport threading\nimport time\nfrom base64 import b64decode, b64encode\nfrom binascii import hexlify\nfrom six.moves.urllib import parse as urlparse\nfrom enum import Enum\n\n# ujson is not installable with pypy\ntry: # pragma: no cover\n import ujson as json # NOQA\n\n def json_serializer(v, **kw):\n return json.dumps(v, escape_forward_slashes=False)\n\nexcept ImportError: # pragma: no cover\n import json # NOQA\n\n json_serializer = json.dumps\n\ntry:\n # Register psycopg2cffi as psycopg2\n from psycopg2cffi import compat\nexcept ImportError: # pragma: no cover\n pass\nelse: # pragma: no cover\n compat.register()\n\ntry:\n import sqlalchemy\nexcept ImportError: # pragma: no cover\n sqlalchemy = None\n\nfrom pyramid import httpexceptions\nfrom pyramid.interfaces import IRoutesMapper\nfrom pyramid.request import Request, apply_request_extensions\nfrom pyramid.settings import aslist\nfrom pyramid.view import render_view_to_response\nfrom cornice import cors\nfrom colander import null\n\n\ndef strip_whitespace(v):\n \"\"\"Remove whitespace, newlines, and tabs from the beginning/end\n of a string.\n\n :param str v: the string to strip.\n :rtype: str\n \"\"\"\n return v.strip(' \\t\\n\\r') if v is not null else v\n\n\ndef msec_time():\n \"\"\"Return current epoch time in milliseconds.\n\n :rtype: int\n \"\"\"\n return int(time.time() * 1000.0) # floor\n\n\ndef classname(obj):\n \"\"\"Get a classname from an object.\n\n :rtype: str\n \"\"\"\n return obj.__class__.__name__.lower()\n\n\ndef merge_dicts(a, b):\n \"\"\"Merge b into a recursively, without overwriting values.\n\n :param dict a: the dict that will be altered with values of `b`.\n \"\"\"\n for k, v in b.items():\n if isinstance(v, dict):\n merge_dicts(a.setdefault(k, {}), v)\n else:\n a.setdefault(k, v)\n\n\ndef recursive_update_dict(root, changes, ignores=()):\n \"\"\"Update recursively all the entries from a dict and it's children dicts.\n\n :param dict root: root dictionary\n :param dict changes: dictonary where changes should be made (default=root)\n :returns dict newd: dictionary with removed entries of val.\n \"\"\"\n if isinstance(changes, dict):\n for k, v in changes.items():\n if isinstance(v, dict):\n if k not in root:\n root[k] = {}\n recursive_update_dict(root[k], v, ignores)\n elif v in ignores:\n if k in root:\n root.pop(k)\n else:\n root[k] = v\n\n\ndef synchronized(method):\n \"\"\"Class method decorator to make sure two threads do not execute some code\n at the same time (c.f Java ``synchronized`` keyword).\n\n The decorator installs a mutex on the class instance.\n \"\"\"\n def decorated(self, *args, **kwargs):\n try:\n lock = getattr(self, '__lock__')\n except AttributeError:\n lock = threading.RLock()\n setattr(self, '__lock__', lock)\n\n lock.acquire()\n try:\n result = method(self, *args, **kwargs)\n finally:\n lock.release()\n return result\n return decorated\n\n\ndef random_bytes_hex(bytes_length):\n \"\"\"Return a hexstring of bytes_length cryptographic-friendly random bytes.\n\n :param int bytes_length: number of random bytes.\n :rtype: str\n \"\"\"\n return hexlify(os.urandom(bytes_length)).decode('utf-8')\n\n\ndef native_value(value):\n \"\"\"Convert string value to native python values.\n\n :param str value: value to interprete.\n :returns: the value coerced to python type\n \"\"\"\n if isinstance(value, six.string_types):\n if value.lower() in ['on', 'true', 'yes']:\n value = True\n elif 
value.lower() in ['off', 'false', 'no']:\n value = False\n try:\n return ast.literal_eval(value)\n except (TypeError, ValueError, SyntaxError):\n pass\n return value\n\n\ndef read_env(key, value):\n \"\"\"Read the setting key from environment variables.\n\n :param key: the setting name\n :param value: default value if undefined in environment\n :returns: the value from environment, coerced to python type\n \"\"\"\n envkey = key.replace('.', '_').replace('-', '_').upper()\n return native_value(os.getenv(envkey, value))\n\n\ndef encode64(content, encoding='utf-8'):\n \"\"\"Encode some content in base64.\n\n :rtype: str\n \"\"\"\n return b64encode(content.encode(encoding)).decode(encoding)\n\n\ndef decode64(encoded_content, encoding='utf-8'):\n \"\"\"Decode some base64 encoded content.\n\n :rtype: str\n \"\"\"\n return b64decode(encoded_content.encode(encoding)).decode(encoding)\n\n\ndef hmac_digest(secret, message, encoding='utf-8'):\n \"\"\"Return hex digest of a message HMAC using secret\"\"\"\n if isinstance(secret, six.text_type):\n secret = secret.encode(encoding)\n return hmac.new(secret,\n message.encode(encoding),\n hashlib.sha256).hexdigest()\n\n\ndef dict_subset(d, keys):\n \"\"\"Return a dict with the specified keys\"\"\"\n result = {}\n\n for key in keys:\n if '.' in key:\n field, subfield = key.split('.', 1)\n if isinstance(d.get(field), dict):\n subvalue = dict_subset(d[field], [subfield])\n result.setdefault(field, {}).update(subvalue)\n elif field in d:\n result[field] = d[field]\n else:\n if key in d:\n result[key] = d[key]\n\n return result\n\n\nclass COMPARISON(Enum):\n LT = '<'\n MIN = '>='\n MAX = '<='\n NOT = '!='\n EQ = '=='\n GT = '>'\n IN = 'in'\n EXCLUDE = 'exclude'\n LIKE = 'like'\n\n\ndef reapply_cors(request, response):\n \"\"\"Reapply cors headers to the new response with regards to the request.\n\n We need to re-apply the CORS checks done by Cornice, in case we're\n recreating the response from scratch.\n\n \"\"\"\n service = request.current_service\n if service:\n request.info['cors_checked'] = False\n cors.apply_cors_post_request(service, request, response)\n response = cors.ensure_origin(service, request, response)\n else:\n # No existing service is concerned, and Cornice is not implied.\n origin = request.headers.get('Origin')\n if origin:\n settings = request.registry.settings\n allowed_origins = set(aslist(settings['cors_origins']))\n required_origins = {'*', decode_header(origin)}\n if allowed_origins.intersection(required_origins):\n origin = encode_header(origin)\n response.headers['Access-Control-Allow-Origin'] = origin\n\n # Import service here because kinto.core import utils\n from kinto.core import Service\n if Service.default_cors_headers:\n headers = ','.join(Service.default_cors_headers)\n response.headers['Access-Control-Expose-Headers'] = headers\n return response\n\n\ndef current_service(request):\n \"\"\"Return the Cornice service matching the specified request.\n\n :returns: the service or None if unmatched.\n :rtype: cornice.Service\n \"\"\"\n if request.matched_route:\n services = request.registry.cornice_services\n pattern = request.matched_route.pattern\n try:\n service = services[pattern]\n except KeyError:\n return None\n else:\n return service\n\n\ndef current_resource_name(request):\n \"\"\"Return the name used when the kinto.core resource was registered along its\n viewset.\n\n :returns: the resource identifier.\n :rtype: str\n \"\"\"\n service = current_service(request)\n resource_name = 
service.viewset.get_name(service.resource)\n return resource_name\n\n\ndef build_request(original, dict_obj):\n \"\"\"\n Transform a dict object into a :class:`pyramid.request.Request` object.\n\n It sets a ``parent`` attribute on the resulting request assigned with\n the `original` request specified.\n\n :param original: the original request.\n :param dict_obj: a dict object with the sub-request specifications.\n \"\"\"\n api_prefix = '/%s' % original.upath_info.split('/')[1]\n path = dict_obj['path']\n if not path.startswith(api_prefix):\n path = api_prefix + path\n\n path = path.encode('utf-8')\n\n method = dict_obj.get('method') or 'GET'\n\n headers = dict(original.headers)\n headers.update(**dict_obj.get('headers') or {})\n # Body can have different length, do not use original header.\n headers.pop('Content-Length', None)\n\n payload = dict_obj.get('body') or ''\n\n # Payload is always a dict (from ``BatchRequestSchema.body``).\n # Send it as JSON for subrequests.\n if isinstance(payload, dict):\n headers['Content-Type'] = encode_header(\n 'application/json; charset=utf-8')\n payload = json.dumps(payload)\n\n if six.PY3: # pragma: no cover\n path = path.decode('latin-1')\n\n request = Request.blank(path=path,\n headers=headers,\n POST=payload,\n method=method)\n request.registry = original.registry\n apply_request_extensions(request)\n\n # This is used to distinguish subrequests from direct incoming requests.\n # See :func:`kinto.core.initialization.setup_logging()`\n request.parent = original\n\n return request\n\n\ndef build_response(response, request):\n \"\"\"\n Transform a :class:`pyramid.response.Response` object into a serializable\n dict.\n\n :param response: a response object, returned by Pyramid.\n :param request: the request that was used to get the response.\n \"\"\"\n dict_obj = {}\n dict_obj['path'] = urlparse.unquote(request.path)\n dict_obj['status'] = response.status_code\n dict_obj['headers'] = dict(response.headers)\n\n body = ''\n if request.method != 'HEAD':\n # XXX : Pyramid should not have built response body for HEAD!\n try:\n body = response.json\n except ValueError:\n body = response.body\n dict_obj['body'] = body\n\n return dict_obj\n\n\ndef follow_subrequest(request, subrequest, **kwargs):\n \"\"\"Run a subrequest (e.g. 
batch), and follow the redirection if any.\n\n :rtype: tuple\n :returns: the reponse and the redirection request (or `subrequest`\n if no redirection happened.)\n \"\"\"\n try:\n try:\n return request.invoke_subrequest(subrequest, **kwargs), subrequest\n except Exception as e:\n resp = render_view_to_response(e, subrequest)\n if not resp or resp.status_code >= 500:\n raise e\n raise resp\n except httpexceptions.HTTPRedirection as e:\n new_location = e.headers['Location']\n new_request = Request.blank(path=new_location,\n headers=subrequest.headers,\n POST=subrequest.body,\n method=subrequest.method)\n new_request.bound_data = subrequest.bound_data\n new_request.parent = getattr(subrequest, 'parent', None)\n return request.invoke_subrequest(new_request, **kwargs), new_request\n\n\ndef encode_header(value, encoding='utf-8'):\n return _encoded(value, encoding)\n\n\ndef _encoded(value, encoding='utf-8'):\n \"\"\"Make sure the value is of type ``str`` in both PY2 and PY3.\"\"\"\n value_type = type(value)\n if value_type != str:\n # Test for Python3\n if value_type == six.binary_type: # pragma: no cover\n value = value.decode(encoding)\n # Test for Python2\n elif value_type == six.text_type: # pragma: no cover\n value = value.encode(encoding)\n return value\n\n\ndef decode_header(value, encoding='utf-8'):\n \"\"\"Make sure the header is an unicode string.\"\"\"\n if type(value) == six.binary_type:\n value = value.decode(encoding)\n return value\n\n\ndef strip_uri_prefix(path):\n \"\"\"\n Remove potential version prefix in URI.\n \"\"\"\n return re.sub(r'^(/v\\d+)?', '', six.text_type(path))\n\n\ndef view_lookup(request, uri):\n \"\"\"\n Look-up the specified `uri` and return the associated resource name\n along the match dict.\n\n :param request: the current request (used to obtain registry).\n :param uri: a plural or object endpoint URI.\n :rtype: tuple\n :returns: the resource name and the associated matchdict.\n \"\"\"\n api_prefix = '/%s' % request.upath_info.split('/')[1]\n # Path should be bytes in PY2, and unicode in PY3\n path = _encoded(api_prefix + uri)\n\n q = request.registry.queryUtility\n routes_mapper = q(IRoutesMapper)\n\n fakerequest = Request.blank(path=path)\n info = routes_mapper(fakerequest)\n matchdict, route = info['match'], info['route']\n if route is None:\n raise ValueError(\"URI has no route\")\n\n resource_name = route.name.replace('-record', '')\\\n .replace('-collection', '')\n return resource_name, matchdict\n\n\ndef instance_uri(request, resource_name, **params):\n \"\"\"Return the URI for the given resource.\"\"\"\n return strip_uri_prefix(request.route_path('%s-record' % resource_name,\n **params))\n\n\ndef parse_resource(resource):\n \"\"\"Extract the bucket_id and collection_id of the given resource (URI)\n\n :param str resource: a uri formatted /buckets/<bid>/collections/<cid> or <bid>/<cid>.\n :returns: a dictionary with the bucket_id and collection_id of the resource\n \"\"\"\n\n error_msg = \"Resources should be defined as \"\n \"'/buckets/<bid>/collections/<cid>' or '<bid>/<cid>'. 
\"\n \"with valid collection and bucket ids.\"\n\n from kinto.views import NameGenerator\n id_generator = NameGenerator()\n parts = resource.split('/')\n if len(parts) == 2:\n bucket, collection = parts\n elif len(parts) == 5:\n _, _, bucket, _, collection = parts\n else:\n raise ValueError(error_msg)\n if bucket == '' or collection == '':\n raise ValueError(error_msg)\n if not id_generator.match(bucket) or not id_generator.match(collection):\n raise ValueError(error_msg)\n return {\n 'bucket': bucket,\n 'collection': collection\n }\n\n\ndef apply_json_patch(record, ops):\n \"\"\"\n Apply JSON Patch operations using jsonpatch.\n\n :param record: base record where changes should be applied (not in-place).\n :param list changes: list of JSON patch operations.\n :param bool only_data: param to limit the scope of the patch only to 'data'.\n :returns dict data: patched record data.\n dict permissions: patched record permissions\n \"\"\"\n data = record.copy()\n\n # Permissions should always have read and write fields defined (to allow add)\n permissions = {'read': set(), 'write': set()}\n\n # Get permissions if available on the resource (using SharableResource)\n permissions.update(data.pop('__permissions__', {}))\n\n # Permissions should be mapped as a dict, since jsonpatch doesn't accept\n # sets and lists are mapped as JSON arrays (not indexed by value)\n permissions = {k: {i: i for i in v} for k, v in permissions.items()}\n\n resource = {'data': data, 'permissions': permissions}\n\n # Allow patch permissions without value since key and value are equal on sets\n for op in ops:\n if 'path' in op:\n if op['path'].startswith(('/permissions/read/',\n '/permissions/write/')):\n op['value'] = op['path'].split('/')[-1]\n\n try:\n result = jsonpatch.apply_patch(resource, ops)\n\n except (jsonpatch.JsonPatchException, jsonpatch.JsonPointerException) as e:\n raise ValueError(e)\n\n return result\n", "path": "kinto/core/utils.py" } ]
diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c8234d0f4..c2c39f8b0 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -19,6 +19,7 @@ This document describes changes between each past release. **Bug fixes** - Fixed showing of backend type twice in StatsD backend keys (fixes #857) +- Fix crash when querystring parameter contains null string (fixes #882) **Internal changes** diff --git a/kinto/core/utils.py b/kinto/core/utils.py index 26e664294..b87670111 100644 --- a/kinto/core/utils.py +++ b/kinto/core/utils.py @@ -148,7 +148,7 @@ def native_value(value): value = False try: return ast.literal_eval(value) - except (ValueError, SyntaxError): + except (TypeError, ValueError, SyntaxError): pass return value diff --git a/tests/core/test_utils.py b/tests/core/test_utils.py index f136c6cb6..6b49aff4a 100644 --- a/tests/core/test_utils.py +++ b/tests/core/test_utils.py @@ -59,6 +59,9 @@ def test_non_string_values(self): self.assertEqual(native_value(7), 7) self.assertEqual(native_value(True), True) + def test_bad_string_values(self): + self.assertEqual(native_value("\u0000"), "\x00") + class StripWhitespaceTest(unittest.TestCase): def test_removes_all_kinds_of_spaces(self):
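The fix simply widens the `except` clause in `native_value()`: Python 2's `compile()` raises `TypeError` on an embedded null byte, while Python 3 raises `ValueError` (or `SyntaxError`), so catching all three lets the raw string fall through instead of bubbling up as a 500. A simplified standalone sketch of the patched behaviour (the real function also special-cases booleans):

```python
import ast


def native_value(value):
    """Simplified stand-in for kinto.core.utils.native_value after the fix."""
    try:
        return ast.literal_eval(value)
    except (TypeError, ValueError, SyntaxError):
        # TypeError: Python 2 compile() on null bytes; ValueError/SyntaxError: other unparseable input.
        return value


print(repr(native_value("\u0000")))  # prints '\x00': the raw value comes back instead of a crash
print(native_value("42"))            # still coerced to the int 42
```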
celery__celery-8650
Celery exit with non-zero code after Warm Shutdown in Celery 5.3.x ### Discussed in https://github.com/celery/celery/discussions/8539 <div type='discussions-op-text'> <sup>Originally posted by **cinesia** September 27, 2023</sup> We upgraded recently **celery** from **5.2.7** to **5.3.4** and something changed in the default behaviour of a celery worker when it receives a warm shutdown. Before the upgrade, the worker exited with zero code and now the worker exit with non-zero code (1). The code it's the same and nothing changed except the package upgrade. I succeed in reproducing the error in a clean environment where only celery is installed. To reproduce the behaviour: - Create a simple Celery worker tasks.py ```python from celery import Celery app = Celery('tasks') @app.task def add(x, y): return x + y ``` Dockerfile ```Dockerfile # Use an official Python runtime as the base image FROM python:3.9-slim # Set the working directory in the container WORKDIR /app # Copy the dependencies file to the working directory (it has just a line with celery==...) COPY requirements.txt . # Install the dependencies RUN pip install --no-cache-dir -r requirements.txt # Copy the rest of the application's code to the working directory COPY . . # Define the command to run your Celery worker CMD ["celery", "--app=tasks", "worker", "--loglevel=info"] ``` docker-compose ```yaml version: '3.7' services: # Deploy the broker. rabbitmq_server: image: rabbitmq:3-management ports: # Expose the port for the worker to add/get tasks - 5672:5672 # OPTIONAL: Expose the GUI port - 15672:15672 # Deploy the worker worker: # Build using the worker Dockerfile build: context: . dockerfile: Dockerfile # Need to access the database # OPTIONAL: If your worker needs to access your db that is deployed locally, then make the network mode as host. network_mode: host # Pass the rabbitmq_uri as an environment variable in order to connect to our service environment: # NOTE: Below we are using 127.0.0.1 because this container will run on the host network, thus it will have access to the host network. - CELERY_BROKER_URL=amqp://[email protected]:5672// ``` - Open a python console inside the celery container and send a shutdown ```python import celery app = celery.Celery("tasks") app.control.shutdown() ``` If celery == 5.2.7 the container exit with code 0, if celery == 5.3.4 the container exit with code 1. </div>
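For confirming the exit-status regression without the docker-compose setup above, a hypothetical local sketch (not from the report; assumes a broker is reachable and `tasks.py` is on the path):

```python
# Start the worker as a child process, broadcast a warm shutdown, inspect the exit code.
import subprocess
import time

import celery

worker = subprocess.Popen(["celery", "--app=tasks", "worker", "--loglevel=info"])
time.sleep(10)  # give the worker time to boot and connect to the broker

app = celery.Celery("tasks")  # must point at the same broker as the worker
app.control.shutdown()        # warm shutdown broadcast, as in the report

worker.wait()
print(worker.returncode)  # reported as 0 on celery 5.2.7 and 1 on 5.3.4
```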
[ { "content": "\"\"\"Worker remote control command implementations.\"\"\"\nimport io\nimport tempfile\nfrom collections import UserDict, defaultdict, namedtuple\n\nfrom billiard.common import TERM_SIGNAME\nfrom kombu.utils.encoding import safe_repr\n\nfrom celery.exceptions import WorkerShutdown\nfrom celery.platforms import signals as _signals\nfrom celery.utils.functional import maybe_list\nfrom celery.utils.log import get_logger\nfrom celery.utils.serialization import jsonify, strtobool\nfrom celery.utils.time import rate\n\nfrom . import state as worker_state\nfrom .request import Request\n\n__all__ = ('Panel',)\n\nDEFAULT_TASK_INFO_ITEMS = ('exchange', 'routing_key', 'rate_limit')\nlogger = get_logger(__name__)\n\ncontroller_info_t = namedtuple('controller_info_t', [\n 'alias', 'type', 'visible', 'default_timeout',\n 'help', 'signature', 'args', 'variadic',\n])\n\n\ndef ok(value):\n return {'ok': value}\n\n\ndef nok(value):\n return {'error': value}\n\n\nclass Panel(UserDict):\n \"\"\"Global registry of remote control commands.\"\"\"\n\n data = {} # global dict.\n meta = {} # -\"-\n\n @classmethod\n def register(cls, *args, **kwargs):\n if args:\n return cls._register(**kwargs)(*args)\n return cls._register(**kwargs)\n\n @classmethod\n def _register(cls, name=None, alias=None, type='control',\n visible=True, default_timeout=1.0, help=None,\n signature=None, args=None, variadic=None):\n\n def _inner(fun):\n control_name = name or fun.__name__\n _help = help or (fun.__doc__ or '').strip().split('\\n')[0]\n cls.data[control_name] = fun\n cls.meta[control_name] = controller_info_t(\n alias, type, visible, default_timeout,\n _help, signature, args, variadic)\n if alias:\n cls.data[alias] = fun\n return fun\n return _inner\n\n\ndef control_command(**kwargs):\n return Panel.register(type='control', **kwargs)\n\n\ndef inspect_command(**kwargs):\n return Panel.register(type='inspect', **kwargs)\n\n# -- App\n\n\n@inspect_command()\ndef report(state):\n \"\"\"Information about Celery installation for bug reports.\"\"\"\n return ok(state.app.bugreport())\n\n\n@inspect_command(\n alias='dump_conf', # XXX < backwards compatible\n signature='[include_defaults=False]',\n args=[('with_defaults', strtobool)],\n)\ndef conf(state, with_defaults=False, **kwargs):\n \"\"\"List configuration.\"\"\"\n return jsonify(state.app.conf.table(with_defaults=with_defaults),\n keyfilter=_wanted_config_key,\n unknown_type_filter=safe_repr)\n\n\ndef _wanted_config_key(key):\n return isinstance(key, str) and not key.startswith('__')\n\n\n# -- Task\n\n@inspect_command(\n variadic='ids',\n signature='[id1 [id2 [... [idN]]]]',\n)\ndef query_task(state, ids, **kwargs):\n \"\"\"Query for task information by id.\"\"\"\n return {\n req.id: (_state_of_task(req), req.info())\n for req in _find_requests_by_id(maybe_list(ids))\n }\n\n\ndef _find_requests_by_id(ids,\n get_request=worker_state.requests.__getitem__):\n for task_id in ids:\n try:\n yield get_request(task_id)\n except KeyError:\n pass\n\n\ndef _state_of_task(request,\n is_active=worker_state.active_requests.__contains__,\n is_reserved=worker_state.reserved_requests.__contains__):\n if is_active(request):\n return 'active'\n elif is_reserved(request):\n return 'reserved'\n return 'ready'\n\n\n@control_command(\n variadic='task_id',\n signature='[id1 [id2 [... 
[idN]]]]',\n)\ndef revoke(state, task_id, terminate=False, signal=None, **kwargs):\n \"\"\"Revoke task by task id (or list of ids).\n\n Keyword Arguments:\n terminate (bool): Also terminate the process if the task is active.\n signal (str): Name of signal to use for terminate (e.g., ``KILL``).\n \"\"\"\n # pylint: disable=redefined-outer-name\n # XXX Note that this redefines `terminate`:\n # Outside of this scope that is a function.\n # supports list argument since 3.1\n task_ids, task_id = set(maybe_list(task_id) or []), None\n task_ids = _revoke(state, task_ids, terminate, signal, **kwargs)\n if isinstance(task_ids, dict) and 'ok' in task_ids:\n return task_ids\n return ok(f'tasks {task_ids} flagged as revoked')\n\n\n@control_command(\n variadic='headers',\n signature='[key1=value1 [key2=value2 [... [keyN=valueN]]]]',\n)\ndef revoke_by_stamped_headers(state, headers, terminate=False, signal=None, **kwargs):\n \"\"\"Revoke task by header (or list of headers).\n\n Keyword Arguments:\n headers(dictionary): Dictionary that contains stamping scheme name as keys and stamps as values.\n If headers is a list, it will be converted to a dictionary.\n terminate (bool): Also terminate the process if the task is active.\n signal (str): Name of signal to use for terminate (e.g., ``KILL``).\n Sample headers input:\n {'mtask_id': [id1, id2, id3]}\n \"\"\"\n # pylint: disable=redefined-outer-name\n # XXX Note that this redefines `terminate`:\n # Outside of this scope that is a function.\n # supports list argument since 3.1\n signum = _signals.signum(signal or TERM_SIGNAME)\n\n if isinstance(headers, list):\n headers = {h.split('=')[0]: h.split('=')[1] for h in headers}\n\n for header, stamps in headers.items():\n updated_stamps = maybe_list(worker_state.revoked_stamps.get(header) or []) + list(maybe_list(stamps))\n worker_state.revoked_stamps[header] = updated_stamps\n\n if not terminate:\n return ok(f'headers {headers} flagged as revoked, but not terminated')\n\n active_requests = list(worker_state.active_requests)\n\n terminated_scheme_to_stamps_mapping = defaultdict(set)\n\n # Terminate all running tasks of matching headers\n # Go through all active requests, and check if one of the\n # requests has a stamped header that matches the given headers to revoke\n\n for req in active_requests:\n # Check stamps exist\n if hasattr(req, \"stamps\") and req.stamps:\n # if so, check if any stamps match a revoked stamp\n for expected_header_key, expected_header_value in headers.items():\n if expected_header_key in req.stamps:\n expected_header_value = maybe_list(expected_header_value)\n actual_header = maybe_list(req.stamps[expected_header_key])\n matching_stamps_for_request = set(actual_header) & set(expected_header_value)\n # Check any possible match regardless if the stamps are a sequence or not\n if matching_stamps_for_request:\n terminated_scheme_to_stamps_mapping[expected_header_key].update(matching_stamps_for_request)\n req.terminate(state.consumer.pool, signal=signum)\n\n if not terminated_scheme_to_stamps_mapping:\n return ok(f'headers {headers} were not terminated')\n return ok(f'headers {terminated_scheme_to_stamps_mapping} revoked')\n\n\ndef _revoke(state, task_ids, terminate=False, signal=None, **kwargs):\n size = len(task_ids)\n terminated = set()\n\n worker_state.revoked.update(task_ids)\n if terminate:\n signum = _signals.signum(signal or TERM_SIGNAME)\n for request in _find_requests_by_id(task_ids):\n if request.id not in terminated:\n terminated.add(request.id)\n logger.info('Terminating %s 
(%s)', request.id, signum)\n request.terminate(state.consumer.pool, signal=signum)\n if len(terminated) >= size:\n break\n\n if not terminated:\n return ok('terminate: tasks unknown')\n return ok('terminate: {}'.format(', '.join(terminated)))\n\n idstr = ', '.join(task_ids)\n logger.info('Tasks flagged as revoked: %s', idstr)\n return task_ids\n\n\n@control_command(\n variadic='task_id',\n args=[('signal', str)],\n signature='<signal> [id1 [id2 [... [idN]]]]'\n)\ndef terminate(state, signal, task_id, **kwargs):\n \"\"\"Terminate task by task id (or list of ids).\"\"\"\n return revoke(state, task_id, terminate=True, signal=signal)\n\n\n@control_command(\n args=[('task_name', str), ('rate_limit', str)],\n signature='<task_name> <rate_limit (e.g., 5/s | 5/m | 5/h)>',\n)\ndef rate_limit(state, task_name, rate_limit, **kwargs):\n \"\"\"Tell worker(s) to modify the rate limit for a task by type.\n\n See Also:\n :attr:`celery.app.task.Task.rate_limit`.\n\n Arguments:\n task_name (str): Type of task to set rate limit for.\n rate_limit (int, str): New rate limit.\n \"\"\"\n # pylint: disable=redefined-outer-name\n # XXX Note that this redefines `terminate`:\n # Outside of this scope that is a function.\n try:\n rate(rate_limit)\n except ValueError as exc:\n return nok(f'Invalid rate limit string: {exc!r}')\n\n try:\n state.app.tasks[task_name].rate_limit = rate_limit\n except KeyError:\n logger.error('Rate limit attempt for unknown task %s',\n task_name, exc_info=True)\n return nok('unknown task')\n\n state.consumer.reset_rate_limits()\n\n if not rate_limit:\n logger.info('Rate limits disabled for tasks of type %s', task_name)\n return ok('rate limit disabled successfully')\n\n logger.info('New rate limit for tasks of type %s: %s.',\n task_name, rate_limit)\n return ok('new rate limit set successfully')\n\n\n@control_command(\n args=[('task_name', str), ('soft', float), ('hard', float)],\n signature='<task_name> <soft_secs> [hard_secs]',\n)\ndef time_limit(state, task_name=None, hard=None, soft=None, **kwargs):\n \"\"\"Tell worker(s) to modify the time limit for task by type.\n\n Arguments:\n task_name (str): Name of task to change.\n hard (float): Hard time limit.\n soft (float): Soft time limit.\n \"\"\"\n try:\n task = state.app.tasks[task_name]\n except KeyError:\n logger.error('Change time limit attempt for unknown task %s',\n task_name, exc_info=True)\n return nok('unknown task')\n\n task.soft_time_limit = soft\n task.time_limit = hard\n\n logger.info('New time limits for tasks of type %s: soft=%s hard=%s',\n task_name, soft, hard)\n return ok('time limits set successfully')\n\n\n# -- Events\n\n\n@inspect_command()\ndef clock(state, **kwargs):\n \"\"\"Get current logical clock value.\"\"\"\n return {'clock': state.app.clock.value}\n\n\n@control_command()\ndef election(state, id, topic, action=None, **kwargs):\n \"\"\"Hold election.\n\n Arguments:\n id (str): Unique election id.\n topic (str): Election topic.\n action (str): Action to take for elected actor.\n \"\"\"\n if state.consumer.gossip:\n state.consumer.gossip.election(id, topic, action)\n\n\n@control_command()\ndef enable_events(state):\n \"\"\"Tell worker(s) to send task-related events.\"\"\"\n dispatcher = state.consumer.event_dispatcher\n if dispatcher.groups and 'task' not in dispatcher.groups:\n dispatcher.groups.add('task')\n logger.info('Events of group {task} enabled by remote.')\n return ok('task events enabled')\n return ok('task events already enabled')\n\n\n@control_command()\ndef disable_events(state):\n \"\"\"Tell 
worker(s) to stop sending task-related events.\"\"\"\n dispatcher = state.consumer.event_dispatcher\n if 'task' in dispatcher.groups:\n dispatcher.groups.discard('task')\n logger.info('Events of group {task} disabled by remote.')\n return ok('task events disabled')\n return ok('task events already disabled')\n\n\n@control_command()\ndef heartbeat(state):\n \"\"\"Tell worker(s) to send event heartbeat immediately.\"\"\"\n logger.debug('Heartbeat requested by remote.')\n dispatcher = state.consumer.event_dispatcher\n dispatcher.send('worker-heartbeat', freq=5, **worker_state.SOFTWARE_INFO)\n\n\n# -- Worker\n\n@inspect_command(visible=False)\ndef hello(state, from_node, revoked=None, **kwargs):\n \"\"\"Request mingle sync-data.\"\"\"\n # pylint: disable=redefined-outer-name\n # XXX Note that this redefines `revoked`:\n # Outside of this scope that is a function.\n if from_node != state.hostname:\n logger.info('sync with %s', from_node)\n if revoked:\n worker_state.revoked.update(revoked)\n # Do not send expired items to the other worker.\n worker_state.revoked.purge()\n return {\n 'revoked': worker_state.revoked._data,\n 'clock': state.app.clock.forward(),\n }\n\n\n@inspect_command(default_timeout=0.2)\ndef ping(state, **kwargs):\n \"\"\"Ping worker(s).\"\"\"\n return ok('pong')\n\n\n@inspect_command()\ndef stats(state, **kwargs):\n \"\"\"Request worker statistics/information.\"\"\"\n return state.consumer.controller.stats()\n\n\n@inspect_command(alias='dump_schedule')\ndef scheduled(state, **kwargs):\n \"\"\"List of currently scheduled ETA/countdown tasks.\"\"\"\n return list(_iter_schedule_requests(state.consumer.timer))\n\n\ndef _iter_schedule_requests(timer):\n for waiting in timer.schedule.queue:\n try:\n arg0 = waiting.entry.args[0]\n except (IndexError, TypeError):\n continue\n else:\n if isinstance(arg0, Request):\n yield {\n 'eta': arg0.eta.isoformat() if arg0.eta else None,\n 'priority': waiting.priority,\n 'request': arg0.info(),\n }\n\n\n@inspect_command(alias='dump_reserved')\ndef reserved(state, **kwargs):\n \"\"\"List of currently reserved tasks, not including scheduled/active.\"\"\"\n reserved_tasks = (\n state.tset(worker_state.reserved_requests) -\n state.tset(worker_state.active_requests)\n )\n if not reserved_tasks:\n return []\n return [request.info() for request in reserved_tasks]\n\n\n@inspect_command(alias='dump_active')\ndef active(state, safe=False, **kwargs):\n \"\"\"List of tasks currently being executed.\"\"\"\n return [request.info(safe=safe)\n for request in state.tset(worker_state.active_requests)]\n\n\n@inspect_command(alias='dump_revoked')\ndef revoked(state, **kwargs):\n \"\"\"List of revoked task-ids.\"\"\"\n return list(worker_state.revoked)\n\n\n@inspect_command(\n alias='dump_tasks',\n variadic='taskinfoitems',\n signature='[attr1 [attr2 [... 
[attrN]]]]',\n)\ndef registered(state, taskinfoitems=None, builtins=False, **kwargs):\n \"\"\"List of registered tasks.\n\n Arguments:\n taskinfoitems (Sequence[str]): List of task attributes to include.\n Defaults to ``exchange,routing_key,rate_limit``.\n builtins (bool): Also include built-in tasks.\n \"\"\"\n reg = state.app.tasks\n taskinfoitems = taskinfoitems or DEFAULT_TASK_INFO_ITEMS\n\n tasks = reg if builtins else (\n task for task in reg if not task.startswith('celery.'))\n\n def _extract_info(task):\n fields = {\n field: str(getattr(task, field, None)) for field in taskinfoitems\n if getattr(task, field, None) is not None\n }\n if fields:\n info = ['='.join(f) for f in fields.items()]\n return '{} [{}]'.format(task.name, ' '.join(info))\n return task.name\n\n return [_extract_info(reg[task]) for task in sorted(tasks)]\n\n\n# -- Debugging\n\n@inspect_command(\n default_timeout=60.0,\n args=[('type', str), ('num', int), ('max_depth', int)],\n signature='[object_type=Request] [num=200 [max_depth=10]]',\n)\ndef objgraph(state, num=200, max_depth=10, type='Request'): # pragma: no cover\n \"\"\"Create graph of uncollected objects (memory-leak debugging).\n\n Arguments:\n num (int): Max number of objects to graph.\n max_depth (int): Traverse at most n levels deep.\n type (str): Name of object to graph. Default is ``\"Request\"``.\n \"\"\"\n try:\n import objgraph as _objgraph\n except ImportError:\n raise ImportError('Requires the objgraph library')\n logger.info('Dumping graph for type %r', type)\n with tempfile.NamedTemporaryFile(prefix='cobjg',\n suffix='.png', delete=False) as fh:\n objects = _objgraph.by_type(type)[:num]\n _objgraph.show_backrefs(\n objects,\n max_depth=max_depth, highlight=lambda v: v in objects,\n filename=fh.name,\n )\n return {'filename': fh.name}\n\n\n@inspect_command()\ndef memsample(state, **kwargs):\n \"\"\"Sample current RSS memory usage.\"\"\"\n from celery.utils.debug import sample_mem\n return sample_mem()\n\n\n@inspect_command(\n args=[('samples', int)],\n signature='[n_samples=10]',\n)\ndef memdump(state, samples=10, **kwargs): # pragma: no cover\n \"\"\"Dump statistics of previous memsample requests.\"\"\"\n from celery.utils import debug\n out = io.StringIO()\n debug.memdump(file=out)\n return out.getvalue()\n\n# -- Pool\n\n\n@control_command(\n args=[('n', int)],\n signature='[N=1]',\n)\ndef pool_grow(state, n=1, **kwargs):\n \"\"\"Grow pool by n processes/threads.\"\"\"\n if state.consumer.controller.autoscaler:\n return nok(\"pool_grow is not supported with autoscale. Adjust autoscale range instead.\")\n else:\n state.consumer.pool.grow(n)\n state.consumer._update_prefetch_count(n)\n return ok('pool will grow')\n\n\n@control_command(\n args=[('n', int)],\n signature='[N=1]',\n)\ndef pool_shrink(state, n=1, **kwargs):\n \"\"\"Shrink pool by n processes/threads.\"\"\"\n if state.consumer.controller.autoscaler:\n return nok(\"pool_shrink is not supported with autoscale. 
Adjust autoscale range instead.\")\n else:\n state.consumer.pool.shrink(n)\n state.consumer._update_prefetch_count(-n)\n return ok('pool will shrink')\n\n\n@control_command()\ndef pool_restart(state, modules=None, reload=False, reloader=None, **kwargs):\n \"\"\"Restart execution pool.\"\"\"\n if state.app.conf.worker_pool_restarts:\n state.consumer.controller.reload(modules, reload, reloader=reloader)\n return ok('reload started')\n else:\n raise ValueError('Pool restarts not enabled')\n\n\n@control_command(\n args=[('max', int), ('min', int)],\n signature='[max [min]]',\n)\ndef autoscale(state, max=None, min=None):\n \"\"\"Modify autoscale settings.\"\"\"\n autoscaler = state.consumer.controller.autoscaler\n if autoscaler:\n max_, min_ = autoscaler.update(max, min)\n return ok(f'autoscale now max={max_} min={min_}')\n raise ValueError('Autoscale not enabled')\n\n\n@control_command()\ndef shutdown(state, msg='Got shutdown from remote', **kwargs):\n \"\"\"Shutdown worker(s).\"\"\"\n logger.warning(msg)\n raise WorkerShutdown(msg)\n\n\n# -- Queues\n\n@control_command(\n args=[\n ('queue', str),\n ('exchange', str),\n ('exchange_type', str),\n ('routing_key', str),\n ],\n signature='<queue> [exchange [type [routing_key]]]',\n)\ndef add_consumer(state, queue, exchange=None, exchange_type=None,\n routing_key=None, **options):\n \"\"\"Tell worker(s) to consume from task queue by name.\"\"\"\n state.consumer.call_soon(\n state.consumer.add_task_queue,\n queue, exchange, exchange_type or 'direct', routing_key, **options)\n return ok(f'add consumer {queue}')\n\n\n@control_command(\n args=[('queue', str)],\n signature='<queue>',\n)\ndef cancel_consumer(state, queue, **_):\n \"\"\"Tell worker(s) to stop consuming from task queue by name.\"\"\"\n state.consumer.call_soon(\n state.consumer.cancel_task_queue, queue,\n )\n return ok(f'no longer consuming from {queue}')\n\n\n@inspect_command()\ndef active_queues(state):\n \"\"\"List the task queues a worker is currently consuming from.\"\"\"\n if state.consumer.task_consumer:\n return [dict(queue.as_dict(recurse=True))\n for queue in state.consumer.task_consumer.queues]\n return []\n", "path": "celery/worker/control.py" } ]
[ { "content": "\"\"\"Worker remote control command implementations.\"\"\"\nimport io\nimport tempfile\nfrom collections import UserDict, defaultdict, namedtuple\n\nfrom billiard.common import TERM_SIGNAME\nfrom kombu.utils.encoding import safe_repr\n\nfrom celery.exceptions import WorkerShutdown\nfrom celery.platforms import signals as _signals\nfrom celery.utils.functional import maybe_list\nfrom celery.utils.log import get_logger\nfrom celery.utils.serialization import jsonify, strtobool\nfrom celery.utils.time import rate\n\nfrom . import state as worker_state\nfrom .request import Request\n\n__all__ = ('Panel',)\n\nDEFAULT_TASK_INFO_ITEMS = ('exchange', 'routing_key', 'rate_limit')\nlogger = get_logger(__name__)\n\ncontroller_info_t = namedtuple('controller_info_t', [\n 'alias', 'type', 'visible', 'default_timeout',\n 'help', 'signature', 'args', 'variadic',\n])\n\n\ndef ok(value):\n return {'ok': value}\n\n\ndef nok(value):\n return {'error': value}\n\n\nclass Panel(UserDict):\n \"\"\"Global registry of remote control commands.\"\"\"\n\n data = {} # global dict.\n meta = {} # -\"-\n\n @classmethod\n def register(cls, *args, **kwargs):\n if args:\n return cls._register(**kwargs)(*args)\n return cls._register(**kwargs)\n\n @classmethod\n def _register(cls, name=None, alias=None, type='control',\n visible=True, default_timeout=1.0, help=None,\n signature=None, args=None, variadic=None):\n\n def _inner(fun):\n control_name = name or fun.__name__\n _help = help or (fun.__doc__ or '').strip().split('\\n')[0]\n cls.data[control_name] = fun\n cls.meta[control_name] = controller_info_t(\n alias, type, visible, default_timeout,\n _help, signature, args, variadic)\n if alias:\n cls.data[alias] = fun\n return fun\n return _inner\n\n\ndef control_command(**kwargs):\n return Panel.register(type='control', **kwargs)\n\n\ndef inspect_command(**kwargs):\n return Panel.register(type='inspect', **kwargs)\n\n# -- App\n\n\n@inspect_command()\ndef report(state):\n \"\"\"Information about Celery installation for bug reports.\"\"\"\n return ok(state.app.bugreport())\n\n\n@inspect_command(\n alias='dump_conf', # XXX < backwards compatible\n signature='[include_defaults=False]',\n args=[('with_defaults', strtobool)],\n)\ndef conf(state, with_defaults=False, **kwargs):\n \"\"\"List configuration.\"\"\"\n return jsonify(state.app.conf.table(with_defaults=with_defaults),\n keyfilter=_wanted_config_key,\n unknown_type_filter=safe_repr)\n\n\ndef _wanted_config_key(key):\n return isinstance(key, str) and not key.startswith('__')\n\n\n# -- Task\n\n@inspect_command(\n variadic='ids',\n signature='[id1 [id2 [... [idN]]]]',\n)\ndef query_task(state, ids, **kwargs):\n \"\"\"Query for task information by id.\"\"\"\n return {\n req.id: (_state_of_task(req), req.info())\n for req in _find_requests_by_id(maybe_list(ids))\n }\n\n\ndef _find_requests_by_id(ids,\n get_request=worker_state.requests.__getitem__):\n for task_id in ids:\n try:\n yield get_request(task_id)\n except KeyError:\n pass\n\n\ndef _state_of_task(request,\n is_active=worker_state.active_requests.__contains__,\n is_reserved=worker_state.reserved_requests.__contains__):\n if is_active(request):\n return 'active'\n elif is_reserved(request):\n return 'reserved'\n return 'ready'\n\n\n@control_command(\n variadic='task_id',\n signature='[id1 [id2 [... 
[idN]]]]',\n)\ndef revoke(state, task_id, terminate=False, signal=None, **kwargs):\n \"\"\"Revoke task by task id (or list of ids).\n\n Keyword Arguments:\n terminate (bool): Also terminate the process if the task is active.\n signal (str): Name of signal to use for terminate (e.g., ``KILL``).\n \"\"\"\n # pylint: disable=redefined-outer-name\n # XXX Note that this redefines `terminate`:\n # Outside of this scope that is a function.\n # supports list argument since 3.1\n task_ids, task_id = set(maybe_list(task_id) or []), None\n task_ids = _revoke(state, task_ids, terminate, signal, **kwargs)\n if isinstance(task_ids, dict) and 'ok' in task_ids:\n return task_ids\n return ok(f'tasks {task_ids} flagged as revoked')\n\n\n@control_command(\n variadic='headers',\n signature='[key1=value1 [key2=value2 [... [keyN=valueN]]]]',\n)\ndef revoke_by_stamped_headers(state, headers, terminate=False, signal=None, **kwargs):\n \"\"\"Revoke task by header (or list of headers).\n\n Keyword Arguments:\n headers(dictionary): Dictionary that contains stamping scheme name as keys and stamps as values.\n If headers is a list, it will be converted to a dictionary.\n terminate (bool): Also terminate the process if the task is active.\n signal (str): Name of signal to use for terminate (e.g., ``KILL``).\n Sample headers input:\n {'mtask_id': [id1, id2, id3]}\n \"\"\"\n # pylint: disable=redefined-outer-name\n # XXX Note that this redefines `terminate`:\n # Outside of this scope that is a function.\n # supports list argument since 3.1\n signum = _signals.signum(signal or TERM_SIGNAME)\n\n if isinstance(headers, list):\n headers = {h.split('=')[0]: h.split('=')[1] for h in headers}\n\n for header, stamps in headers.items():\n updated_stamps = maybe_list(worker_state.revoked_stamps.get(header) or []) + list(maybe_list(stamps))\n worker_state.revoked_stamps[header] = updated_stamps\n\n if not terminate:\n return ok(f'headers {headers} flagged as revoked, but not terminated')\n\n active_requests = list(worker_state.active_requests)\n\n terminated_scheme_to_stamps_mapping = defaultdict(set)\n\n # Terminate all running tasks of matching headers\n # Go through all active requests, and check if one of the\n # requests has a stamped header that matches the given headers to revoke\n\n for req in active_requests:\n # Check stamps exist\n if hasattr(req, \"stamps\") and req.stamps:\n # if so, check if any stamps match a revoked stamp\n for expected_header_key, expected_header_value in headers.items():\n if expected_header_key in req.stamps:\n expected_header_value = maybe_list(expected_header_value)\n actual_header = maybe_list(req.stamps[expected_header_key])\n matching_stamps_for_request = set(actual_header) & set(expected_header_value)\n # Check any possible match regardless if the stamps are a sequence or not\n if matching_stamps_for_request:\n terminated_scheme_to_stamps_mapping[expected_header_key].update(matching_stamps_for_request)\n req.terminate(state.consumer.pool, signal=signum)\n\n if not terminated_scheme_to_stamps_mapping:\n return ok(f'headers {headers} were not terminated')\n return ok(f'headers {terminated_scheme_to_stamps_mapping} revoked')\n\n\ndef _revoke(state, task_ids, terminate=False, signal=None, **kwargs):\n size = len(task_ids)\n terminated = set()\n\n worker_state.revoked.update(task_ids)\n if terminate:\n signum = _signals.signum(signal or TERM_SIGNAME)\n for request in _find_requests_by_id(task_ids):\n if request.id not in terminated:\n terminated.add(request.id)\n logger.info('Terminating %s 
(%s)', request.id, signum)\n request.terminate(state.consumer.pool, signal=signum)\n if len(terminated) >= size:\n break\n\n if not terminated:\n return ok('terminate: tasks unknown')\n return ok('terminate: {}'.format(', '.join(terminated)))\n\n idstr = ', '.join(task_ids)\n logger.info('Tasks flagged as revoked: %s', idstr)\n return task_ids\n\n\n@control_command(\n variadic='task_id',\n args=[('signal', str)],\n signature='<signal> [id1 [id2 [... [idN]]]]'\n)\ndef terminate(state, signal, task_id, **kwargs):\n \"\"\"Terminate task by task id (or list of ids).\"\"\"\n return revoke(state, task_id, terminate=True, signal=signal)\n\n\n@control_command(\n args=[('task_name', str), ('rate_limit', str)],\n signature='<task_name> <rate_limit (e.g., 5/s | 5/m | 5/h)>',\n)\ndef rate_limit(state, task_name, rate_limit, **kwargs):\n \"\"\"Tell worker(s) to modify the rate limit for a task by type.\n\n See Also:\n :attr:`celery.app.task.Task.rate_limit`.\n\n Arguments:\n task_name (str): Type of task to set rate limit for.\n rate_limit (int, str): New rate limit.\n \"\"\"\n # pylint: disable=redefined-outer-name\n # XXX Note that this redefines `terminate`:\n # Outside of this scope that is a function.\n try:\n rate(rate_limit)\n except ValueError as exc:\n return nok(f'Invalid rate limit string: {exc!r}')\n\n try:\n state.app.tasks[task_name].rate_limit = rate_limit\n except KeyError:\n logger.error('Rate limit attempt for unknown task %s',\n task_name, exc_info=True)\n return nok('unknown task')\n\n state.consumer.reset_rate_limits()\n\n if not rate_limit:\n logger.info('Rate limits disabled for tasks of type %s', task_name)\n return ok('rate limit disabled successfully')\n\n logger.info('New rate limit for tasks of type %s: %s.',\n task_name, rate_limit)\n return ok('new rate limit set successfully')\n\n\n@control_command(\n args=[('task_name', str), ('soft', float), ('hard', float)],\n signature='<task_name> <soft_secs> [hard_secs]',\n)\ndef time_limit(state, task_name=None, hard=None, soft=None, **kwargs):\n \"\"\"Tell worker(s) to modify the time limit for task by type.\n\n Arguments:\n task_name (str): Name of task to change.\n hard (float): Hard time limit.\n soft (float): Soft time limit.\n \"\"\"\n try:\n task = state.app.tasks[task_name]\n except KeyError:\n logger.error('Change time limit attempt for unknown task %s',\n task_name, exc_info=True)\n return nok('unknown task')\n\n task.soft_time_limit = soft\n task.time_limit = hard\n\n logger.info('New time limits for tasks of type %s: soft=%s hard=%s',\n task_name, soft, hard)\n return ok('time limits set successfully')\n\n\n# -- Events\n\n\n@inspect_command()\ndef clock(state, **kwargs):\n \"\"\"Get current logical clock value.\"\"\"\n return {'clock': state.app.clock.value}\n\n\n@control_command()\ndef election(state, id, topic, action=None, **kwargs):\n \"\"\"Hold election.\n\n Arguments:\n id (str): Unique election id.\n topic (str): Election topic.\n action (str): Action to take for elected actor.\n \"\"\"\n if state.consumer.gossip:\n state.consumer.gossip.election(id, topic, action)\n\n\n@control_command()\ndef enable_events(state):\n \"\"\"Tell worker(s) to send task-related events.\"\"\"\n dispatcher = state.consumer.event_dispatcher\n if dispatcher.groups and 'task' not in dispatcher.groups:\n dispatcher.groups.add('task')\n logger.info('Events of group {task} enabled by remote.')\n return ok('task events enabled')\n return ok('task events already enabled')\n\n\n@control_command()\ndef disable_events(state):\n \"\"\"Tell 
worker(s) to stop sending task-related events.\"\"\"\n dispatcher = state.consumer.event_dispatcher\n if 'task' in dispatcher.groups:\n dispatcher.groups.discard('task')\n logger.info('Events of group {task} disabled by remote.')\n return ok('task events disabled')\n return ok('task events already disabled')\n\n\n@control_command()\ndef heartbeat(state):\n \"\"\"Tell worker(s) to send event heartbeat immediately.\"\"\"\n logger.debug('Heartbeat requested by remote.')\n dispatcher = state.consumer.event_dispatcher\n dispatcher.send('worker-heartbeat', freq=5, **worker_state.SOFTWARE_INFO)\n\n\n# -- Worker\n\n@inspect_command(visible=False)\ndef hello(state, from_node, revoked=None, **kwargs):\n \"\"\"Request mingle sync-data.\"\"\"\n # pylint: disable=redefined-outer-name\n # XXX Note that this redefines `revoked`:\n # Outside of this scope that is a function.\n if from_node != state.hostname:\n logger.info('sync with %s', from_node)\n if revoked:\n worker_state.revoked.update(revoked)\n # Do not send expired items to the other worker.\n worker_state.revoked.purge()\n return {\n 'revoked': worker_state.revoked._data,\n 'clock': state.app.clock.forward(),\n }\n\n\n@inspect_command(default_timeout=0.2)\ndef ping(state, **kwargs):\n \"\"\"Ping worker(s).\"\"\"\n return ok('pong')\n\n\n@inspect_command()\ndef stats(state, **kwargs):\n \"\"\"Request worker statistics/information.\"\"\"\n return state.consumer.controller.stats()\n\n\n@inspect_command(alias='dump_schedule')\ndef scheduled(state, **kwargs):\n \"\"\"List of currently scheduled ETA/countdown tasks.\"\"\"\n return list(_iter_schedule_requests(state.consumer.timer))\n\n\ndef _iter_schedule_requests(timer):\n for waiting in timer.schedule.queue:\n try:\n arg0 = waiting.entry.args[0]\n except (IndexError, TypeError):\n continue\n else:\n if isinstance(arg0, Request):\n yield {\n 'eta': arg0.eta.isoformat() if arg0.eta else None,\n 'priority': waiting.priority,\n 'request': arg0.info(),\n }\n\n\n@inspect_command(alias='dump_reserved')\ndef reserved(state, **kwargs):\n \"\"\"List of currently reserved tasks, not including scheduled/active.\"\"\"\n reserved_tasks = (\n state.tset(worker_state.reserved_requests) -\n state.tset(worker_state.active_requests)\n )\n if not reserved_tasks:\n return []\n return [request.info() for request in reserved_tasks]\n\n\n@inspect_command(alias='dump_active')\ndef active(state, safe=False, **kwargs):\n \"\"\"List of tasks currently being executed.\"\"\"\n return [request.info(safe=safe)\n for request in state.tset(worker_state.active_requests)]\n\n\n@inspect_command(alias='dump_revoked')\ndef revoked(state, **kwargs):\n \"\"\"List of revoked task-ids.\"\"\"\n return list(worker_state.revoked)\n\n\n@inspect_command(\n alias='dump_tasks',\n variadic='taskinfoitems',\n signature='[attr1 [attr2 [... 
[attrN]]]]',\n)\ndef registered(state, taskinfoitems=None, builtins=False, **kwargs):\n \"\"\"List of registered tasks.\n\n Arguments:\n taskinfoitems (Sequence[str]): List of task attributes to include.\n Defaults to ``exchange,routing_key,rate_limit``.\n builtins (bool): Also include built-in tasks.\n \"\"\"\n reg = state.app.tasks\n taskinfoitems = taskinfoitems or DEFAULT_TASK_INFO_ITEMS\n\n tasks = reg if builtins else (\n task for task in reg if not task.startswith('celery.'))\n\n def _extract_info(task):\n fields = {\n field: str(getattr(task, field, None)) for field in taskinfoitems\n if getattr(task, field, None) is not None\n }\n if fields:\n info = ['='.join(f) for f in fields.items()]\n return '{} [{}]'.format(task.name, ' '.join(info))\n return task.name\n\n return [_extract_info(reg[task]) for task in sorted(tasks)]\n\n\n# -- Debugging\n\n@inspect_command(\n default_timeout=60.0,\n args=[('type', str), ('num', int), ('max_depth', int)],\n signature='[object_type=Request] [num=200 [max_depth=10]]',\n)\ndef objgraph(state, num=200, max_depth=10, type='Request'): # pragma: no cover\n \"\"\"Create graph of uncollected objects (memory-leak debugging).\n\n Arguments:\n num (int): Max number of objects to graph.\n max_depth (int): Traverse at most n levels deep.\n type (str): Name of object to graph. Default is ``\"Request\"``.\n \"\"\"\n try:\n import objgraph as _objgraph\n except ImportError:\n raise ImportError('Requires the objgraph library')\n logger.info('Dumping graph for type %r', type)\n with tempfile.NamedTemporaryFile(prefix='cobjg',\n suffix='.png', delete=False) as fh:\n objects = _objgraph.by_type(type)[:num]\n _objgraph.show_backrefs(\n objects,\n max_depth=max_depth, highlight=lambda v: v in objects,\n filename=fh.name,\n )\n return {'filename': fh.name}\n\n\n@inspect_command()\ndef memsample(state, **kwargs):\n \"\"\"Sample current RSS memory usage.\"\"\"\n from celery.utils.debug import sample_mem\n return sample_mem()\n\n\n@inspect_command(\n args=[('samples', int)],\n signature='[n_samples=10]',\n)\ndef memdump(state, samples=10, **kwargs): # pragma: no cover\n \"\"\"Dump statistics of previous memsample requests.\"\"\"\n from celery.utils import debug\n out = io.StringIO()\n debug.memdump(file=out)\n return out.getvalue()\n\n# -- Pool\n\n\n@control_command(\n args=[('n', int)],\n signature='[N=1]',\n)\ndef pool_grow(state, n=1, **kwargs):\n \"\"\"Grow pool by n processes/threads.\"\"\"\n if state.consumer.controller.autoscaler:\n return nok(\"pool_grow is not supported with autoscale. Adjust autoscale range instead.\")\n else:\n state.consumer.pool.grow(n)\n state.consumer._update_prefetch_count(n)\n return ok('pool will grow')\n\n\n@control_command(\n args=[('n', int)],\n signature='[N=1]',\n)\ndef pool_shrink(state, n=1, **kwargs):\n \"\"\"Shrink pool by n processes/threads.\"\"\"\n if state.consumer.controller.autoscaler:\n return nok(\"pool_shrink is not supported with autoscale. 
Adjust autoscale range instead.\")\n else:\n state.consumer.pool.shrink(n)\n state.consumer._update_prefetch_count(-n)\n return ok('pool will shrink')\n\n\n@control_command()\ndef pool_restart(state, modules=None, reload=False, reloader=None, **kwargs):\n \"\"\"Restart execution pool.\"\"\"\n if state.app.conf.worker_pool_restarts:\n state.consumer.controller.reload(modules, reload, reloader=reloader)\n return ok('reload started')\n else:\n raise ValueError('Pool restarts not enabled')\n\n\n@control_command(\n args=[('max', int), ('min', int)],\n signature='[max [min]]',\n)\ndef autoscale(state, max=None, min=None):\n \"\"\"Modify autoscale settings.\"\"\"\n autoscaler = state.consumer.controller.autoscaler\n if autoscaler:\n max_, min_ = autoscaler.update(max, min)\n return ok(f'autoscale now max={max_} min={min_}')\n raise ValueError('Autoscale not enabled')\n\n\n@control_command()\ndef shutdown(state, msg='Got shutdown from remote', **kwargs):\n \"\"\"Shutdown worker(s).\"\"\"\n logger.warning(msg)\n raise WorkerShutdown(0)\n\n\n# -- Queues\n\n@control_command(\n args=[\n ('queue', str),\n ('exchange', str),\n ('exchange_type', str),\n ('routing_key', str),\n ],\n signature='<queue> [exchange [type [routing_key]]]',\n)\ndef add_consumer(state, queue, exchange=None, exchange_type=None,\n routing_key=None, **options):\n \"\"\"Tell worker(s) to consume from task queue by name.\"\"\"\n state.consumer.call_soon(\n state.consumer.add_task_queue,\n queue, exchange, exchange_type or 'direct', routing_key, **options)\n return ok(f'add consumer {queue}')\n\n\n@control_command(\n args=[('queue', str)],\n signature='<queue>',\n)\ndef cancel_consumer(state, queue, **_):\n \"\"\"Tell worker(s) to stop consuming from task queue by name.\"\"\"\n state.consumer.call_soon(\n state.consumer.cancel_task_queue, queue,\n )\n return ok(f'no longer consuming from {queue}')\n\n\n@inspect_command()\ndef active_queues(state):\n \"\"\"List the task queues a worker is currently consuming from.\"\"\"\n if state.consumer.task_consumer:\n return [dict(queue.as_dict(recurse=True))\n for queue in state.consumer.task_consumer.queues]\n return []\n", "path": "celery/worker/control.py" } ]
diff --git a/celery/worker/control.py b/celery/worker/control.py index 41d059e4116..8cbd92cbd0e 100644 --- a/celery/worker/control.py +++ b/celery/worker/control.py @@ -580,7 +580,7 @@ def autoscale(state, max=None, min=None): def shutdown(state, msg='Got shutdown from remote', **kwargs): """Shutdown worker(s).""" logger.warning(msg) - raise WorkerShutdown(msg) + raise WorkerShutdown(0) # -- Queues
google__osv.dev-731
Missing HTML escaping in advisory description
See https://osv.dev/vulnerability/GHSA-prc3-vjfx-vhm9 for an example; the XSS example in the advisory description is actually interpreted as HTML and breaks the page.
[ { "content": "# Copyright 2021 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Handlers for the OSV web frontend.\"\"\"\n\nimport json\nimport os\nimport math\n\nfrom flask import abort\nfrom flask import Blueprint\nfrom flask import make_response\nfrom flask import redirect\nfrom flask import render_template\nfrom flask import request\nfrom flask import url_for\nimport markdown2\nfrom urllib import parse\n\nimport cache\nimport osv\nimport rate_limiter\nimport source_mapper\nimport utils\n\nblueprint = Blueprint('frontend_handlers', __name__)\n\n_PAGE_SIZE = 16\n_PAGE_LOOKAHEAD = 4\n_REQUESTS_PER_MIN = 30\n_MAX_LINKING_ALIASES = 8\n\nif utils.is_prod():\n redis_host = os.environ.get('REDISHOST', 'localhost')\n redis_port = int(os.environ.get('REDISPORT', 6379))\n limiter = rate_limiter.RateLimiter(\n redis_host, redis_port, requests_per_min=_REQUESTS_PER_MIN)\n\n @blueprint.before_request\n def check_rate_limit():\n ip_addr = request.headers.get('X-Appengine-User-Ip', 'unknown')\n if not limiter.check_request(ip_addr):\n abort(429)\n\n\[email protected]_request\ndef check_cors_preflight():\n \"\"\"Handle CORS preflight requests.\"\"\"\n if request.method != 'OPTIONS':\n return None\n\n response = make_response()\n response.headers.add('Access-Control-Allow-Origin', 'http://localhost:8080')\n response.headers.add('Access-Control-Allow-Methods', '*')\n response.headers.add('Access-Control-Allow-Headers', '*')\n return response\n\n\[email protected]_request\ndef add_cors_headers(response):\n \"\"\"Add CORS headers.\"\"\"\n response.headers.add('Access-Control-Allow-Origin', 'http://localhost:8080')\n return response\n\n\[email protected]('/v2/')\ndef index_v2():\n return redirect('/')\n\n\[email protected]('/v2/<path:subpath>')\ndef index_v2_with_subpath(subpath):\n return redirect('/' + subpath)\n\n\[email protected]('/')\ndef index():\n return render_template(\n 'home.html', ecosystem_counts=osv_get_ecosystem_counts_cached())\n\n\[email protected]('/about')\ndef about():\n return render_template('about.html')\n\n\[email protected]('/list')\ndef list_vulnerabilities():\n \"\"\"Main page.\"\"\"\n is_turbo_frame = request.headers.get('Turbo-Frame')\n\n # Remove page parameter if not from turbo frame\n if not is_turbo_frame:\n if request.args.get('page', 1) != 1:\n q = parse.parse_qs(request.query_string)\n q.pop(b'page', None)\n return redirect(\n url_for(request.endpoint) + '?' + parse.urlencode(q, True))\n\n query = request.args.get('q', '')\n page = int(request.args.get('page', 1))\n ecosystem = request.args.get('ecosystem')\n results = osv_query(query, page, False, ecosystem)\n\n # Fetch ecosystems by default. 
As an optimization, skip when rendering page\n # fragments.\n ecosystem_counts = osv_get_ecosystem_counts_cached(\n ) if not is_turbo_frame else None\n\n return render_template(\n 'list.html',\n page=page,\n total_pages=math.ceil(results['total'] / _PAGE_SIZE),\n query=query,\n selected_ecosystem=ecosystem,\n ecosystem_counts=ecosystem_counts,\n vulnerabilities=results['items'])\n\n\[email protected]('/vulnerability/<vuln_id>')\ndef vulnerability(vuln_id):\n \"\"\"Vulnerability page.\"\"\"\n vuln = osv_get_by_id(vuln_id)\n return render_template('vulnerability.html', vulnerability=vuln)\n\n\ndef bug_to_response(bug, detailed=True):\n \"\"\"Convert a Bug entity to a response object.\"\"\"\n response = osv.vulnerability_to_dict(bug.to_vulnerability())\n response.update({\n 'isFixed': bug.is_fixed,\n 'invalid': bug.status == osv.BugStatus.INVALID\n })\n\n if detailed:\n add_links(response)\n add_source_info(bug, response)\n add_related_aliases(bug, response)\n return response\n\n\ndef add_links(bug):\n \"\"\"Add VCS links where possible.\"\"\"\n\n for entry in bug.get('affected', []):\n for i, affected_range in enumerate(entry.get('ranges', [])):\n affected_range['id'] = i\n if affected_range['type'] != 'GIT':\n continue\n\n repo_url = affected_range.get('repo')\n if not repo_url:\n continue\n\n for event in affected_range.get('events', []):\n if event.get('introduced'):\n event['introduced_link'] = _commit_to_link(repo_url,\n event['introduced'])\n continue\n\n if event.get('fixed'):\n event['fixed_link'] = _commit_to_link(repo_url, event['fixed'])\n continue\n\n if event.get('limit'):\n event['limit_link'] = _commit_to_link(repo_url, event['limit'])\n continue\n\n\ndef add_source_info(bug, response):\n \"\"\"Add source information to `response`.\"\"\"\n if bug.source_of_truth == osv.SourceOfTruth.INTERNAL:\n response['source'] = 'INTERNAL'\n return\n\n source_repo = osv.get_source_repository(bug.source)\n if not source_repo or not source_repo.link:\n return\n\n source_path = osv.source_path(source_repo, bug)\n response['source'] = source_repo.link + source_path\n response['source_link'] = response['source']\n\n\ndef add_related_aliases(bug: osv.Bug, response):\n \"\"\"Add links to other osv entries that's related through aliases\"\"\"\n # Add links to other entries if they exist\n aliases = {}\n for alias in bug.aliases:\n # only if there aren't too many, otherwise skip this\n if len(bug.aliases) <= _MAX_LINKING_ALIASES:\n result = bug.get_by_id(alias)\n else:\n result = False\n aliases[alias] = {'exists': result, 'same_alias_entries': []}\n\n # Add links to other entries that have the same alias or references this\n if bug.aliases:\n query = osv.Bug.query(osv.Bug.aliases.IN(bug.aliases + [bug.id()]))\n for other in query:\n if other.id() == bug.id():\n continue\n for other_alias in other.aliases:\n if other_alias in aliases:\n aliases[other_alias]['same_alias_entries'].append(other.id())\n if bug.id() in other.aliases:\n aliases[other.id()] = {'exists': True, 'same_alias_entries': []}\n\n # Remove self if it was added\n aliases.pop(bug.id(), None)\n\n response['aliases'] = [{\n 'alias_id': aid,\n 'exists': ex['exists'],\n 'same_alias_entries': ex['same_alias_entries']\n } for aid, ex in aliases.items()]\n\n\ndef _commit_to_link(repo_url, commit):\n \"\"\"Convert commit to link.\"\"\"\n vcs = source_mapper.get_vcs_viewer_for_url(repo_url)\n if not vcs:\n return None\n\n if ':' not in commit:\n return vcs.get_source_url_for_revision(commit)\n\n commit_parts = commit.split(':')\n if 
len(commit_parts) != 2:\n return None\n\n start, end = commit_parts\n if start == 'unknown':\n return None\n\n return vcs.get_source_url_for_revision_diff(start, end)\n\n\ndef osv_get_ecosystems():\n \"\"\"Get list of ecosystems.\"\"\"\n query = osv.Bug.query(projection=[osv.Bug.ecosystem], distinct=True)\n return sorted([bug.ecosystem[0] for bug in query if bug.ecosystem],\n key=str.lower)\n\n\n# TODO: Figure out how to skip cache when testing\[email protected](\n timeout=24 * 60 * 60, key_prefix='osv_get_ecosystem_counts')\ndef osv_get_ecosystem_counts_cached():\n \"\"\"Get count of vulnerabilities per ecosystem, cached\"\"\"\n return osv_get_ecosystem_counts()\n\n\ndef osv_get_ecosystem_counts():\n \"\"\"Get count of vulnerabilities per ecosystem.\"\"\"\n counts = {}\n ecosystems = osv_get_ecosystems()\n for ecosystem in ecosystems:\n if ':' in ecosystem:\n # Count by the base ecosystem index. Otherwise we'll overcount as a\n # single entry may refer to multiple sub-ecosystems.\n continue\n\n counts[ecosystem] = osv.Bug.query(\n osv.Bug.ecosystem == ecosystem,\n osv.Bug.public == True, # pylint: disable=singleton-comparison\n osv.Bug.status == osv.BugStatus.PROCESSED).count()\n\n return counts\n\n\ndef osv_query(search_string, page, affected_only, ecosystem):\n \"\"\"Run an OSV query.\"\"\"\n query = osv.Bug.query(osv.Bug.status == osv.BugStatus.PROCESSED,\n osv.Bug.public == True) # pylint: disable=singleton-comparison\n\n if search_string:\n query = query.filter(osv.Bug.search_indices == search_string.lower())\n\n if affected_only:\n query = query.filter(osv.Bug.has_affected == True) # pylint: disable=singleton-comparison\n\n if ecosystem:\n query = query.filter(osv.Bug.ecosystem == ecosystem)\n\n query = query.order(-osv.Bug.last_modified)\n total = query.count()\n results = {\n 'total': total,\n 'items': [],\n }\n\n bugs, _, _ = query.fetch_page(\n page_size=_PAGE_SIZE, offset=(page - 1) * _PAGE_SIZE)\n for bug in bugs:\n results['items'].append(bug_to_response(bug, detailed=False))\n\n return results\n\n\ndef osv_get_by_id(vuln_id):\n \"\"\"Gets bug details from its id. 
If invalid, aborts the request.\"\"\"\n if not vuln_id:\n abort(400)\n return None\n\n bug = osv.Bug.get_by_id(vuln_id)\n if not bug:\n abort(404)\n return None\n\n if bug.status == osv.BugStatus.UNPROCESSED:\n abort(404)\n return None\n\n if not bug.public:\n abort(403)\n return None\n\n return bug_to_response(bug)\n\n\[email protected]_template_filter('event_type')\ndef event_type(event):\n \"\"\"Get the type from an event.\"\"\"\n if event.get('introduced'):\n return 'Introduced'\n if event.get('fixed'):\n return 'Fixed'\n if event.get('limit'):\n return 'Limit'\n if event.get('last_affected'):\n return 'Last affected'\n\n return None\n\n\[email protected]_template_filter('event_link')\ndef event_link(event):\n \"\"\"Get the link from an event.\"\"\"\n if event.get('introduced_link'):\n return event['introduced_link']\n if event.get('fixed_link'):\n return event['fixed_link']\n if event.get('limit_link'):\n return event['limit_link']\n if event.get('last_affected_link'):\n return event['last_affected_link']\n\n return None\n\n\[email protected]_template_filter('event_value')\ndef event_value(event):\n \"\"\"Get the value from an event.\"\"\"\n if event.get('introduced'):\n return event['introduced']\n if event.get('fixed'):\n return event['fixed']\n if event.get('limit'):\n return event['limit']\n if event.get('last_affected'):\n return event['last_affected']\n\n return None\n\n\[email protected]_template_filter('should_collapse')\ndef should_collapse(affected):\n \"\"\"Whether if we should collapse the package tab bar.\"\"\"\n total_package_length = sum(\n len(entry.get('package', {}).get('name', '')) for entry in affected)\n return total_package_length > 70 or len(affected) > 5\n\n\[email protected]_template_filter('group_versions')\ndef group_versions(versions):\n \"\"\"Group versions by prefix.\"\"\"\n groups = {}\n\n for version in sorted(versions):\n if '.' not in version:\n groups.setdefault('Other', []).append(version)\n continue\n\n label = version.split('.')[0] + '.*'\n groups.setdefault(label, []).append(version)\n\n return groups\n\n\[email protected]_template_filter('markdown')\ndef markdown(text):\n \"\"\"Render markdown.\"\"\"\n if text:\n return markdown2.markdown(text, extras=['fenced-code-blocks'])\n\n return ''\n\n\[email protected]_template_filter('display_json')\ndef display_json(data):\n # We can't use the default `tojson` filter as it's intended for code (and\n # escapes characters like '<' to '\\u003c'). We want to render the JSON for\n # display purposes and use HTML escaping ('&lt;') instead so it's rendered\n # as '<'.\n return json.dumps(data, indent=4)\n\n\[email protected]_template_filter('log')\ndef logarithm(n):\n return math.log(n)\n", "path": "gcp/appengine/frontend_handlers.py" } ]
[ { "content": "# Copyright 2021 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Handlers for the OSV web frontend.\"\"\"\n\nimport json\nimport os\nimport math\n\nfrom flask import abort\nfrom flask import Blueprint\nfrom flask import make_response\nfrom flask import redirect\nfrom flask import render_template\nfrom flask import request\nfrom flask import url_for\nimport markdown2\nfrom urllib import parse\n\nimport cache\nimport osv\nimport rate_limiter\nimport source_mapper\nimport utils\n\nblueprint = Blueprint('frontend_handlers', __name__)\n\n_PAGE_SIZE = 16\n_PAGE_LOOKAHEAD = 4\n_REQUESTS_PER_MIN = 30\n_MAX_LINKING_ALIASES = 8\n\nif utils.is_prod():\n redis_host = os.environ.get('REDISHOST', 'localhost')\n redis_port = int(os.environ.get('REDISPORT', 6379))\n limiter = rate_limiter.RateLimiter(\n redis_host, redis_port, requests_per_min=_REQUESTS_PER_MIN)\n\n @blueprint.before_request\n def check_rate_limit():\n ip_addr = request.headers.get('X-Appengine-User-Ip', 'unknown')\n if not limiter.check_request(ip_addr):\n abort(429)\n\n\[email protected]_request\ndef check_cors_preflight():\n \"\"\"Handle CORS preflight requests.\"\"\"\n if request.method != 'OPTIONS':\n return None\n\n response = make_response()\n response.headers.add('Access-Control-Allow-Origin', 'http://localhost:8080')\n response.headers.add('Access-Control-Allow-Methods', '*')\n response.headers.add('Access-Control-Allow-Headers', '*')\n return response\n\n\[email protected]_request\ndef add_cors_headers(response):\n \"\"\"Add CORS headers.\"\"\"\n response.headers.add('Access-Control-Allow-Origin', 'http://localhost:8080')\n return response\n\n\[email protected]('/v2/')\ndef index_v2():\n return redirect('/')\n\n\[email protected]('/v2/<path:subpath>')\ndef index_v2_with_subpath(subpath):\n return redirect('/' + subpath)\n\n\[email protected]('/')\ndef index():\n return render_template(\n 'home.html', ecosystem_counts=osv_get_ecosystem_counts_cached())\n\n\[email protected]('/about')\ndef about():\n return render_template('about.html')\n\n\[email protected]('/list')\ndef list_vulnerabilities():\n \"\"\"Main page.\"\"\"\n is_turbo_frame = request.headers.get('Turbo-Frame')\n\n # Remove page parameter if not from turbo frame\n if not is_turbo_frame:\n if request.args.get('page', 1) != 1:\n q = parse.parse_qs(request.query_string)\n q.pop(b'page', None)\n return redirect(\n url_for(request.endpoint) + '?' + parse.urlencode(q, True))\n\n query = request.args.get('q', '')\n page = int(request.args.get('page', 1))\n ecosystem = request.args.get('ecosystem')\n results = osv_query(query, page, False, ecosystem)\n\n # Fetch ecosystems by default. 
As an optimization, skip when rendering page\n # fragments.\n ecosystem_counts = osv_get_ecosystem_counts_cached(\n ) if not is_turbo_frame else None\n\n return render_template(\n 'list.html',\n page=page,\n total_pages=math.ceil(results['total'] / _PAGE_SIZE),\n query=query,\n selected_ecosystem=ecosystem,\n ecosystem_counts=ecosystem_counts,\n vulnerabilities=results['items'])\n\n\[email protected]('/vulnerability/<vuln_id>')\ndef vulnerability(vuln_id):\n \"\"\"Vulnerability page.\"\"\"\n vuln = osv_get_by_id(vuln_id)\n return render_template('vulnerability.html', vulnerability=vuln)\n\n\ndef bug_to_response(bug, detailed=True):\n \"\"\"Convert a Bug entity to a response object.\"\"\"\n response = osv.vulnerability_to_dict(bug.to_vulnerability())\n response.update({\n 'isFixed': bug.is_fixed,\n 'invalid': bug.status == osv.BugStatus.INVALID\n })\n\n if detailed:\n add_links(response)\n add_source_info(bug, response)\n add_related_aliases(bug, response)\n return response\n\n\ndef add_links(bug):\n \"\"\"Add VCS links where possible.\"\"\"\n\n for entry in bug.get('affected', []):\n for i, affected_range in enumerate(entry.get('ranges', [])):\n affected_range['id'] = i\n if affected_range['type'] != 'GIT':\n continue\n\n repo_url = affected_range.get('repo')\n if not repo_url:\n continue\n\n for event in affected_range.get('events', []):\n if event.get('introduced'):\n event['introduced_link'] = _commit_to_link(repo_url,\n event['introduced'])\n continue\n\n if event.get('fixed'):\n event['fixed_link'] = _commit_to_link(repo_url, event['fixed'])\n continue\n\n if event.get('limit'):\n event['limit_link'] = _commit_to_link(repo_url, event['limit'])\n continue\n\n\ndef add_source_info(bug, response):\n \"\"\"Add source information to `response`.\"\"\"\n if bug.source_of_truth == osv.SourceOfTruth.INTERNAL:\n response['source'] = 'INTERNAL'\n return\n\n source_repo = osv.get_source_repository(bug.source)\n if not source_repo or not source_repo.link:\n return\n\n source_path = osv.source_path(source_repo, bug)\n response['source'] = source_repo.link + source_path\n response['source_link'] = response['source']\n\n\ndef add_related_aliases(bug: osv.Bug, response):\n \"\"\"Add links to other osv entries that's related through aliases\"\"\"\n # Add links to other entries if they exist\n aliases = {}\n for alias in bug.aliases:\n # only if there aren't too many, otherwise skip this\n if len(bug.aliases) <= _MAX_LINKING_ALIASES:\n result = bug.get_by_id(alias)\n else:\n result = False\n aliases[alias] = {'exists': result, 'same_alias_entries': []}\n\n # Add links to other entries that have the same alias or references this\n if bug.aliases:\n query = osv.Bug.query(osv.Bug.aliases.IN(bug.aliases + [bug.id()]))\n for other in query:\n if other.id() == bug.id():\n continue\n for other_alias in other.aliases:\n if other_alias in aliases:\n aliases[other_alias]['same_alias_entries'].append(other.id())\n if bug.id() in other.aliases:\n aliases[other.id()] = {'exists': True, 'same_alias_entries': []}\n\n # Remove self if it was added\n aliases.pop(bug.id(), None)\n\n response['aliases'] = [{\n 'alias_id': aid,\n 'exists': ex['exists'],\n 'same_alias_entries': ex['same_alias_entries']\n } for aid, ex in aliases.items()]\n\n\ndef _commit_to_link(repo_url, commit):\n \"\"\"Convert commit to link.\"\"\"\n vcs = source_mapper.get_vcs_viewer_for_url(repo_url)\n if not vcs:\n return None\n\n if ':' not in commit:\n return vcs.get_source_url_for_revision(commit)\n\n commit_parts = commit.split(':')\n if 
len(commit_parts) != 2:\n return None\n\n start, end = commit_parts\n if start == 'unknown':\n return None\n\n return vcs.get_source_url_for_revision_diff(start, end)\n\n\ndef osv_get_ecosystems():\n \"\"\"Get list of ecosystems.\"\"\"\n query = osv.Bug.query(projection=[osv.Bug.ecosystem], distinct=True)\n return sorted([bug.ecosystem[0] for bug in query if bug.ecosystem],\n key=str.lower)\n\n\n# TODO: Figure out how to skip cache when testing\[email protected](\n timeout=24 * 60 * 60, key_prefix='osv_get_ecosystem_counts')\ndef osv_get_ecosystem_counts_cached():\n \"\"\"Get count of vulnerabilities per ecosystem, cached\"\"\"\n return osv_get_ecosystem_counts()\n\n\ndef osv_get_ecosystem_counts():\n \"\"\"Get count of vulnerabilities per ecosystem.\"\"\"\n counts = {}\n ecosystems = osv_get_ecosystems()\n for ecosystem in ecosystems:\n if ':' in ecosystem:\n # Count by the base ecosystem index. Otherwise we'll overcount as a\n # single entry may refer to multiple sub-ecosystems.\n continue\n\n counts[ecosystem] = osv.Bug.query(\n osv.Bug.ecosystem == ecosystem,\n osv.Bug.public == True, # pylint: disable=singleton-comparison\n osv.Bug.status == osv.BugStatus.PROCESSED).count()\n\n return counts\n\n\ndef osv_query(search_string, page, affected_only, ecosystem):\n \"\"\"Run an OSV query.\"\"\"\n query = osv.Bug.query(osv.Bug.status == osv.BugStatus.PROCESSED,\n osv.Bug.public == True) # pylint: disable=singleton-comparison\n\n if search_string:\n query = query.filter(osv.Bug.search_indices == search_string.lower())\n\n if affected_only:\n query = query.filter(osv.Bug.has_affected == True) # pylint: disable=singleton-comparison\n\n if ecosystem:\n query = query.filter(osv.Bug.ecosystem == ecosystem)\n\n query = query.order(-osv.Bug.last_modified)\n total = query.count()\n results = {\n 'total': total,\n 'items': [],\n }\n\n bugs, _, _ = query.fetch_page(\n page_size=_PAGE_SIZE, offset=(page - 1) * _PAGE_SIZE)\n for bug in bugs:\n results['items'].append(bug_to_response(bug, detailed=False))\n\n return results\n\n\ndef osv_get_by_id(vuln_id):\n \"\"\"Gets bug details from its id. 
If invalid, aborts the request.\"\"\"\n if not vuln_id:\n abort(400)\n return None\n\n bug = osv.Bug.get_by_id(vuln_id)\n if not bug:\n abort(404)\n return None\n\n if bug.status == osv.BugStatus.UNPROCESSED:\n abort(404)\n return None\n\n if not bug.public:\n abort(403)\n return None\n\n return bug_to_response(bug)\n\n\[email protected]_template_filter('event_type')\ndef event_type(event):\n \"\"\"Get the type from an event.\"\"\"\n if event.get('introduced'):\n return 'Introduced'\n if event.get('fixed'):\n return 'Fixed'\n if event.get('limit'):\n return 'Limit'\n if event.get('last_affected'):\n return 'Last affected'\n\n return None\n\n\[email protected]_template_filter('event_link')\ndef event_link(event):\n \"\"\"Get the link from an event.\"\"\"\n if event.get('introduced_link'):\n return event['introduced_link']\n if event.get('fixed_link'):\n return event['fixed_link']\n if event.get('limit_link'):\n return event['limit_link']\n if event.get('last_affected_link'):\n return event['last_affected_link']\n\n return None\n\n\[email protected]_template_filter('event_value')\ndef event_value(event):\n \"\"\"Get the value from an event.\"\"\"\n if event.get('introduced'):\n return event['introduced']\n if event.get('fixed'):\n return event['fixed']\n if event.get('limit'):\n return event['limit']\n if event.get('last_affected'):\n return event['last_affected']\n\n return None\n\n\[email protected]_template_filter('should_collapse')\ndef should_collapse(affected):\n \"\"\"Whether if we should collapse the package tab bar.\"\"\"\n total_package_length = sum(\n len(entry.get('package', {}).get('name', '')) for entry in affected)\n return total_package_length > 70 or len(affected) > 5\n\n\[email protected]_template_filter('group_versions')\ndef group_versions(versions):\n \"\"\"Group versions by prefix.\"\"\"\n groups = {}\n\n for version in sorted(versions):\n if '.' not in version:\n groups.setdefault('Other', []).append(version)\n continue\n\n label = version.split('.')[0] + '.*'\n groups.setdefault(label, []).append(version)\n\n return groups\n\n\[email protected]_template_filter('markdown')\ndef markdown(text):\n \"\"\"Render markdown.\"\"\"\n if text:\n return markdown2.markdown(\n text, safe_mode='escape', extras=['fenced-code-blocks'])\n\n return ''\n\n\[email protected]_template_filter('display_json')\ndef display_json(data):\n # We can't use the default `tojson` filter as it's intended for code (and\n # escapes characters like '<' to '\\u003c'). We want to render the JSON for\n # display purposes and use HTML escaping ('&lt;') instead so it's rendered\n # as '<'.\n return json.dumps(data, indent=4)\n\n\[email protected]_template_filter('log')\ndef logarithm(n):\n return math.log(n)\n", "path": "gcp/appengine/frontend_handlers.py" } ]
diff --git a/gcp/appengine/frontend_handlers.py b/gcp/appengine/frontend_handlers.py index fa89259ea86..810f3bbe70e 100644 --- a/gcp/appengine/frontend_handlers.py +++ b/gcp/appengine/frontend_handlers.py @@ -403,7 +403,8 @@ def group_versions(versions): def markdown(text): """Render markdown.""" if text: - return markdown2.markdown(text, extras=['fenced-code-blocks']) + return markdown2.markdown( + text, safe_mode='escape', extras=['fenced-code-blocks']) return ''
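For context on the fix recorded above: markdown2's `safe_mode='escape'` entity-escapes raw HTML embedded in the Markdown source, which is what stops an injected `<script>` tag in an advisory description from being executed by the browser. The snippet below is a minimal standalone sketch of that behavior, not code from the osv.dev repository; the sample description string is invented for illustration and the only assumption is that the markdown2 package is installed.

```python
# Minimal sketch: compare markdown2 output with and without safe_mode='escape'.
import markdown2

# Hypothetical advisory description containing an XSS payload.
description = 'Proof of concept: <script>alert("xss")</script> in the description.'

# Without safe_mode, the raw <script> tag passes through into the rendered HTML.
unsafe_html = markdown2.markdown(description, extras=['fenced-code-blocks'])

# With safe_mode='escape', raw HTML is entity-escaped (&lt;script&gt;...),
# so it is displayed as text instead of being interpreted by the browser.
safe_html = markdown2.markdown(
    description, safe_mode='escape', extras=['fenced-code-blocks'])

print(unsafe_html)  # contains <script>...</script>
print(safe_html)    # contains &lt;script&gt;...&lt;/script&gt;
```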